1use super::{ignore::IgnoreStack, DiagnosticSummary};
2use crate::{copy_recursive, ProjectEntryId, RemoveOptions};
3use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
4use anyhow::{anyhow, Context, Result};
5use client::{proto, Client};
6use clock::ReplicaId;
7use collections::{HashMap, VecDeque};
8use fs::{repository::GitRepository, Fs};
9use fs::{HomeDir, LineEnding};
10use futures::{
11 channel::{
12 mpsc::{self, UnboundedSender},
13 oneshot,
14 },
15 Stream, StreamExt,
16};
17use fuzzy::CharBag;
18use git::{DOT_GIT, GITIGNORE};
19use gpui::{
20 executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
21 Task,
22};
23use language::Unclipped;
24use language::{
25 proto::{deserialize_version, serialize_line_ending, serialize_version},
26 Buffer, DiagnosticEntry, PointUtf16, Rope,
27};
28use parking_lot::Mutex;
29use postage::{
30 prelude::{Sink as _, Stream as _},
31 watch,
32};
33
34use smol::channel::{self, Sender};
35use std::{
36 any::Any,
37 cmp::{self, Ordering},
38 convert::TryFrom,
39 ffi::{OsStr, OsString},
40 fmt,
41 future::Future,
42 mem,
43 ops::{Deref, DerefMut},
44 path::{Path, PathBuf},
45 sync::{atomic::AtomicUsize, Arc},
46 task::Poll,
47 time::{Duration, SystemTime},
48};
49use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
50use util::{ResultExt, TryFutureExt};
51
52#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
53pub struct WorktreeId(usize);
54
55#[allow(clippy::large_enum_variant)]
56pub enum Worktree {
57 Local(LocalWorktree),
58 Remote(RemoteWorktree),
59}
60
61pub struct LocalWorktree {
62 snapshot: LocalSnapshot,
63 background_snapshot: Arc<Mutex<LocalSnapshot>>,
64 last_scan_state_rx: watch::Receiver<ScanState>,
65 _background_scanner_task: Option<Task<()>>,
66 poll_task: Option<Task<()>>,
67 share: Option<ShareState>,
68 diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<Unclipped<PointUtf16>>>>,
69 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
70 client: Arc<Client>,
71 fs: Arc<dyn Fs>,
72 visible: bool,
73}
74
75pub struct RemoteWorktree {
76 pub snapshot: Snapshot,
77 pub(crate) background_snapshot: Arc<Mutex<Snapshot>>,
78 project_id: u64,
79 client: Arc<Client>,
80 updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
81 snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
82 replica_id: ReplicaId,
83 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
84 visible: bool,
85 disconnected: bool,
86}
87
88#[derive(Clone)]
89pub struct Snapshot {
90 id: WorktreeId,
91 abs_path: Arc<Path>,
92 root_name: String,
93 root_char_bag: CharBag,
94 entries_by_path: SumTree<Entry>,
95 entries_by_id: SumTree<PathEntry>,
96 scan_id: usize,
97 is_complete: bool,
98}
99
100#[derive(Clone)]
101pub struct GitRepositoryEntry {
102 pub(crate) repo: Arc<Mutex<dyn GitRepository>>,
103
104 pub(crate) scan_id: usize,
105 // Path to the folder containing the .git file or directory
106 pub(crate) content_path: Arc<Path>,
107 // Path to the actual .git folder.
108 // Note: if .git is a file, this points to the folder indicated by the .git file
109 pub(crate) git_dir_path: Arc<Path>,
110}
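// Illustrative sketch (not part of the original; the paths are hypothetical): for a
// repository checked out at `projects/zed` inside the worktree, `content_path` would
// be `projects/zed` and `git_dir_path` would be `projects/zed/.git`. When `.git` is a
// file (for example in a linked git worktree), `git_dir_path` instead points at the
// directory that file names, while `content_path` stays the same.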
111
112impl std::fmt::Debug for GitRepositoryEntry {
113 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
114 f.debug_struct("GitRepositoryEntry")
115 .field("content_path", &self.content_path)
116 .field("git_dir_path", &self.git_dir_path)
117 .field("libgit_repository", &"LibGitRepository")
118 .finish()
119 }
120}
121
122pub struct LocalSnapshot {
123 ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
124 git_repositories: Vec<GitRepositoryEntry>,
125 removed_entry_ids: HashMap<u64, ProjectEntryId>,
126 next_entry_id: Arc<AtomicUsize>,
127 snapshot: Snapshot,
128 extension_counts: HashMap<OsString, usize>,
129}
130
131impl Clone for LocalSnapshot {
132 fn clone(&self) -> Self {
133 Self {
134 ignores_by_parent_abs_path: self.ignores_by_parent_abs_path.clone(),
135 git_repositories: self.git_repositories.iter().cloned().collect(),
136 removed_entry_ids: self.removed_entry_ids.clone(),
137 next_entry_id: self.next_entry_id.clone(),
138 snapshot: self.snapshot.clone(),
139 extension_counts: self.extension_counts.clone(),
140 }
141 }
142}
143
144impl Deref for LocalSnapshot {
145 type Target = Snapshot;
146
147 fn deref(&self) -> &Self::Target {
148 &self.snapshot
149 }
150}
151
152impl DerefMut for LocalSnapshot {
153 fn deref_mut(&mut self) -> &mut Self::Target {
154 &mut self.snapshot
155 }
156}
157
158#[derive(Clone, Debug)]
159enum ScanState {
160 Idle,
161 /// The worktree is performing its initial scan of the filesystem.
162 Initializing,
163 /// The worktree is updating in response to filesystem events.
164 Updating,
165 Err(Arc<anyhow::Error>),
166}
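// Rough lifecycle sketch (inferred from this file, not stated by the original): a
// local worktree starts in `Initializing` while the background scanner performs its
// first walk of the filesystem, settles into `Idle` once that scan finishes, moves to
// `Updating` while filesystem events are being processed, and reports `Err` if the
// scanner fails. `LocalWorktree::scan_complete` simply waits until the state is
// neither `Initializing` nor `Updating`.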
167
168struct ShareState {
169 project_id: u64,
170 snapshots_tx: watch::Sender<LocalSnapshot>,
171 _maintain_remote_snapshot: Task<Option<()>>,
172}
173
174pub enum Event {
175 UpdatedEntries,
176 UpdatedGitRepositories(Vec<GitRepositoryEntry>),
177}
178
179impl Entity for Worktree {
180 type Event = Event;
181}
182
183impl Worktree {
184 pub async fn local(
185 client: Arc<Client>,
186 path: impl Into<Arc<Path>>,
187 visible: bool,
188 fs: Arc<dyn Fs>,
189 next_entry_id: Arc<AtomicUsize>,
190 cx: &mut AsyncAppContext,
191 ) -> Result<ModelHandle<Self>> {
192 let (tree, scan_states_tx) =
193 LocalWorktree::create(client, path, visible, fs.clone(), next_entry_id, cx).await?;
194 tree.update(cx, |tree, cx| {
195 let tree = tree.as_local_mut().unwrap();
196 let abs_path = tree.abs_path().clone();
197 let background_snapshot = tree.background_snapshot.clone();
198 let background = cx.background().clone();
199 tree._background_scanner_task = Some(cx.background().spawn(async move {
200 let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
201 let scanner =
202 BackgroundScanner::new(background_snapshot, scan_states_tx, fs, background);
203 scanner.run(events).await;
204 }));
205 });
206 Ok(tree)
207 }
208
209 pub fn remote(
210 project_remote_id: u64,
211 replica_id: ReplicaId,
212 worktree: proto::WorktreeMetadata,
213 client: Arc<Client>,
214 cx: &mut MutableAppContext,
215 ) -> ModelHandle<Self> {
216 let remote_id = worktree.id;
217 let root_char_bag: CharBag = worktree
218 .root_name
219 .chars()
220 .map(|c| c.to_ascii_lowercase())
221 .collect();
222 let root_name = worktree.root_name.clone();
223 let visible = worktree.visible;
224
225 let abs_path = PathBuf::from(worktree.abs_path);
226 let snapshot = Snapshot {
227 id: WorktreeId(remote_id as usize),
228 abs_path: Arc::from(abs_path.deref()),
229 root_name,
230 root_char_bag,
231 entries_by_path: Default::default(),
232 entries_by_id: Default::default(),
233 scan_id: 0,
234 is_complete: false,
235 };
236
237 let (updates_tx, mut updates_rx) = mpsc::unbounded();
238 let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
239 let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
240 let worktree_handle = cx.add_model(|_: &mut ModelContext<Worktree>| {
241 Worktree::Remote(RemoteWorktree {
242 project_id: project_remote_id,
243 replica_id,
244 snapshot: snapshot.clone(),
245 background_snapshot: background_snapshot.clone(),
246 updates_tx: Some(updates_tx),
247 snapshot_subscriptions: Default::default(),
248 client: client.clone(),
249 diagnostic_summaries: Default::default(),
250 visible,
251 disconnected: false,
252 })
253 });
254
255 cx.background()
256 .spawn(async move {
257 while let Some(update) = updates_rx.next().await {
258 if let Err(error) = background_snapshot.lock().apply_remote_update(update) {
259 log::error!("error applying worktree update: {}", error);
260 }
261 snapshot_updated_tx.send(()).await.ok();
262 }
263 })
264 .detach();
265
266 cx.spawn(|mut cx| {
267 let this = worktree_handle.downgrade();
268 async move {
269 while (snapshot_updated_rx.recv().await).is_some() {
270 if let Some(this) = this.upgrade(&cx) {
271 this.update(&mut cx, |this, cx| {
272 this.poll_snapshot(cx);
273 let this = this.as_remote_mut().unwrap();
274 while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
275 if this.observed_snapshot(*scan_id) {
276 let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
277 let _ = tx.send(());
278 } else {
279 break;
280 }
281 }
282 });
283 } else {
284 break;
285 }
286 }
287 }
288 })
289 .detach();
290
291 worktree_handle
292 }
293
294 pub fn as_local(&self) -> Option<&LocalWorktree> {
295 if let Worktree::Local(worktree) = self {
296 Some(worktree)
297 } else {
298 None
299 }
300 }
301
302 pub fn as_remote(&self) -> Option<&RemoteWorktree> {
303 if let Worktree::Remote(worktree) = self {
304 Some(worktree)
305 } else {
306 None
307 }
308 }
309
310 pub fn as_local_mut(&mut self) -> Option<&mut LocalWorktree> {
311 if let Worktree::Local(worktree) = self {
312 Some(worktree)
313 } else {
314 None
315 }
316 }
317
318 pub fn as_remote_mut(&mut self) -> Option<&mut RemoteWorktree> {
319 if let Worktree::Remote(worktree) = self {
320 Some(worktree)
321 } else {
322 None
323 }
324 }
325
326 pub fn is_local(&self) -> bool {
327 matches!(self, Worktree::Local(_))
328 }
329
330 pub fn is_remote(&self) -> bool {
331 !self.is_local()
332 }
333
334 pub fn snapshot(&self) -> Snapshot {
335 match self {
336 Worktree::Local(worktree) => worktree.snapshot().snapshot,
337 Worktree::Remote(worktree) => worktree.snapshot(),
338 }
339 }
340
341 pub fn scan_id(&self) -> usize {
342 match self {
343 Worktree::Local(worktree) => worktree.snapshot.scan_id,
344 Worktree::Remote(worktree) => worktree.snapshot.scan_id,
345 }
346 }
347
348 pub fn is_visible(&self) -> bool {
349 match self {
350 Worktree::Local(worktree) => worktree.visible,
351 Worktree::Remote(worktree) => worktree.visible,
352 }
353 }
354
355 pub fn replica_id(&self) -> ReplicaId {
356 match self {
357 Worktree::Local(_) => 0,
358 Worktree::Remote(worktree) => worktree.replica_id,
359 }
360 }
361
362 pub fn diagnostic_summaries(
363 &self,
364 ) -> impl Iterator<Item = (Arc<Path>, DiagnosticSummary)> + '_ {
365 match self {
366 Worktree::Local(worktree) => &worktree.diagnostic_summaries,
367 Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
368 }
369 .iter()
370 .map(|(path, summary)| (path.0.clone(), *summary))
371 }
372
373 fn poll_snapshot(&mut self, cx: &mut ModelContext<Self>) {
374 match self {
375 Self::Local(worktree) => worktree.poll_snapshot(false, cx),
376 Self::Remote(worktree) => worktree.poll_snapshot(cx),
377 };
378 }
379
380 pub fn abs_path(&self) -> Arc<Path> {
381 match self {
382 Worktree::Local(worktree) => worktree.abs_path.clone(),
383 Worktree::Remote(worktree) => worktree.abs_path.clone(),
384 }
385 }
386}
387
388impl LocalWorktree {
389 async fn create(
390 client: Arc<Client>,
391 path: impl Into<Arc<Path>>,
392 visible: bool,
393 fs: Arc<dyn Fs>,
394 next_entry_id: Arc<AtomicUsize>,
395 cx: &mut AsyncAppContext,
396 ) -> Result<(ModelHandle<Worktree>, UnboundedSender<ScanState>)> {
397 let abs_path = path.into();
398 let path: Arc<Path> = Arc::from(Path::new(""));
399
400 // After determining whether the root entry is a file or a directory, populate the
401 // snapshot's "root name", which will be used for fuzzy matching.
402 let root_name = abs_path
403 .file_name()
404 .map_or(String::new(), |f| f.to_string_lossy().to_string());
405 let root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect();
406 let metadata = fs
407 .metadata(&abs_path)
408 .await
409 .context("failed to stat worktree path")?;
410
411 let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
412 let (mut last_scan_state_tx, last_scan_state_rx) =
413 watch::channel_with(ScanState::Initializing);
414 let tree = cx.add_model(move |cx: &mut ModelContext<Worktree>| {
415 let mut snapshot = LocalSnapshot {
416 ignores_by_parent_abs_path: Default::default(),
417 git_repositories: Default::default(),
418 removed_entry_ids: Default::default(),
419 next_entry_id,
420 snapshot: Snapshot {
421 id: WorktreeId::from_usize(cx.model_id()),
422 abs_path,
423 root_name: root_name.clone(),
424 root_char_bag,
425 entries_by_path: Default::default(),
426 entries_by_id: Default::default(),
427 scan_id: 0,
428 is_complete: true,
429 },
430 extension_counts: Default::default(),
431 };
432 if let Some(metadata) = metadata {
433 let entry = Entry::new(
434 path,
435 &metadata,
436 &snapshot.next_entry_id,
437 snapshot.root_char_bag,
438 );
439 snapshot.insert_entry(entry, fs.as_ref());
440 }
441
442 let tree = Self {
443 snapshot: snapshot.clone(),
444 background_snapshot: Arc::new(Mutex::new(snapshot)),
445 last_scan_state_rx,
446 _background_scanner_task: None,
447 share: None,
448 poll_task: None,
449 diagnostics: Default::default(),
450 diagnostic_summaries: Default::default(),
451 client,
452 fs,
453 visible,
454 };
455
456 cx.spawn_weak(|this, mut cx| async move {
457 while let Some(scan_state) = scan_states_rx.next().await {
458 if let Some(this) = this.upgrade(&cx) {
459 last_scan_state_tx.blocking_send(scan_state).ok();
460 this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
461 } else {
462 break;
463 }
464 }
465 })
466 .detach();
467
468 Worktree::Local(tree)
469 });
470
471 Ok((tree, scan_states_tx))
472 }
473
474 pub fn contains_abs_path(&self, path: &Path) -> bool {
475 path.starts_with(&self.abs_path)
476 }
477
478 fn absolutize(&self, path: &Path) -> PathBuf {
479 if path.file_name().is_some() {
480 self.abs_path.join(path)
481 } else {
482 self.abs_path.to_path_buf()
483 }
484 }
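// For example (hypothetical paths): with an `abs_path` of `/home/user/zed`,
// `absolutize(Path::new("src/lib.rs"))` yields `/home/user/zed/src/lib.rs`, while
// `absolutize(Path::new(""))` (the worktree root, which has no file name) yields
// `/home/user/zed` itself rather than joining an empty component.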
485
486 pub(crate) fn load_buffer(
487 &mut self,
488 path: &Path,
489 cx: &mut ModelContext<Worktree>,
490 ) -> Task<Result<ModelHandle<Buffer>>> {
491 let path = Arc::from(path);
492 cx.spawn(move |this, mut cx| async move {
493 let (file, contents, diff_base) = this
494 .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
495 .await?;
496 Ok(cx.add_model(|cx| {
497 let mut buffer = Buffer::from_file(0, contents, diff_base, Arc::new(file), cx);
498 buffer.git_diff_recalc(cx);
499 buffer
500 }))
501 })
502 }
503
504 pub fn diagnostics_for_path(
505 &self,
506 path: &Path,
507 ) -> Option<Vec<DiagnosticEntry<Unclipped<PointUtf16>>>> {
508 self.diagnostics.get(path).cloned()
509 }
510
511 pub fn update_diagnostics(
512 &mut self,
513 language_server_id: usize,
514 worktree_path: Arc<Path>,
515 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
516 _: &mut ModelContext<Worktree>,
517 ) -> Result<bool> {
518 self.diagnostics.remove(&worktree_path);
519 let old_summary = self
520 .diagnostic_summaries
521 .remove(&PathKey(worktree_path.clone()))
522 .unwrap_or_default();
523 let new_summary = DiagnosticSummary::new(language_server_id, &diagnostics);
524 if !new_summary.is_empty() {
525 self.diagnostic_summaries
526 .insert(PathKey(worktree_path.clone()), new_summary);
527 self.diagnostics.insert(worktree_path.clone(), diagnostics);
528 }
529
530 let updated = !old_summary.is_empty() || !new_summary.is_empty();
531 if updated {
532 if let Some(share) = self.share.as_ref() {
533 self.client
534 .send(proto::UpdateDiagnosticSummary {
535 project_id: share.project_id,
536 worktree_id: self.id().to_proto(),
537 summary: Some(proto::DiagnosticSummary {
538 path: worktree_path.to_string_lossy().to_string(),
539 language_server_id: language_server_id as u64,
540 error_count: new_summary.error_count as u32,
541 warning_count: new_summary.warning_count as u32,
542 }),
543 })
544 .log_err();
545 }
546 }
547
548 Ok(updated)
549 }
550
551 fn poll_snapshot(&mut self, force: bool, cx: &mut ModelContext<Worktree>) {
552 self.poll_task.take();
553
554 match self.scan_state() {
555 ScanState::Idle => {
556 let new_snapshot = self.background_snapshot.lock().clone();
557 let updated_repos = Self::changed_repos(
558 &self.snapshot.git_repositories,
559 &new_snapshot.git_repositories,
560 );
561 self.snapshot = new_snapshot;
562
563 if let Some(share) = self.share.as_mut() {
564 *share.snapshots_tx.borrow_mut() = self.snapshot.clone();
565 }
566
567 cx.emit(Event::UpdatedEntries);
568
569 if !updated_repos.is_empty() {
570 cx.emit(Event::UpdatedGitRepositories(updated_repos));
571 }
572 }
573
574 ScanState::Initializing => {
575 let is_fake_fs = self.fs.is_fake();
576
577 let new_snapshot = self.background_snapshot.lock().clone();
578 let updated_repos = Self::changed_repos(
579 &self.snapshot.git_repositories,
580 &new_snapshot.git_repositories,
581 );
582 self.snapshot = new_snapshot;
583
584 self.poll_task = Some(cx.spawn_weak(|this, mut cx| async move {
585 if is_fake_fs {
586 #[cfg(any(test, feature = "test-support"))]
587 cx.background().simulate_random_delay().await;
588 } else {
589 smol::Timer::after(Duration::from_millis(100)).await;
590 }
591 if let Some(this) = this.upgrade(&cx) {
592 this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
593 }
594 }));
595
596 cx.emit(Event::UpdatedEntries);
597
598 if !updated_repos.is_empty() {
599 cx.emit(Event::UpdatedGitRepositories(updated_repos));
600 }
601 }
602
603 _ => {
604 if force {
605 self.snapshot = self.background_snapshot.lock().clone();
606 }
607 }
608 }
609
610 cx.notify();
611 }
612
613 fn changed_repos(
614 old_repos: &[GitRepositoryEntry],
615 new_repos: &[GitRepositoryEntry],
616 ) -> Vec<GitRepositoryEntry> {
617 fn diff<'a>(
618 a: &'a [GitRepositoryEntry],
619 b: &'a [GitRepositoryEntry],
620 updated: &mut HashMap<&'a Path, GitRepositoryEntry>,
621 ) {
622 for a_repo in a {
623 let matched = b.iter().find(|b_repo| {
624 a_repo.git_dir_path == b_repo.git_dir_path && a_repo.scan_id == b_repo.scan_id
625 });
626
627 if matched.is_none() {
628 updated.insert(a_repo.git_dir_path.as_ref(), a_repo.clone());
629 }
630 }
631 }
632
633 let mut updated = HashMap::<&Path, GitRepositoryEntry>::default();
634
635 diff(old_repos, new_repos, &mut updated);
636 diff(new_repos, old_repos, &mut updated);
637
638 updated.into_values().collect()
639 }
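// Worked example (hypothetical repositories, not from the original): if the old
// snapshot holds repos A (scan_id 1) and B (scan_id 2) while the new snapshot holds
// A (scan_id 1), B (scan_id 3), and C (scan_id 1), the first `diff` pass reports B
// (its scan_id changed) and the second pass reports C (newly added); a repository
// present only in `old_repos` would likewise surface in the first pass. The result
// is a symmetric difference keyed by `git_dir_path`.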
640
641 pub fn scan_complete(&self) -> impl Future<Output = ()> {
642 let mut scan_state_rx = self.last_scan_state_rx.clone();
643 async move {
644 let mut scan_state = Some(scan_state_rx.borrow().clone());
645 while let Some(ScanState::Initializing | ScanState::Updating) = scan_state {
646 scan_state = scan_state_rx.recv().await;
647 }
648 }
649 }
650
651 fn scan_state(&self) -> ScanState {
652 self.last_scan_state_rx.borrow().clone()
653 }
654
655 pub fn snapshot(&self) -> LocalSnapshot {
656 self.snapshot.clone()
657 }
658
659 pub fn metadata_proto(&self) -> proto::WorktreeMetadata {
660 proto::WorktreeMetadata {
661 id: self.id().to_proto(),
662 root_name: self.root_name().to_string(),
663 visible: self.visible,
664 abs_path: self.abs_path().as_os_str().to_string_lossy().into(),
665 }
666 }
667
668 fn load(
669 &self,
670 path: &Path,
671 cx: &mut ModelContext<Worktree>,
672 ) -> Task<Result<(File, String, Option<String>)>> {
673 let handle = cx.handle();
674 let path = Arc::from(path);
675 let abs_path = self.absolutize(&path);
676 let fs = self.fs.clone();
677 let snapshot = self.snapshot();
678
679 cx.spawn(|this, mut cx| async move {
680 let text = fs.load(&abs_path).await?;
681
682 let diff_base = if let Some(repo) = snapshot.repo_for(&path) {
683 if let Ok(repo_relative) = path.strip_prefix(repo.content_path) {
684 let repo_relative = repo_relative.to_owned();
685 cx.background()
686 .spawn(async move { repo.repo.lock().load_index_text(&repo_relative) })
687 .await
688 } else {
689 None
690 }
691 } else {
692 None
693 };
694
695 // Eagerly populate the snapshot with an updated entry for the loaded file
696 let entry = this
697 .update(&mut cx, |this, cx| {
698 this.as_local()
699 .unwrap()
700 .refresh_entry(path, abs_path, None, cx)
701 })
702 .await?;
703
704 Ok((
705 File {
706 entry_id: entry.id,
707 worktree: handle,
708 path: entry.path,
709 mtime: entry.mtime,
710 is_local: true,
711 is_deleted: false,
712 },
713 text,
714 diff_base,
715 ))
716 })
717 }
718
719 pub fn save_buffer_as(
720 &self,
721 buffer_handle: ModelHandle<Buffer>,
722 path: impl Into<Arc<Path>>,
723 cx: &mut ModelContext<Worktree>,
724 ) -> Task<Result<()>> {
725 let buffer = buffer_handle.read(cx);
726 let text = buffer.as_rope().clone();
727 let fingerprint = text.fingerprint();
728 let version = buffer.version();
729 let save = self.write_file(path, text, buffer.line_ending(), cx);
730 let handle = cx.handle();
731 cx.as_mut().spawn(|mut cx| async move {
732 let entry = save.await?;
733 let file = File {
734 entry_id: entry.id,
735 worktree: handle,
736 path: entry.path,
737 mtime: entry.mtime,
738 is_local: true,
739 is_deleted: false,
740 };
741
742 buffer_handle.update(&mut cx, |buffer, cx| {
743 buffer.did_save(version, fingerprint, file.mtime, Some(Arc::new(file)), cx);
744 });
745
746 Ok(())
747 })
748 }
749
750 pub fn create_entry(
751 &self,
752 path: impl Into<Arc<Path>>,
753 is_dir: bool,
754 cx: &mut ModelContext<Worktree>,
755 ) -> Task<Result<Entry>> {
756 self.write_entry_internal(
757 path,
758 if is_dir {
759 None
760 } else {
761 Some(Default::default())
762 },
763 cx,
764 )
765 }
766
767 pub fn write_file(
768 &self,
769 path: impl Into<Arc<Path>>,
770 text: Rope,
771 line_ending: LineEnding,
772 cx: &mut ModelContext<Worktree>,
773 ) -> Task<Result<Entry>> {
774 self.write_entry_internal(path, Some((text, line_ending)), cx)
775 }
776
777 pub fn delete_entry(
778 &self,
779 entry_id: ProjectEntryId,
780 cx: &mut ModelContext<Worktree>,
781 ) -> Option<Task<Result<()>>> {
782 let entry = self.entry_for_id(entry_id)?.clone();
783 let abs_path = self.absolutize(&entry.path);
784 let delete = cx.background().spawn({
785 let fs = self.fs.clone();
786 let abs_path = abs_path;
787 async move {
788 if entry.is_file() {
789 fs.remove_file(&abs_path, Default::default()).await
790 } else {
791 fs.remove_dir(
792 &abs_path,
793 RemoveOptions {
794 recursive: true,
795 ignore_if_not_exists: false,
796 },
797 )
798 .await
799 }
800 }
801 });
802
803 Some(cx.spawn(|this, mut cx| async move {
804 delete.await?;
805 this.update(&mut cx, |this, cx| {
806 let this = this.as_local_mut().unwrap();
807 {
808 let mut snapshot = this.background_snapshot.lock();
809 snapshot.delete_entry(entry_id);
810 }
811 this.poll_snapshot(true, cx);
812 });
813 Ok(())
814 }))
815 }
816
817 pub fn rename_entry(
818 &self,
819 entry_id: ProjectEntryId,
820 new_path: impl Into<Arc<Path>>,
821 cx: &mut ModelContext<Worktree>,
822 ) -> Option<Task<Result<Entry>>> {
823 let old_path = self.entry_for_id(entry_id)?.path.clone();
824 let new_path = new_path.into();
825 let abs_old_path = self.absolutize(&old_path);
826 let abs_new_path = self.absolutize(&new_path);
827 let rename = cx.background().spawn({
828 let fs = self.fs.clone();
829 let abs_new_path = abs_new_path.clone();
830 async move {
831 fs.rename(&abs_old_path, &abs_new_path, Default::default())
832 .await
833 }
834 });
835
836 Some(cx.spawn(|this, mut cx| async move {
837 rename.await?;
838 let entry = this
839 .update(&mut cx, |this, cx| {
840 this.as_local_mut().unwrap().refresh_entry(
841 new_path.clone(),
842 abs_new_path,
843 Some(old_path),
844 cx,
845 )
846 })
847 .await?;
848 Ok(entry)
849 }))
850 }
851
852 pub fn copy_entry(
853 &self,
854 entry_id: ProjectEntryId,
855 new_path: impl Into<Arc<Path>>,
856 cx: &mut ModelContext<Worktree>,
857 ) -> Option<Task<Result<Entry>>> {
858 let old_path = self.entry_for_id(entry_id)?.path.clone();
859 let new_path = new_path.into();
860 let abs_old_path = self.absolutize(&old_path);
861 let abs_new_path = self.absolutize(&new_path);
862 let copy = cx.background().spawn({
863 let fs = self.fs.clone();
864 let abs_new_path = abs_new_path.clone();
865 async move {
866 copy_recursive(
867 fs.as_ref(),
868 &abs_old_path,
869 &abs_new_path,
870 Default::default(),
871 )
872 .await
873 }
874 });
875
876 Some(cx.spawn(|this, mut cx| async move {
877 copy.await?;
878 let entry = this
879 .update(&mut cx, |this, cx| {
880 this.as_local_mut().unwrap().refresh_entry(
881 new_path.clone(),
882 abs_new_path,
883 None,
884 cx,
885 )
886 })
887 .await?;
888 Ok(entry)
889 }))
890 }
891
892 fn write_entry_internal(
893 &self,
894 path: impl Into<Arc<Path>>,
895 text_if_file: Option<(Rope, LineEnding)>,
896 cx: &mut ModelContext<Worktree>,
897 ) -> Task<Result<Entry>> {
898 let path = path.into();
899 let abs_path = self.absolutize(&path);
900 let write = cx.background().spawn({
901 let fs = self.fs.clone();
902 let abs_path = abs_path.clone();
903 async move {
904 if let Some((text, line_ending)) = text_if_file {
905 fs.save(&abs_path, &text, line_ending).await
906 } else {
907 fs.create_dir(&abs_path).await
908 }
909 }
910 });
911
912 cx.spawn(|this, mut cx| async move {
913 write.await?;
914 let entry = this
915 .update(&mut cx, |this, cx| {
916 this.as_local_mut()
917 .unwrap()
918 .refresh_entry(path, abs_path, None, cx)
919 })
920 .await?;
921 Ok(entry)
922 })
923 }
924
925 fn refresh_entry(
926 &self,
927 path: Arc<Path>,
928 abs_path: PathBuf,
929 old_path: Option<Arc<Path>>,
930 cx: &mut ModelContext<Worktree>,
931 ) -> Task<Result<Entry>> {
932 let fs = self.fs.clone();
933 let root_char_bag;
934 let next_entry_id;
935 {
936 let snapshot = self.background_snapshot.lock();
937 root_char_bag = snapshot.root_char_bag;
938 next_entry_id = snapshot.next_entry_id.clone();
939 }
940 cx.spawn_weak(|this, mut cx| async move {
941 let metadata = fs
942 .metadata(&abs_path)
943 .await?
944 .ok_or_else(|| anyhow!("could not read saved file metadata"))?;
945 let this = this
946 .upgrade(&cx)
947 .ok_or_else(|| anyhow!("worktree was dropped"))?;
948 this.update(&mut cx, |this, cx| {
949 let this = this.as_local_mut().unwrap();
950 let inserted_entry;
951 {
952 let mut snapshot = this.background_snapshot.lock();
953 let mut entry = Entry::new(path, &metadata, &next_entry_id, root_char_bag);
954 entry.is_ignored = snapshot
955 .ignore_stack_for_abs_path(&abs_path, entry.is_dir())
956 .is_abs_path_ignored(&abs_path, entry.is_dir());
957 if let Some(old_path) = old_path {
958 snapshot.remove_path(&old_path);
959 }
960 inserted_entry = snapshot.insert_entry(entry, fs.as_ref());
961 snapshot.scan_id += 1;
962 }
963 this.poll_snapshot(true, cx);
964 Ok(inserted_entry)
965 })
966 })
967 }
968
969 pub fn share(&mut self, project_id: u64, cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
970 let (share_tx, share_rx) = oneshot::channel();
971
972 if self.share.is_some() {
973 let _ = share_tx.send(Ok(()));
974 } else {
975 let (snapshots_tx, mut snapshots_rx) = watch::channel_with(self.snapshot());
976 let worktree_id = cx.model_id() as u64;
977
978 for (path, summary) in self.diagnostic_summaries.iter() {
979 if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
980 project_id,
981 worktree_id,
982 summary: Some(summary.to_proto(&path.0)),
983 }) {
984 return Task::ready(Err(e));
985 }
986 }
987
988 let maintain_remote_snapshot = cx.background().spawn({
989 let rpc = self.client.clone();
990 async move {
991 let mut prev_snapshot = match snapshots_rx.recv().await {
992 Some(snapshot) => {
993 let update = proto::UpdateWorktree {
994 project_id,
995 worktree_id,
996 abs_path: snapshot.abs_path().to_string_lossy().into(),
997 root_name: snapshot.root_name().to_string(),
998 updated_entries: snapshot
999 .entries_by_path
1000 .iter()
1001 .map(Into::into)
1002 .collect(),
1003 removed_entries: Default::default(),
1004 scan_id: snapshot.scan_id as u64,
1005 is_last_update: true,
1006 };
1007 if let Err(error) = send_worktree_update(&rpc, update).await {
1008 let _ = share_tx.send(Err(error));
1009 return Err(anyhow!("failed to send initial worktree update"));
1010 } else {
1011 let _ = share_tx.send(Ok(()));
1012 snapshot
1013 }
1014 }
1015 None => {
1016 share_tx
1017 .send(Err(anyhow!("worktree dropped before share completed")))
1018 .ok();
1019 return Err(anyhow!("failed to send initial worktree update"));
1020 }
1021 };
1022
1023 while let Some(snapshot) = snapshots_rx.recv().await {
1024 send_worktree_update(
1025 &rpc,
1026 snapshot.build_update(&prev_snapshot, project_id, worktree_id, true),
1027 )
1028 .await?;
1029 prev_snapshot = snapshot;
1030 }
1031
1032 Ok::<_, anyhow::Error>(())
1033 }
1034 .log_err()
1035 });
1036
1037 self.share = Some(ShareState {
1038 project_id,
1039 snapshots_tx,
1040 _maintain_remote_snapshot: maintain_remote_snapshot,
1041 });
1042 }
1043
1044 cx.foreground().spawn(async move {
1045 share_rx
1046 .await
1047 .unwrap_or_else(|_| Err(anyhow!("share ended")))
1048 })
1049 }
1050
1051 pub fn unshare(&mut self) {
1052 self.share.take();
1053 }
1054
1055 pub fn is_shared(&self) -> bool {
1056 self.share.is_some()
1057 }
1058}
1059
1060impl RemoteWorktree {
1061 fn snapshot(&self) -> Snapshot {
1062 self.snapshot.clone()
1063 }
1064
1065 fn poll_snapshot(&mut self, cx: &mut ModelContext<Worktree>) {
1066 self.snapshot = self.background_snapshot.lock().clone();
1067 cx.emit(Event::UpdatedEntries);
1068 cx.notify();
1069 }
1070
1071 pub fn disconnected_from_host(&mut self) {
1072 self.updates_tx.take();
1073 self.snapshot_subscriptions.clear();
1074 self.disconnected = true;
1075 }
1076
1077 pub fn update_from_remote(&mut self, update: proto::UpdateWorktree) {
1078 if let Some(updates_tx) = &self.updates_tx {
1079 updates_tx
1080 .unbounded_send(update)
1081 .expect("consumer runs to completion");
1082 }
1083 }
1084
1085 fn observed_snapshot(&self, scan_id: usize) -> bool {
1086 self.scan_id > scan_id || (self.scan_id == scan_id && self.is_complete)
1087 }
1088
1089 fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = Result<()>> {
1090 let (tx, rx) = oneshot::channel();
1091 if self.observed_snapshot(scan_id) {
1092 let _ = tx.send(());
1093 } else if self.disconnected {
1094 drop(tx);
1095 } else {
1096 match self
1097 .snapshot_subscriptions
1098 .binary_search_by_key(&scan_id, |probe| probe.0)
1099 {
1100 Ok(ix) | Err(ix) => self.snapshot_subscriptions.insert(ix, (scan_id, tx)),
1101 }
1102 }
1103
1104 async move {
1105 rx.await?;
1106 Ok(())
1107 }
1108 }
1109
1110 pub fn update_diagnostic_summary(
1111 &mut self,
1112 path: Arc<Path>,
1113 summary: &proto::DiagnosticSummary,
1114 ) {
1115 let summary = DiagnosticSummary {
1116 language_server_id: summary.language_server_id as usize,
1117 error_count: summary.error_count as usize,
1118 warning_count: summary.warning_count as usize,
1119 };
1120 if summary.is_empty() {
1121 self.diagnostic_summaries.remove(&PathKey(path));
1122 } else {
1123 self.diagnostic_summaries.insert(PathKey(path), summary);
1124 }
1125 }
1126
1127 pub fn insert_entry(
1128 &mut self,
1129 entry: proto::Entry,
1130 scan_id: usize,
1131 cx: &mut ModelContext<Worktree>,
1132 ) -> Task<Result<Entry>> {
1133 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1134 cx.spawn(|this, mut cx| async move {
1135 wait_for_snapshot.await?;
1136 this.update(&mut cx, |worktree, _| {
1137 let worktree = worktree.as_remote_mut().unwrap();
1138 let mut snapshot = worktree.background_snapshot.lock();
1139 let entry = snapshot.insert_entry(entry);
1140 worktree.snapshot = snapshot.clone();
1141 entry
1142 })
1143 })
1144 }
1145
1146 pub(crate) fn delete_entry(
1147 &mut self,
1148 id: ProjectEntryId,
1149 scan_id: usize,
1150 cx: &mut ModelContext<Worktree>,
1151 ) -> Task<Result<()>> {
1152 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1153 cx.spawn(|this, mut cx| async move {
1154 wait_for_snapshot.await?;
1155 this.update(&mut cx, |worktree, _| {
1156 let worktree = worktree.as_remote_mut().unwrap();
1157 let mut snapshot = worktree.background_snapshot.lock();
1158 snapshot.delete_entry(id);
1159 worktree.snapshot = snapshot.clone();
1160 });
1161 Ok(())
1162 })
1163 }
1164}
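// A minimal sketch (not part of the original) of the ordered-insertion technique
// `wait_for_snapshot` uses to keep `snapshot_subscriptions` sorted by scan id, shown
// here with plain integers and strings instead of oneshot senders.
#[cfg(test)]
#[test]
fn sorted_subscription_insertion_sketch() {
    use std::collections::VecDeque;

    let mut subs: VecDeque<(usize, &str)> = vec![(1, "a"), (4, "b")].into();
    let scan_id = 3;
    // `Ok` and `Err` are handled identically: either way `ix` is a position that
    // keeps the queue ordered by scan id.
    match subs.binary_search_by_key(&scan_id, |probe| probe.0) {
        Ok(ix) | Err(ix) => subs.insert(ix, (scan_id, "c")),
    }
    assert_eq!(subs.iter().map(|s| s.0).collect::<Vec<_>>(), [1, 3, 4]);
}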
1165
1166impl Snapshot {
1167 pub fn id(&self) -> WorktreeId {
1168 self.id
1169 }
1170
1171 pub fn abs_path(&self) -> &Arc<Path> {
1172 &self.abs_path
1173 }
1174
1175 pub fn contains_entry(&self, entry_id: ProjectEntryId) -> bool {
1176 self.entries_by_id.get(&entry_id, &()).is_some()
1177 }
1178
1179 pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
1180 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1181 let old_entry = self.entries_by_id.insert_or_replace(
1182 PathEntry {
1183 id: entry.id,
1184 path: entry.path.clone(),
1185 is_ignored: entry.is_ignored,
1186 scan_id: 0,
1187 },
1188 &(),
1189 );
1190 if let Some(old_entry) = old_entry {
1191 self.entries_by_path.remove(&PathKey(old_entry.path), &());
1192 }
1193 self.entries_by_path.insert_or_replace(entry.clone(), &());
1194 Ok(entry)
1195 }
1196
1197 fn delete_entry(&mut self, entry_id: ProjectEntryId) -> bool {
1198 if let Some(removed_entry) = self.entries_by_id.remove(&entry_id, &()) {
1199 self.entries_by_path = {
1200 let mut cursor = self.entries_by_path.cursor();
1201 let mut new_entries_by_path =
1202 cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &());
1203 while let Some(entry) = cursor.item() {
1204 if entry.path.starts_with(&removed_entry.path) {
1205 self.entries_by_id.remove(&entry.id, &());
1206 cursor.next(&());
1207 } else {
1208 break;
1209 }
1210 }
1211 new_entries_by_path.push_tree(cursor.suffix(&()), &());
1212 new_entries_by_path
1213 };
1214
1215 true
1216 } else {
1217 false
1218 }
1219 }
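// Note (not part of the original): because `entries_by_path` is ordered by path,
// the loop above walks forward over every entry whose path starts with the removed
// path, so deleting a directory entry such as `a/b` also drops `a/b/c.rs` and the
// rest of its descendants from both trees in a single pass.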
1220
1221 pub(crate) fn apply_remote_update(&mut self, update: proto::UpdateWorktree) -> Result<()> {
1222 let mut entries_by_path_edits = Vec::new();
1223 let mut entries_by_id_edits = Vec::new();
1224 for entry_id in update.removed_entries {
1225 let entry = self
1226 .entry_for_id(ProjectEntryId::from_proto(entry_id))
1227 .ok_or_else(|| anyhow!("unknown entry {}", entry_id))?;
1228 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
1229 entries_by_id_edits.push(Edit::Remove(entry.id));
1230 }
1231
1232 for entry in update.updated_entries {
1233 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1234 if let Some(PathEntry { path, .. }) = self.entries_by_id.get(&entry.id, &()) {
1235 entries_by_path_edits.push(Edit::Remove(PathKey(path.clone())));
1236 }
1237 entries_by_id_edits.push(Edit::Insert(PathEntry {
1238 id: entry.id,
1239 path: entry.path.clone(),
1240 is_ignored: entry.is_ignored,
1241 scan_id: 0,
1242 }));
1243 entries_by_path_edits.push(Edit::Insert(entry));
1244 }
1245
1246 self.entries_by_path.edit(entries_by_path_edits, &());
1247 self.entries_by_id.edit(entries_by_id_edits, &());
1248 self.scan_id = update.scan_id as usize;
1249 self.is_complete = update.is_last_update;
1250
1251 Ok(())
1252 }
1253
1254 pub fn file_count(&self) -> usize {
1255 self.entries_by_path.summary().file_count
1256 }
1257
1258 pub fn visible_file_count(&self) -> usize {
1259 self.entries_by_path.summary().visible_file_count
1260 }
1261
1262 fn traverse_from_offset(
1263 &self,
1264 include_dirs: bool,
1265 include_ignored: bool,
1266 start_offset: usize,
1267 ) -> Traversal {
1268 let mut cursor = self.entries_by_path.cursor();
1269 cursor.seek(
1270 &TraversalTarget::Count {
1271 count: start_offset,
1272 include_dirs,
1273 include_ignored,
1274 },
1275 Bias::Right,
1276 &(),
1277 );
1278 Traversal {
1279 cursor,
1280 include_dirs,
1281 include_ignored,
1282 }
1283 }
1284
1285 fn traverse_from_path(
1286 &self,
1287 include_dirs: bool,
1288 include_ignored: bool,
1289 path: &Path,
1290 ) -> Traversal {
1291 let mut cursor = self.entries_by_path.cursor();
1292 cursor.seek(&TraversalTarget::Path(path), Bias::Left, &());
1293 Traversal {
1294 cursor,
1295 include_dirs,
1296 include_ignored,
1297 }
1298 }
1299
1300 pub fn files(&self, include_ignored: bool, start: usize) -> Traversal {
1301 self.traverse_from_offset(false, include_ignored, start)
1302 }
1303
1304 pub fn entries(&self, include_ignored: bool) -> Traversal {
1305 self.traverse_from_offset(true, include_ignored, 0)
1306 }
1307
1308 pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
1309 let empty_path = Path::new("");
1310 self.entries_by_path
1311 .cursor::<()>()
1312 .filter(move |entry| entry.path.as_ref() != empty_path)
1313 .map(|entry| &entry.path)
1314 }
1315
1316 fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> {
1317 let mut cursor = self.entries_by_path.cursor();
1318 cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &());
1319 let traversal = Traversal {
1320 cursor,
1321 include_dirs: true,
1322 include_ignored: true,
1323 };
1324 ChildEntriesIter {
1325 traversal,
1326 parent_path,
1327 }
1328 }
1329
1330 pub fn root_entry(&self) -> Option<&Entry> {
1331 self.entry_for_path("")
1332 }
1333
1334 pub fn root_name(&self) -> &str {
1335 &self.root_name
1336 }
1337
1338 pub fn scan_id(&self) -> usize {
1339 self.scan_id
1340 }
1341
1342 pub fn entry_for_path(&self, path: impl AsRef<Path>) -> Option<&Entry> {
1343 let path = path.as_ref();
1344 self.traverse_from_path(true, true, path)
1345 .entry()
1346 .and_then(|entry| {
1347 if entry.path.as_ref() == path {
1348 Some(entry)
1349 } else {
1350 None
1351 }
1352 })
1353 }
1354
1355 pub fn entry_for_id(&self, id: ProjectEntryId) -> Option<&Entry> {
1356 let entry = self.entries_by_id.get(&id, &())?;
1357 self.entry_for_path(&entry.path)
1358 }
1359
1360 pub fn inode_for_path(&self, path: impl AsRef<Path>) -> Option<u64> {
1361 self.entry_for_path(path.as_ref()).map(|e| e.inode)
1362 }
1363}
1364
1365impl LocalSnapshot {
1366 pub fn extension_counts(&self) -> &HashMap<OsString, usize> {
1367 &self.extension_counts
1368 }
1369
1370 // Gives the most specific git repository for a given path
1371 pub(crate) fn repo_for(&self, path: &Path) -> Option<GitRepositoryEntry> {
1372 self.git_repositories
1373 .iter()
1374 .rev() // git_repositories is ordered lexicographically, so the last matching repo is the most specific
1375 .find(|repo| repo.manages(path))
1376 .cloned()
1377 }
1378
1379 pub(crate) fn in_dot_git(&mut self, path: &Path) -> Option<&mut GitRepositoryEntry> {
1380 // Git repositories cannot be nested, so we don't need to reverse the order
1381 self.git_repositories
1382 .iter_mut()
1383 .find(|repo| repo.in_dot_git(path))
1384 }
1385
1386 #[cfg(test)]
1387 pub(crate) fn build_initial_update(&self, project_id: u64) -> proto::UpdateWorktree {
1388 let root_name = self.root_name.clone();
1389 proto::UpdateWorktree {
1390 project_id,
1391 worktree_id: self.id().to_proto(),
1392 abs_path: self.abs_path().to_string_lossy().into(),
1393 root_name,
1394 updated_entries: self.entries_by_path.iter().map(Into::into).collect(),
1395 removed_entries: Default::default(),
1396 scan_id: self.scan_id as u64,
1397 is_last_update: true,
1398 }
1399 }
1400
1401 pub(crate) fn build_update(
1402 &self,
1403 other: &Self,
1404 project_id: u64,
1405 worktree_id: u64,
1406 include_ignored: bool,
1407 ) -> proto::UpdateWorktree {
1408 let mut updated_entries = Vec::new();
1409 let mut removed_entries = Vec::new();
1410 let mut self_entries = self
1411 .entries_by_id
1412 .cursor::<()>()
1413 .filter(|e| include_ignored || !e.is_ignored)
1414 .peekable();
1415 let mut other_entries = other
1416 .entries_by_id
1417 .cursor::<()>()
1418 .filter(|e| include_ignored || !e.is_ignored)
1419 .peekable();
1420 loop {
1421 match (self_entries.peek(), other_entries.peek()) {
1422 (Some(self_entry), Some(other_entry)) => {
1423 match Ord::cmp(&self_entry.id, &other_entry.id) {
1424 Ordering::Less => {
1425 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1426 updated_entries.push(entry);
1427 self_entries.next();
1428 }
1429 Ordering::Equal => {
1430 if self_entry.scan_id != other_entry.scan_id {
1431 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1432 updated_entries.push(entry);
1433 }
1434
1435 self_entries.next();
1436 other_entries.next();
1437 }
1438 Ordering::Greater => {
1439 removed_entries.push(other_entry.id.to_proto());
1440 other_entries.next();
1441 }
1442 }
1443 }
1444 (Some(self_entry), None) => {
1445 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1446 updated_entries.push(entry);
1447 self_entries.next();
1448 }
1449 (None, Some(other_entry)) => {
1450 removed_entries.push(other_entry.id.to_proto());
1451 other_entries.next();
1452 }
1453 (None, None) => break,
1454 }
1455 }
1456
1457 proto::UpdateWorktree {
1458 project_id,
1459 worktree_id,
1460 abs_path: self.abs_path().to_string_lossy().into(),
1461 root_name: self.root_name().to_string(),
1462 updated_entries,
1463 removed_entries,
1464 scan_id: self.scan_id as u64,
1465 is_last_update: true,
1466 }
1467 }
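// Sketch of the comparison above (not part of the original): both cursors walk the
// id-ordered `entries_by_id` trees in lockstep, so an id present only in `self`
// becomes an updated entry, an id present only in `other` becomes a removed entry,
// and an id present in both is re-sent only when its `scan_id` differs, i.e. when
// the entry has changed since `other` was captured.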
1468
1469 fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
1470 if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) {
1471 let abs_path = self.abs_path.join(&entry.path);
1472 match smol::block_on(build_gitignore(&abs_path, fs)) {
1473 Ok(ignore) => {
1474 self.ignores_by_parent_abs_path.insert(
1475 abs_path.parent().unwrap().into(),
1476 (Arc::new(ignore), self.scan_id),
1477 );
1478 }
1479 Err(error) => {
1480 log::error!(
1481 "error loading .gitignore file {:?} - {:?}",
1482 &entry.path,
1483 error
1484 );
1485 }
1486 }
1487 }
1488
1489 self.reuse_entry_id(&mut entry);
1490
1491 if entry.kind == EntryKind::PendingDir {
1492 if let Some(existing_entry) =
1493 self.entries_by_path.get(&PathKey(entry.path.clone()), &())
1494 {
1495 entry.kind = existing_entry.kind;
1496 }
1497 }
1498
1499 self.entries_by_path.insert_or_replace(entry.clone(), &());
1500 let scan_id = self.scan_id;
1501 let removed_entry = self.entries_by_id.insert_or_replace(
1502 PathEntry {
1503 id: entry.id,
1504 path: entry.path.clone(),
1505 is_ignored: entry.is_ignored,
1506 scan_id,
1507 },
1508 &(),
1509 );
1510
1511 if let Some(removed_entry) = removed_entry {
1512 self.dec_extension_count(&removed_entry.path, removed_entry.is_ignored);
1513 }
1514 self.inc_extension_count(&entry.path, entry.is_ignored);
1515
1516 entry
1517 }
1518
1519 fn populate_dir(
1520 &mut self,
1521 parent_path: Arc<Path>,
1522 entries: impl IntoIterator<Item = Entry>,
1523 ignore: Option<Arc<Gitignore>>,
1524 fs: &dyn Fs,
1525 ) {
1526 let mut parent_entry = if let Some(parent_entry) =
1527 self.entries_by_path.get(&PathKey(parent_path.clone()), &())
1528 {
1529 parent_entry.clone()
1530 } else {
1531 log::warn!(
1532 "populating a directory {:?} that has been removed",
1533 parent_path
1534 );
1535 return;
1536 };
1537
1538 if let Some(ignore) = ignore {
1539 self.ignores_by_parent_abs_path.insert(
1540 self.abs_path.join(&parent_path).into(),
1541 (ignore, self.scan_id),
1542 );
1543 }
1544 if matches!(parent_entry.kind, EntryKind::PendingDir) {
1545 parent_entry.kind = EntryKind::Dir;
1546 } else {
1547 unreachable!();
1548 }
1549
1550 if parent_path.file_name() == Some(&DOT_GIT) {
1551 let abs_path = self.abs_path.join(&parent_path);
1552 let content_path: Arc<Path> = parent_path.parent().unwrap().into();
1553 if let Err(ix) = self
1554 .git_repositories
1555 .binary_search_by_key(&&content_path, |repo| &repo.content_path)
1556 {
1557 if let Some(repo) = fs.open_repo(abs_path.as_path()) {
1558 self.git_repositories.insert(
1559 ix,
1560 GitRepositoryEntry {
1561 repo,
1562 scan_id: 0,
1563 content_path,
1564 git_dir_path: parent_path,
1565 },
1566 );
1567 }
1568 }
1569 }
1570
1571 let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
1572 let mut entries_by_id_edits = Vec::new();
1573
1574 for mut entry in entries {
1575 self.reuse_entry_id(&mut entry);
1576 self.inc_extension_count(&entry.path, entry.is_ignored);
1577 entries_by_id_edits.push(Edit::Insert(PathEntry {
1578 id: entry.id,
1579 path: entry.path.clone(),
1580 is_ignored: entry.is_ignored,
1581 scan_id: self.scan_id,
1582 }));
1583 entries_by_path_edits.push(Edit::Insert(entry));
1584 }
1585
1586 self.entries_by_path.edit(entries_by_path_edits, &());
1587 let removed_entries = self.entries_by_id.edit(entries_by_id_edits, &());
1588
1589 for removed_entry in removed_entries {
1590 self.dec_extension_count(&removed_entry.path, removed_entry.is_ignored);
1591 }
1592 }
1593
1594 fn inc_extension_count(&mut self, path: &Path, ignored: bool) {
1595 if !ignored {
1596 if let Some(extension) = path.extension() {
1597 if let Some(count) = self.extension_counts.get_mut(extension) {
1598 *count += 1;
1599 } else {
1600 self.extension_counts.insert(extension.into(), 1);
1601 }
1602 }
1603 }
1604 }
1605
1606 fn dec_extension_count(&mut self, path: &Path, ignored: bool) {
1607 if !ignored {
1608 if let Some(extension) = path.extension() {
1609 if let Some(count) = self.extension_counts.get_mut(extension) {
1610 *count -= 1;
1611 }
1612 }
1613 }
1614 }
1615
1616 fn reuse_entry_id(&mut self, entry: &mut Entry) {
1617 if let Some(removed_entry_id) = self.removed_entry_ids.remove(&entry.inode) {
1618 entry.id = removed_entry_id;
1619 } else if let Some(existing_entry) = self.entry_for_path(&entry.path) {
1620 entry.id = existing_entry.id;
1621 }
1622 }
1623
1624 fn remove_path(&mut self, path: &Path) {
1625 let mut new_entries;
1626 let removed_entries;
1627 {
1628 let mut cursor = self.entries_by_path.cursor::<TraversalProgress>();
1629 new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
1630 removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
1631 new_entries.push_tree(cursor.suffix(&()), &());
1632 }
1633 self.entries_by_path = new_entries;
1634
1635 let mut entries_by_id_edits = Vec::new();
1636 for entry in removed_entries.cursor::<()>() {
1637 let removed_entry_id = self
1638 .removed_entry_ids
1639 .entry(entry.inode)
1640 .or_insert(entry.id);
1641 *removed_entry_id = cmp::max(*removed_entry_id, entry.id);
1642 entries_by_id_edits.push(Edit::Remove(entry.id));
1643 self.dec_extension_count(&entry.path, entry.is_ignored);
1644 }
1645 self.entries_by_id.edit(entries_by_id_edits, &());
1646
1647 if path.file_name() == Some(&GITIGNORE) {
1648 let abs_parent_path = self.abs_path.join(path.parent().unwrap());
1649 if let Some((_, scan_id)) = self
1650 .ignores_by_parent_abs_path
1651 .get_mut(abs_parent_path.as_path())
1652 {
1653 *scan_id = self.snapshot.scan_id;
1654 }
1655 } else if path.file_name() == Some(&DOT_GIT) {
1656 let parent_path = path.parent().unwrap();
1657 if let Ok(ix) = self
1658 .git_repositories
1659 .binary_search_by_key(&parent_path, |repo| repo.git_dir_path.as_ref())
1660 {
1661 self.git_repositories[ix].scan_id = self.snapshot.scan_id;
1662 }
1663 }
1664 }
1665
1666 fn ancestor_inodes_for_path(&self, path: &Path) -> TreeSet<u64> {
1667 let mut inodes = TreeSet::default();
1668 for ancestor in path.ancestors().skip(1) {
1669 if let Some(entry) = self.entry_for_path(ancestor) {
1670 inodes.insert(entry.inode);
1671 }
1672 }
1673 inodes
1674 }
1675
1676 fn ignore_stack_for_abs_path(&self, abs_path: &Path, is_dir: bool) -> Arc<IgnoreStack> {
1677 let mut new_ignores = Vec::new();
1678 for ancestor in abs_path.ancestors().skip(1) {
1679 if let Some((ignore, _)) = self.ignores_by_parent_abs_path.get(ancestor) {
1680 new_ignores.push((ancestor, Some(ignore.clone())));
1681 } else {
1682 new_ignores.push((ancestor, None));
1683 }
1684 }
1685
1686 let mut ignore_stack = IgnoreStack::none();
1687 for (parent_abs_path, ignore) in new_ignores.into_iter().rev() {
1688 if ignore_stack.is_abs_path_ignored(parent_abs_path, true) {
1689 ignore_stack = IgnoreStack::all();
1690 break;
1691 } else if let Some(ignore) = ignore {
1692 ignore_stack = ignore_stack.append(parent_abs_path.into(), ignore);
1693 }
1694 }
1695
1696 if ignore_stack.is_abs_path_ignored(abs_path, is_dir) {
1697 ignore_stack = IgnoreStack::all();
1698 }
1699
1700 ignore_stack
1701 }
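// Worked example (hypothetical paths, not from the original): for
// `/root/a/b/file.rs`, the `.gitignore` files registered for `/root`, `/root/a`, and
// `/root/a/b` are layered onto the stack outermost-first; if an ancestor directory is
// itself ignored by an outer ignore file, the stack collapses to `IgnoreStack::all()`
// and everything beneath that directory is treated as ignored.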
1702
1703 pub fn git_repo_entries(&self) -> &[GitRepositoryEntry] {
1704 &self.git_repositories
1705 }
1706}
1707
1708impl GitRepositoryEntry {
1709 // Note that these paths should be relative to the worktree root.
1710 pub(crate) fn manages(&self, path: &Path) -> bool {
1711 path.starts_with(self.content_path.as_ref())
1712 }
1713
1714 // Note that this path should be relative to the worktree root.
1715 pub(crate) fn in_dot_git(&self, path: &Path) -> bool {
1716 path.starts_with(self.git_dir_path.as_ref())
1717 }
1718}
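// Illustrative sketch (hypothetical paths): with a `content_path` of `zed` and a
// `git_dir_path` of `zed/.git`, `manages("zed/src/main.rs")` is true while
// `manages("other/lib.rs")` is false, and `in_dot_git("zed/.git/HEAD")` is true,
// which lets callers distinguish working-tree edits from changes inside the
// repository's metadata directory.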
1719
1720async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
1721 let contents = fs.load(abs_path).await?;
1722 let parent = abs_path.parent().unwrap_or_else(|| Path::new("/"));
1723 let mut builder = GitignoreBuilder::new(parent);
1724 for line in contents.lines() {
1725 builder.add_line(Some(abs_path.into()), line)?;
1726 }
1727 Ok(builder.build()?)
1728}
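// A minimal sketch (not part of the original) of the `GitignoreBuilder` usage that
// `build_gitignore` relies on, with the file contents inlined instead of loaded
// through `Fs`:
#[cfg(test)]
#[test]
fn gitignore_builder_sketch() {
    let mut builder = GitignoreBuilder::new("/root");
    // Each line of the `.gitignore` contents is added individually, as above.
    builder.add_line(None, "target/").unwrap();
    builder.add_line(None, "*.log").unwrap();
    let gitignore = builder.build().unwrap();
    assert!(gitignore.matched("target", true).is_ignore());
    assert!(gitignore.matched("debug.log", false).is_ignore());
    assert!(!gitignore.matched("src/main.rs", false).is_ignore());
}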
1729
1730impl WorktreeId {
1731 pub fn from_usize(handle_id: usize) -> Self {
1732 Self(handle_id)
1733 }
1734
1735 pub(crate) fn from_proto(id: u64) -> Self {
1736 Self(id as usize)
1737 }
1738
1739 pub fn to_proto(&self) -> u64 {
1740 self.0 as u64
1741 }
1742
1743 pub fn to_usize(&self) -> usize {
1744 self.0
1745 }
1746}
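// A small sketch (not part of the original) of the proto round-trip above:
#[cfg(test)]
#[test]
fn worktree_id_proto_round_trip_sketch() {
    let id = WorktreeId::from_usize(42);
    assert_eq!(id.to_proto(), 42);
    assert_eq!(WorktreeId::from_proto(id.to_proto()), id);
}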
1747
1748impl fmt::Display for WorktreeId {
1749 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1750 self.0.fmt(f)
1751 }
1752}
1753
1754impl Deref for Worktree {
1755 type Target = Snapshot;
1756
1757 fn deref(&self) -> &Self::Target {
1758 match self {
1759 Worktree::Local(worktree) => &worktree.snapshot,
1760 Worktree::Remote(worktree) => &worktree.snapshot,
1761 }
1762 }
1763}
1764
1765impl Deref for LocalWorktree {
1766 type Target = LocalSnapshot;
1767
1768 fn deref(&self) -> &Self::Target {
1769 &self.snapshot
1770 }
1771}
1772
1773impl Deref for RemoteWorktree {
1774 type Target = Snapshot;
1775
1776 fn deref(&self) -> &Self::Target {
1777 &self.snapshot
1778 }
1779}
1780
1781impl fmt::Debug for LocalWorktree {
1782 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1783 self.snapshot.fmt(f)
1784 }
1785}
1786
1787impl fmt::Debug for Snapshot {
1788 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1789 struct EntriesById<'a>(&'a SumTree<PathEntry>);
1790 struct EntriesByPath<'a>(&'a SumTree<Entry>);
1791
1792 impl<'a> fmt::Debug for EntriesByPath<'a> {
1793 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1794 f.debug_map()
1795 .entries(self.0.iter().map(|entry| (&entry.path, entry.id)))
1796 .finish()
1797 }
1798 }
1799
1800 impl<'a> fmt::Debug for EntriesById<'a> {
1801 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1802 f.debug_list().entries(self.0.iter()).finish()
1803 }
1804 }
1805
1806 f.debug_struct("Snapshot")
1807 .field("id", &self.id)
1808 .field("root_name", &self.root_name)
1809 .field("entries_by_path", &EntriesByPath(&self.entries_by_path))
1810 .field("entries_by_id", &EntriesById(&self.entries_by_id))
1811 .finish()
1812 }
1813}
1814
1815#[derive(Clone, PartialEq)]
1816pub struct File {
1817 pub worktree: ModelHandle<Worktree>,
1818 pub path: Arc<Path>,
1819 pub mtime: SystemTime,
1820 pub(crate) entry_id: ProjectEntryId,
1821 pub(crate) is_local: bool,
1822 pub(crate) is_deleted: bool,
1823}
1824
1825impl language::File for File {
1826 fn as_local(&self) -> Option<&dyn language::LocalFile> {
1827 if self.is_local {
1828 Some(self)
1829 } else {
1830 None
1831 }
1832 }
1833
1834 fn mtime(&self) -> SystemTime {
1835 self.mtime
1836 }
1837
1838 fn path(&self) -> &Arc<Path> {
1839 &self.path
1840 }
1841
1842 fn full_path(&self, cx: &AppContext) -> PathBuf {
1843 let mut full_path = PathBuf::new();
1844 let worktree = self.worktree.read(cx);
1845
1846 if worktree.is_visible() {
1847 full_path.push(worktree.root_name());
1848 } else {
1849 let path = worktree.abs_path();
1850
1851 if worktree.is_local() && path.starts_with(cx.global::<HomeDir>().as_path()) {
1852 full_path.push("~");
1853 full_path.push(path.strip_prefix(cx.global::<HomeDir>().as_path()).unwrap());
1854 } else {
1855 full_path.push(path)
1856 }
1857 }
1858
1859 if self.path.components().next().is_some() {
1860 full_path.push(&self.path);
1861 }
1862
1863 full_path
1864 }
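// Example of the branches above (hypothetical paths): a file at `src/lib.rs` in a
// visible worktree whose root is named `zed` yields `zed/src/lib.rs`, whereas the
// same file in a non-visible worktree rooted at `/Users/me/scratch` yields
// `~/scratch/src/lib.rs` when that directory sits under the user's home directory,
// and the full absolute path otherwise.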
1865
1866 /// Returns the last component of this handle's absolute path. If this handle refers to the root
1867 /// of its worktree, then this method will return the name of the worktree itself.
1868 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr {
1869 self.path
1870 .file_name()
1871 .unwrap_or_else(|| OsStr::new(&self.worktree.read(cx).root_name))
1872 }
1873
1874 fn is_deleted(&self) -> bool {
1875 self.is_deleted
1876 }
1877
1878 fn save(
1879 &self,
1880 buffer_id: u64,
1881 text: Rope,
1882 version: clock::Global,
1883 line_ending: LineEnding,
1884 cx: &mut MutableAppContext,
1885 ) -> Task<Result<(clock::Global, String, SystemTime)>> {
1886 self.worktree.update(cx, |worktree, cx| match worktree {
1887 Worktree::Local(worktree) => {
1888 let rpc = worktree.client.clone();
1889 let project_id = worktree.share.as_ref().map(|share| share.project_id);
1890 let fingerprint = text.fingerprint();
1891 let save = worktree.write_file(self.path.clone(), text, line_ending, cx);
1892 cx.background().spawn(async move {
1893 let entry = save.await?;
1894 if let Some(project_id) = project_id {
1895 rpc.send(proto::BufferSaved {
1896 project_id,
1897 buffer_id,
1898 version: serialize_version(&version),
1899 mtime: Some(entry.mtime.into()),
1900 fingerprint: fingerprint.clone(),
1901 })?;
1902 }
1903 Ok((version, fingerprint, entry.mtime))
1904 })
1905 }
1906 Worktree::Remote(worktree) => {
1907 let rpc = worktree.client.clone();
1908 let project_id = worktree.project_id;
1909 cx.foreground().spawn(async move {
1910 let response = rpc
1911 .request(proto::SaveBuffer {
1912 project_id,
1913 buffer_id,
1914 version: serialize_version(&version),
1915 })
1916 .await?;
1917 let version = deserialize_version(response.version);
1918 let mtime = response
1919 .mtime
1920 .ok_or_else(|| anyhow!("missing mtime"))?
1921 .into();
1922 Ok((version, response.fingerprint, mtime))
1923 })
1924 }
1925 })
1926 }
1927
1928 fn as_any(&self) -> &dyn Any {
1929 self
1930 }
1931
1932 fn to_proto(&self) -> rpc::proto::File {
1933 rpc::proto::File {
1934 worktree_id: self.worktree.id() as u64,
1935 entry_id: self.entry_id.to_proto(),
1936 path: self.path.to_string_lossy().into(),
1937 mtime: Some(self.mtime.into()),
1938 is_deleted: self.is_deleted,
1939 }
1940 }
1941}
1942
1943impl language::LocalFile for File {
1944 fn abs_path(&self, cx: &AppContext) -> PathBuf {
1945 self.worktree
1946 .read(cx)
1947 .as_local()
1948 .unwrap()
1949 .abs_path
1950 .join(&self.path)
1951 }
1952
1953 fn load(&self, cx: &AppContext) -> Task<Result<String>> {
1954 let worktree = self.worktree.read(cx).as_local().unwrap();
1955 let abs_path = worktree.absolutize(&self.path);
1956 let fs = worktree.fs.clone();
1957 cx.background()
1958 .spawn(async move { fs.load(&abs_path).await })
1959 }
1960
1961 fn buffer_reloaded(
1962 &self,
1963 buffer_id: u64,
1964 version: &clock::Global,
1965 fingerprint: String,
1966 line_ending: LineEnding,
1967 mtime: SystemTime,
1968 cx: &mut MutableAppContext,
1969 ) {
1970 let worktree = self.worktree.read(cx).as_local().unwrap();
1971 if let Some(project_id) = worktree.share.as_ref().map(|share| share.project_id) {
1972 worktree
1973 .client
1974 .send(proto::BufferReloaded {
1975 project_id,
1976 buffer_id,
1977 version: serialize_version(version),
1978 mtime: Some(mtime.into()),
1979 fingerprint,
1980 line_ending: serialize_line_ending(line_ending) as i32,
1981 })
1982 .log_err();
1983 }
1984 }
1985}
1986
1987impl File {
1988 pub fn from_proto(
1989 proto: rpc::proto::File,
1990 worktree: ModelHandle<Worktree>,
1991 cx: &AppContext,
1992 ) -> Result<Self> {
1993 let worktree_id = worktree
1994 .read(cx)
1995 .as_remote()
1996 .ok_or_else(|| anyhow!("not remote"))?
1997 .id();
1998
1999 if worktree_id.to_proto() != proto.worktree_id {
2000 return Err(anyhow!("worktree id does not match file"));
2001 }
2002
2003 Ok(Self {
2004 worktree,
2005 path: Path::new(&proto.path).into(),
2006 mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
2007 entry_id: ProjectEntryId::from_proto(proto.entry_id),
2008 is_local: false,
2009 is_deleted: proto.is_deleted,
2010 })
2011 }
2012
2013 pub fn from_dyn(file: Option<&dyn language::File>) -> Option<&Self> {
2014 file.and_then(|f| f.as_any().downcast_ref())
2015 }
2016
2017 pub fn worktree_id(&self, cx: &AppContext) -> WorktreeId {
2018 self.worktree.read(cx).id()
2019 }
2020
2021 pub fn project_entry_id(&self, _: &AppContext) -> Option<ProjectEntryId> {
2022 if self.is_deleted {
2023 None
2024 } else {
2025 Some(self.entry_id)
2026 }
2027 }
2028}
2029
2030#[derive(Clone, Debug, PartialEq, Eq)]
2031pub struct Entry {
2032 pub id: ProjectEntryId,
2033 pub kind: EntryKind,
2034 pub path: Arc<Path>,
2035 pub inode: u64,
2036 pub mtime: SystemTime,
2037 pub is_symlink: bool,
2038 pub is_ignored: bool,
2039}
2040
2041#[derive(Clone, Copy, Debug, PartialEq, Eq)]
2042pub enum EntryKind {
2043 PendingDir,
2044 Dir,
2045 File(CharBag),
2046}
2047
2048impl Entry {
2049 fn new(
2050 path: Arc<Path>,
2051 metadata: &fs::Metadata,
2052 next_entry_id: &AtomicUsize,
2053 root_char_bag: CharBag,
2054 ) -> Self {
2055 Self {
2056 id: ProjectEntryId::new(next_entry_id),
2057 kind: if metadata.is_dir {
2058 EntryKind::PendingDir
2059 } else {
2060 EntryKind::File(char_bag_for_path(root_char_bag, &path))
2061 },
2062 path,
2063 inode: metadata.inode,
2064 mtime: metadata.mtime,
2065 is_symlink: metadata.is_symlink,
2066 is_ignored: false,
2067 }
2068 }
2069
2070 pub fn is_dir(&self) -> bool {
2071 matches!(self.kind, EntryKind::Dir | EntryKind::PendingDir)
2072 }
2073
2074 pub fn is_file(&self) -> bool {
2075 matches!(self.kind, EntryKind::File(_))
2076 }
2077}
2078
2079impl sum_tree::Item for Entry {
2080 type Summary = EntrySummary;
2081
2082 fn summary(&self) -> Self::Summary {
2083 let visible_count = if self.is_ignored { 0 } else { 1 };
2084 let file_count;
2085 let visible_file_count;
2086 if self.is_file() {
2087 file_count = 1;
2088 visible_file_count = visible_count;
2089 } else {
2090 file_count = 0;
2091 visible_file_count = 0;
2092 }
2093
2094 EntrySummary {
2095 max_path: self.path.clone(),
2096 count: 1,
2097 visible_count,
2098 file_count,
2099 visible_file_count,
2100 }
2101 }
2102}
2103
2104impl sum_tree::KeyedItem for Entry {
2105 type Key = PathKey;
2106
2107 fn key(&self) -> Self::Key {
2108 PathKey(self.path.clone())
2109 }
2110}
2111
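/// Aggregated data for a subtree of entries: the greatest path in the subtree plus
/// counts of all entries, non-ignored ("visible") entries, files, and visible files.
/// These counts let a `Traversal` seek by path or by filtered index.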
2112#[derive(Clone, Debug)]
2113pub struct EntrySummary {
2114 max_path: Arc<Path>,
2115 count: usize,
2116 visible_count: usize,
2117 file_count: usize,
2118 visible_file_count: usize,
2119}
2120
2121impl Default for EntrySummary {
2122 fn default() -> Self {
2123 Self {
2124 max_path: Arc::from(Path::new("")),
2125 count: 0,
2126 visible_count: 0,
2127 file_count: 0,
2128 visible_file_count: 0,
2129 }
2130 }
2131}
2132
2133impl sum_tree::Summary for EntrySummary {
2134 type Context = ();
2135
2136 fn add_summary(&mut self, rhs: &Self, _: &()) {
2137 self.max_path = rhs.max_path.clone();
2138 self.count += rhs.count;
2139 self.visible_count += rhs.visible_count;
2140 self.file_count += rhs.file_count;
2141 self.visible_file_count += rhs.visible_file_count;
2142 }
2143}
2144
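/// A record in the snapshot's id-keyed index, mapping a `ProjectEntryId` back to
/// its path, its ignore status, and the scan in which it was last updated.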
2145#[derive(Clone, Debug)]
2146struct PathEntry {
2147 id: ProjectEntryId,
2148 path: Arc<Path>,
2149 is_ignored: bool,
2150 scan_id: usize,
2151}
2152
2153impl sum_tree::Item for PathEntry {
2154 type Summary = PathEntrySummary;
2155
2156 fn summary(&self) -> Self::Summary {
2157 PathEntrySummary { max_id: self.id }
2158 }
2159}
2160
2161impl sum_tree::KeyedItem for PathEntry {
2162 type Key = ProjectEntryId;
2163
2164 fn key(&self) -> Self::Key {
2165 self.id
2166 }
2167}
2168
2169#[derive(Clone, Debug, Default)]
2170struct PathEntrySummary {
2171 max_id: ProjectEntryId,
2172}
2173
2174impl sum_tree::Summary for PathEntrySummary {
2175 type Context = ();
2176
2177 fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
2178 self.max_id = summary.max_id;
2179 }
2180}
2181
2182impl<'a> sum_tree::Dimension<'a, PathEntrySummary> for ProjectEntryId {
2183 fn add_summary(&mut self, summary: &'a PathEntrySummary, _: &()) {
2184 *self = summary.max_id;
2185 }
2186}
2187
2188#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
2189pub struct PathKey(Arc<Path>);
2190
2191impl Default for PathKey {
2192 fn default() -> Self {
2193 Self(Path::new("").into())
2194 }
2195}
2196
2197impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey {
2198 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2199 self.0 = summary.max_path.clone();
2200 }
2201}
2202
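/// Scans a local worktree on a background executor, keeping the shared
/// `LocalSnapshot` up to date and reporting progress over the `notify` channel as
/// `ScanState` values.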
2203struct BackgroundScanner {
2204 fs: Arc<dyn Fs>,
2205 snapshot: Arc<Mutex<LocalSnapshot>>,
2206 notify: UnboundedSender<ScanState>,
2207 executor: Arc<executor::Background>,
2208}
2209
2210impl BackgroundScanner {
2211 fn new(
2212 snapshot: Arc<Mutex<LocalSnapshot>>,
2213 notify: UnboundedSender<ScanState>,
2214 fs: Arc<dyn Fs>,
2215 executor: Arc<executor::Background>,
2216 ) -> Self {
2217 Self {
2218 fs,
2219 snapshot,
2220 notify,
2221 executor,
2222 }
2223 }
2224
2225 fn abs_path(&self) -> Arc<Path> {
2226 self.snapshot.lock().abs_path.clone()
2227 }
2228
2229 fn snapshot(&self) -> LocalSnapshot {
2230 self.snapshot.lock().clone()
2231 }
2232
2233 async fn run(mut self, events_rx: impl Stream<Item = Vec<fsevent::Event>>) {
2234 if self.notify.unbounded_send(ScanState::Initializing).is_err() {
2235 return;
2236 }
2237
2238 if let Err(err) = self.scan_dirs().await {
2239 if self
2240 .notify
2241 .unbounded_send(ScanState::Err(Arc::new(err)))
2242 .is_err()
2243 {
2244 return;
2245 }
2246 }
2247
2248 if self.notify.unbounded_send(ScanState::Idle).is_err() {
2249 return;
2250 }
2251
2252 futures::pin_mut!(events_rx);
2253
2254 while let Some(mut events) = events_rx.next().await {
2255 while let Poll::Ready(Some(additional_events)) = futures::poll!(events_rx.next()) {
2256 events.extend(additional_events);
2257 }
2258
2259 if self.notify.unbounded_send(ScanState::Updating).is_err() {
2260 break;
2261 }
2262
2263 if !self.process_events(events).await {
2264 break;
2265 }
2266
2267 if self.notify.unbounded_send(ScanState::Idle).is_err() {
2268 break;
2269 }
2270 }
2271 }
2272
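    /// Performs the initial recursive scan of the worktree: loads `.gitignore` files
    /// from the root's ancestors, then walks directories in parallel using a work
    /// queue drained by one task per CPU.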
2273 async fn scan_dirs(&mut self) -> Result<()> {
2274 let root_char_bag;
2275 let root_abs_path;
2276 let root_inode;
2277 let is_dir;
2278 let next_entry_id;
2279 {
2280 let snapshot = self.snapshot.lock();
2281 root_char_bag = snapshot.root_char_bag;
2282 root_abs_path = snapshot.abs_path.clone();
2283 root_inode = snapshot.root_entry().map(|e| e.inode);
2284 is_dir = snapshot.root_entry().map_or(false, |e| e.is_dir());
2285 next_entry_id = snapshot.next_entry_id.clone();
2286 };
2287
2288 // Populate ignores above the root.
2289 for ancestor in root_abs_path.ancestors().skip(1) {
2290 if let Ok(ignore) = build_gitignore(&ancestor.join(&*GITIGNORE), self.fs.as_ref()).await
2291 {
2292 self.snapshot
2293 .lock()
2294 .ignores_by_parent_abs_path
2295 .insert(ancestor.into(), (ignore.into(), 0));
2296 }
2297 }
2298
2299 let ignore_stack = {
2300 let mut snapshot = self.snapshot.lock();
2301 let ignore_stack = snapshot.ignore_stack_for_abs_path(&root_abs_path, true);
2302 if ignore_stack.is_all() {
2303 if let Some(mut root_entry) = snapshot.root_entry().cloned() {
2304 root_entry.is_ignored = true;
2305 snapshot.insert_entry(root_entry, self.fs.as_ref());
2306 }
2307 }
2308 ignore_stack
2309 };
2310
2311 if is_dir {
2312 let path: Arc<Path> = Arc::from(Path::new(""));
2313 let mut ancestor_inodes = TreeSet::default();
2314 if let Some(root_inode) = root_inode {
2315 ancestor_inodes.insert(root_inode);
2316 }
2317
2318 let (tx, rx) = channel::unbounded();
2319 self.executor
2320 .block(tx.send(ScanJob {
2321 abs_path: root_abs_path.to_path_buf(),
2322 path,
2323 ignore_stack,
2324 ancestor_inodes,
2325 scan_queue: tx.clone(),
2326 }))
2327 .unwrap();
2328 drop(tx);
2329
2330 self.executor
2331 .scoped(|scope| {
2332 for _ in 0..self.executor.num_cpus() {
2333 scope.spawn(async {
2334 while let Ok(job) = rx.recv().await {
2335 if let Err(err) = self
2336 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2337 .await
2338 {
2339 log::error!("error scanning {:?}: {}", job.abs_path, err);
2340 }
2341 }
2342 });
2343 }
2344 })
2345 .await;
2346 }
2347
2348 Ok(())
2349 }
2350
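    /// Scans a single directory: reads its children, builds their entries, records
    /// any `.gitignore` found there, and enqueues sub-directories as new scan jobs.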
2351 async fn scan_dir(
2352 &self,
2353 root_char_bag: CharBag,
2354 next_entry_id: Arc<AtomicUsize>,
2355 job: &ScanJob,
2356 ) -> Result<()> {
2357 let mut new_entries: Vec<Entry> = Vec::new();
2358 let mut new_jobs: Vec<ScanJob> = Vec::new();
2359 let mut ignore_stack = job.ignore_stack.clone();
2360 let mut new_ignore = None;
2361
2362 let mut child_paths = self.fs.read_dir(&job.abs_path).await?;
2363 while let Some(child_abs_path) = child_paths.next().await {
2364 let child_abs_path = match child_abs_path {
2365 Ok(child_abs_path) => child_abs_path,
2366 Err(error) => {
2367 log::error!("error processing entry {:?}", error);
2368 continue;
2369 }
2370 };
2371 let child_name = child_abs_path.file_name().unwrap();
2372 let child_path: Arc<Path> = job.path.join(child_name).into();
2373 let child_metadata = match self.fs.metadata(&child_abs_path).await {
2374 Ok(Some(metadata)) => metadata,
2375 Ok(None) => continue,
2376 Err(err) => {
2377 log::error!("error processing {:?}: {:?}", child_abs_path, err);
2378 continue;
2379 }
2380 };
2381
2382 // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
2383 if child_name == *GITIGNORE {
2384 match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
2385 Ok(ignore) => {
2386 let ignore = Arc::new(ignore);
2387 ignore_stack =
2388 ignore_stack.append(job.abs_path.as_path().into(), ignore.clone());
2389 new_ignore = Some(ignore);
2390 }
2391 Err(error) => {
2392 log::error!(
2393 "error loading .gitignore file {:?} - {:?}",
2394 child_name,
2395 error
2396 );
2397 }
2398 }
2399
                // Update the ignore status of any child entries we've already processed to reflect
                // the ignore file in the current directory. Because `.gitignore` starts with a `.`,
                // there should rarely be many such entries to revisit. Also update the ignore stack
                // associated with any new jobs.
2404 let mut new_jobs = new_jobs.iter_mut();
2405 for entry in &mut new_entries {
2406 let entry_abs_path = self.abs_path().join(&entry.path);
2407 entry.is_ignored =
2408 ignore_stack.is_abs_path_ignored(&entry_abs_path, entry.is_dir());
2409 if entry.is_dir() {
2410 new_jobs.next().unwrap().ignore_stack = if entry.is_ignored {
2411 IgnoreStack::all()
2412 } else {
2413 ignore_stack.clone()
2414 };
2415 }
2416 }
2417 }
2418
2419 let mut child_entry = Entry::new(
2420 child_path.clone(),
2421 &child_metadata,
2422 &next_entry_id,
2423 root_char_bag,
2424 );
2425
2426 if child_entry.is_dir() {
2427 let is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true);
2428 child_entry.is_ignored = is_ignored;
2429
2430 if !job.ancestor_inodes.contains(&child_entry.inode) {
2431 let mut ancestor_inodes = job.ancestor_inodes.clone();
2432 ancestor_inodes.insert(child_entry.inode);
2433 new_jobs.push(ScanJob {
2434 abs_path: child_abs_path,
2435 path: child_path,
2436 ignore_stack: if is_ignored {
2437 IgnoreStack::all()
2438 } else {
2439 ignore_stack.clone()
2440 },
2441 ancestor_inodes,
2442 scan_queue: job.scan_queue.clone(),
2443 });
2444 }
2445 } else {
2446 child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
2447 }
2448
2449 new_entries.push(child_entry);
2450 }
2451
2452 self.snapshot.lock().populate_dir(
2453 job.path.clone(),
2454 new_entries,
2455 new_ignore,
2456 self.fs.as_ref(),
2457 );
2458 for new_job in new_jobs {
2459 job.scan_queue.send(new_job).await.unwrap();
2460 }
2461
2462 Ok(())
2463 }
2464
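    /// Applies a batch of file-system events to the snapshot: removes the affected
    /// paths, re-inserts entries from fresh metadata, queues newly created
    /// directories for scanning, and then refreshes ignore statuses and git
    /// repositories. Returns `false` if the worktree root can no longer be
    /// canonicalized.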
2465 async fn process_events(&mut self, mut events: Vec<fsevent::Event>) -> bool {
2466 events.sort_unstable_by(|a, b| a.path.cmp(&b.path));
2467 events.dedup_by(|a, b| a.path.starts_with(&b.path));
2468
2469 let root_char_bag;
2470 let root_abs_path;
2471 let next_entry_id;
2472 {
2473 let snapshot = self.snapshot.lock();
2474 root_char_bag = snapshot.root_char_bag;
2475 root_abs_path = snapshot.abs_path.clone();
2476 next_entry_id = snapshot.next_entry_id.clone();
2477 }
2478
2479 let root_canonical_path = if let Ok(path) = self.fs.canonicalize(&root_abs_path).await {
2480 path
2481 } else {
2482 return false;
2483 };
2484 let metadata = futures::future::join_all(
2485 events
2486 .iter()
2487 .map(|event| self.fs.metadata(&event.path))
2488 .collect::<Vec<_>>(),
2489 )
2490 .await;
2491
        // Hold the snapshot lock while removing and re-inserting the entries for each
        // event. This way, the snapshot is not observable to the foreground thread while
        // this operation is in progress.
2495 let (scan_queue_tx, scan_queue_rx) = channel::unbounded();
2496 {
2497 let mut snapshot = self.snapshot.lock();
2498 snapshot.scan_id += 1;
2499 for event in &events {
2500 if let Ok(path) = event.path.strip_prefix(&root_canonical_path) {
2501 snapshot.remove_path(path);
2502 }
2503 }
2504
2505 for (event, metadata) in events.into_iter().zip(metadata.into_iter()) {
2506 let path: Arc<Path> = match event.path.strip_prefix(&root_canonical_path) {
2507 Ok(path) => Arc::from(path.to_path_buf()),
2508 Err(_) => {
2509 log::error!(
2510 "unexpected event {:?} for root path {:?}",
2511 event.path,
2512 root_canonical_path
2513 );
2514 continue;
2515 }
2516 };
2517 let abs_path = root_abs_path.join(&path);
2518
2519 match metadata {
2520 Ok(Some(metadata)) => {
2521 let ignore_stack =
2522 snapshot.ignore_stack_for_abs_path(&abs_path, metadata.is_dir);
2523 let mut fs_entry = Entry::new(
2524 path.clone(),
2525 &metadata,
2526 snapshot.next_entry_id.as_ref(),
2527 snapshot.root_char_bag,
2528 );
2529 fs_entry.is_ignored = ignore_stack.is_all();
2530 snapshot.insert_entry(fs_entry, self.fs.as_ref());
2531
2532 let scan_id = snapshot.scan_id;
2533 if let Some(repo) = snapshot.in_dot_git(&path) {
2534 repo.repo.lock().reload_index();
2535 repo.scan_id = scan_id;
2536 }
2537
2538 let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path);
2539 if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) {
2540 ancestor_inodes.insert(metadata.inode);
2541 self.executor
2542 .block(scan_queue_tx.send(ScanJob {
2543 abs_path,
2544 path,
2545 ignore_stack,
2546 ancestor_inodes,
2547 scan_queue: scan_queue_tx.clone(),
2548 }))
2549 .unwrap();
2550 }
2551 }
2552 Ok(None) => {}
2553 Err(err) => {
2554 // TODO - create a special 'error' entry in the entries tree to mark this
2555 log::error!("error reading file on event {:?}", err);
2556 }
2557 }
2558 }
2559 drop(scan_queue_tx);
2560 }
2561
2562 // Scan any directories that were created as part of this event batch.
2563 self.executor
2564 .scoped(|scope| {
2565 for _ in 0..self.executor.num_cpus() {
2566 scope.spawn(async {
2567 while let Ok(job) = scan_queue_rx.recv().await {
2568 if let Err(err) = self
2569 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2570 .await
2571 {
2572 log::error!("error scanning {:?}: {}", job.abs_path, err);
2573 }
2574 }
2575 });
2576 }
2577 })
2578 .await;
2579
2580 // Attempt to detect renames only over a single batch of file-system events.
2581 self.snapshot.lock().removed_entry_ids.clear();
2582
2583 self.update_ignore_statuses().await;
2584 self.update_git_repositories();
2585 true
2586 }
2587
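    /// Recomputes ignore statuses beneath any `.gitignore` files that were updated or
    /// removed in the latest scan, fanning the work out over a queue of
    /// `UpdateIgnoreStatusJob`s drained by one task per CPU.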
2588 async fn update_ignore_statuses(&self) {
2589 let mut snapshot = self.snapshot();
2590
2591 let mut ignores_to_update = Vec::new();
2592 let mut ignores_to_delete = Vec::new();
2593 for (parent_abs_path, (_, scan_id)) in &snapshot.ignores_by_parent_abs_path {
2594 if let Ok(parent_path) = parent_abs_path.strip_prefix(&snapshot.abs_path) {
2595 if *scan_id == snapshot.scan_id && snapshot.entry_for_path(parent_path).is_some() {
2596 ignores_to_update.push(parent_abs_path.clone());
2597 }
2598
2599 let ignore_path = parent_path.join(&*GITIGNORE);
2600 if snapshot.entry_for_path(ignore_path).is_none() {
2601 ignores_to_delete.push(parent_abs_path.clone());
2602 }
2603 }
2604 }
2605
2606 for parent_abs_path in ignores_to_delete {
2607 snapshot.ignores_by_parent_abs_path.remove(&parent_abs_path);
2608 self.snapshot
2609 .lock()
2610 .ignores_by_parent_abs_path
2611 .remove(&parent_abs_path);
2612 }
2613
2614 let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded();
2615 ignores_to_update.sort_unstable();
2616 let mut ignores_to_update = ignores_to_update.into_iter().peekable();
2617 while let Some(parent_abs_path) = ignores_to_update.next() {
2618 while ignores_to_update
2619 .peek()
2620 .map_or(false, |p| p.starts_with(&parent_abs_path))
2621 {
2622 ignores_to_update.next().unwrap();
2623 }
2624
2625 let ignore_stack = snapshot.ignore_stack_for_abs_path(&parent_abs_path, true);
2626 ignore_queue_tx
2627 .send(UpdateIgnoreStatusJob {
2628 abs_path: parent_abs_path,
2629 ignore_stack,
2630 ignore_queue: ignore_queue_tx.clone(),
2631 })
2632 .await
2633 .unwrap();
2634 }
2635 drop(ignore_queue_tx);
2636
2637 self.executor
2638 .scoped(|scope| {
2639 for _ in 0..self.executor.num_cpus() {
2640 scope.spawn(async {
2641 while let Ok(job) = ignore_queue_rx.recv().await {
2642 self.update_ignore_status(job, &snapshot).await;
2643 }
2644 });
2645 }
2646 })
2647 .await;
2648 }
2649
2650 fn update_git_repositories(&self) {
2651 let mut snapshot = self.snapshot.lock();
2652 let mut git_repositories = mem::take(&mut snapshot.git_repositories);
2653 git_repositories.retain(|repo| snapshot.entry_for_path(&repo.git_dir_path).is_some());
2654 snapshot.git_repositories = git_repositories;
2655 }
2656
2657 async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) {
2658 let mut ignore_stack = job.ignore_stack;
2659 if let Some((ignore, _)) = snapshot.ignores_by_parent_abs_path.get(&job.abs_path) {
2660 ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone());
2661 }
2662
2663 let mut entries_by_id_edits = Vec::new();
2664 let mut entries_by_path_edits = Vec::new();
2665 let path = job.abs_path.strip_prefix(&snapshot.abs_path).unwrap();
2666 for mut entry in snapshot.child_entries(path).cloned() {
2667 let was_ignored = entry.is_ignored;
2668 let abs_path = self.abs_path().join(&entry.path);
2669 entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir());
2670 if entry.is_dir() {
2671 let child_ignore_stack = if entry.is_ignored {
2672 IgnoreStack::all()
2673 } else {
2674 ignore_stack.clone()
2675 };
2676 job.ignore_queue
2677 .send(UpdateIgnoreStatusJob {
2678 abs_path: abs_path.into(),
2679 ignore_stack: child_ignore_stack,
2680 ignore_queue: job.ignore_queue.clone(),
2681 })
2682 .await
2683 .unwrap();
2684 }
2685
2686 if entry.is_ignored != was_ignored {
2687 let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone();
2688 path_entry.scan_id = snapshot.scan_id;
2689 path_entry.is_ignored = entry.is_ignored;
2690 entries_by_id_edits.push(Edit::Insert(path_entry));
2691 entries_by_path_edits.push(Edit::Insert(entry));
2692 }
2693 }
2694
2695 let mut snapshot = self.snapshot.lock();
2696 snapshot.entries_by_path.edit(entries_by_path_edits, &());
2697 snapshot.entries_by_id.edit(entries_by_id_edits, &());
2698 }
2699}
2700
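/// Extends the worktree root's character bag with the lowercased characters of
/// `path`, producing the `CharBag` used to fuzzy-match against this entry.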
2701fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
2702 let mut result = root_char_bag;
2703 result.extend(
2704 path.to_string_lossy()
2705 .chars()
2706 .map(|c| c.to_ascii_lowercase()),
2707 );
2708 result
2709}
2710
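/// A unit of work for the parallel directory scan: the directory to scan, the ignore
/// stack in effect there, the inodes of its ancestors (used to break symlink
/// cycles), and a sender for enqueueing child directories.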
2711struct ScanJob {
2712 abs_path: PathBuf,
2713 path: Arc<Path>,
2714 ignore_stack: Arc<IgnoreStack>,
2715 scan_queue: Sender<ScanJob>,
2716 ancestor_inodes: TreeSet<u64>,
2717}
2718
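/// A unit of work for re-evaluating ignore statuses beneath a directory whose
/// ignore stack has changed.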
2719struct UpdateIgnoreStatusJob {
2720 abs_path: Arc<Path>,
2721 ignore_stack: Arc<IgnoreStack>,
2722 ignore_queue: Sender<UpdateIgnoreStatusJob>,
2723}
2724
2725pub trait WorktreeHandle {
2726 #[cfg(any(test, feature = "test-support"))]
2727 fn flush_fs_events<'a>(
2728 &self,
2729 cx: &'a gpui::TestAppContext,
2730 ) -> futures::future::LocalBoxFuture<'a, ()>;
2731}
2732
2733impl WorktreeHandle for ModelHandle<Worktree> {
    // The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
    // occurred before the worktree was constructed. These events can cause the worktree to perform
    // extra directory scans and emit extra scan-state notifications.
2737 //
2738 // This function mutates the worktree's directory and waits for those mutations to be picked up,
2739 // to ensure that all redundant FS events have already been processed.
2740 #[cfg(any(test, feature = "test-support"))]
2741 fn flush_fs_events<'a>(
2742 &self,
2743 cx: &'a gpui::TestAppContext,
2744 ) -> futures::future::LocalBoxFuture<'a, ()> {
2745 use smol::future::FutureExt;
2746
2747 let filename = "fs-event-sentinel";
2748 let tree = self.clone();
2749 let (fs, root_path) = self.read_with(cx, |tree, _| {
2750 let tree = tree.as_local().unwrap();
2751 (tree.fs.clone(), tree.abs_path().clone())
2752 });
2753
2754 async move {
2755 fs.create_file(&root_path.join(filename), Default::default())
2756 .await
2757 .unwrap();
2758 tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_some())
2759 .await;
2760
2761 fs.remove_file(&root_path.join(filename), Default::default())
2762 .await
2763 .unwrap();
2764 tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_none())
2765 .await;
2766
2767 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2768 .await;
2769 }
2770 .boxed_local()
2771 }
2772}
2773
2774#[derive(Clone, Debug)]
2775struct TraversalProgress<'a> {
2776 max_path: &'a Path,
2777 count: usize,
2778 visible_count: usize,
2779 file_count: usize,
2780 visible_file_count: usize,
2781}
2782
2783impl<'a> TraversalProgress<'a> {
2784 fn count(&self, include_dirs: bool, include_ignored: bool) -> usize {
2785 match (include_ignored, include_dirs) {
2786 (true, true) => self.count,
2787 (true, false) => self.file_count,
2788 (false, true) => self.visible_count,
2789 (false, false) => self.visible_file_count,
2790 }
2791 }
2792}
2793
2794impl<'a> sum_tree::Dimension<'a, EntrySummary> for TraversalProgress<'a> {
2795 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2796 self.max_path = summary.max_path.as_ref();
2797 self.count += summary.count;
2798 self.visible_count += summary.visible_count;
2799 self.file_count += summary.file_count;
2800 self.visible_file_count += summary.visible_file_count;
2801 }
2802}
2803
2804impl<'a> Default for TraversalProgress<'a> {
2805 fn default() -> Self {
2806 Self {
2807 max_path: Path::new(""),
2808 count: 0,
2809 visible_count: 0,
2810 file_count: 0,
2811 visible_file_count: 0,
2812 }
2813 }
2814}
2815
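/// A cursor over a snapshot's entries in path order that can optionally skip
/// directories and/or ignored entries; see the `entries` calls in the tests below
/// for typical usage.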
2816pub struct Traversal<'a> {
2817 cursor: sum_tree::Cursor<'a, Entry, TraversalProgress<'a>>,
2818 include_ignored: bool,
2819 include_dirs: bool,
2820}
2821
2822impl<'a> Traversal<'a> {
2823 pub fn advance(&mut self) -> bool {
2824 self.advance_to_offset(self.offset() + 1)
2825 }
2826
2827 pub fn advance_to_offset(&mut self, offset: usize) -> bool {
2828 self.cursor.seek_forward(
2829 &TraversalTarget::Count {
2830 count: offset,
2831 include_dirs: self.include_dirs,
2832 include_ignored: self.include_ignored,
2833 },
2834 Bias::Right,
2835 &(),
2836 )
2837 }
2838
2839 pub fn advance_to_sibling(&mut self) -> bool {
2840 while let Some(entry) = self.cursor.item() {
2841 self.cursor.seek_forward(
2842 &TraversalTarget::PathSuccessor(&entry.path),
2843 Bias::Left,
2844 &(),
2845 );
2846 if let Some(entry) = self.cursor.item() {
2847 if (self.include_dirs || !entry.is_dir())
2848 && (self.include_ignored || !entry.is_ignored)
2849 {
2850 return true;
2851 }
2852 }
2853 }
2854 false
2855 }
2856
2857 pub fn entry(&self) -> Option<&'a Entry> {
2858 self.cursor.item()
2859 }
2860
2861 pub fn offset(&self) -> usize {
2862 self.cursor
2863 .start()
2864 .count(self.include_dirs, self.include_ignored)
2865 }
2866}
2867
2868impl<'a> Iterator for Traversal<'a> {
2869 type Item = &'a Entry;
2870
2871 fn next(&mut self) -> Option<Self::Item> {
2872 if let Some(item) = self.entry() {
2873 self.advance();
2874 Some(item)
2875 } else {
2876 None
2877 }
2878 }
2879}
2880
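/// Seek targets for a `Traversal`: an exact path, the position just past a path and
/// all of its descendants (`PathSuccessor`), or an index into the filtered sequence
/// of entries (`Count`).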
2881#[derive(Debug)]
2882enum TraversalTarget<'a> {
2883 Path(&'a Path),
2884 PathSuccessor(&'a Path),
2885 Count {
2886 count: usize,
2887 include_ignored: bool,
2888 include_dirs: bool,
2889 },
2890}
2891
2892impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> {
2893 fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering {
2894 match self {
2895 TraversalTarget::Path(path) => path.cmp(&cursor_location.max_path),
2896 TraversalTarget::PathSuccessor(path) => {
2897 if !cursor_location.max_path.starts_with(path) {
2898 Ordering::Equal
2899 } else {
2900 Ordering::Greater
2901 }
2902 }
2903 TraversalTarget::Count {
2904 count,
2905 include_dirs,
2906 include_ignored,
2907 } => Ord::cmp(
2908 count,
2909 &cursor_location.count(*include_dirs, *include_ignored),
2910 ),
2911 }
2912 }
2913}
2914
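/// Iterates over the immediate children of a parent path by yielding the current
/// entry and then advancing the traversal past that entry's descendants.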
2915struct ChildEntriesIter<'a> {
2916 parent_path: &'a Path,
2917 traversal: Traversal<'a>,
2918}
2919
2920impl<'a> Iterator for ChildEntriesIter<'a> {
2921 type Item = &'a Entry;
2922
2923 fn next(&mut self) -> Option<Self::Item> {
2924 if let Some(item) = self.traversal.entry() {
2925 if item.path.starts_with(&self.parent_path) {
2926 self.traversal.advance_to_sibling();
2927 return Some(item);
2928 }
2929 }
2930 None
2931 }
2932}
2933
2934impl<'a> From<&'a Entry> for proto::Entry {
2935 fn from(entry: &'a Entry) -> Self {
2936 Self {
2937 id: entry.id.to_proto(),
2938 is_dir: entry.is_dir(),
2939 path: entry.path.to_string_lossy().into(),
2940 inode: entry.inode,
2941 mtime: Some(entry.mtime.into()),
2942 is_symlink: entry.is_symlink,
2943 is_ignored: entry.is_ignored,
2944 }
2945 }
2946}
2947
2948impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
2949 type Error = anyhow::Error;
2950
2951 fn try_from((root_char_bag, entry): (&'a CharBag, proto::Entry)) -> Result<Self> {
2952 if let Some(mtime) = entry.mtime {
2953 let kind = if entry.is_dir {
2954 EntryKind::Dir
2955 } else {
2956 let mut char_bag = *root_char_bag;
2957 char_bag.extend(entry.path.chars().map(|c| c.to_ascii_lowercase()));
2958 EntryKind::File(char_bag)
2959 };
2960 let path: Arc<Path> = PathBuf::from(entry.path).into();
2961 Ok(Entry {
2962 id: ProjectEntryId::from_proto(entry.id),
2963 kind,
2964 path,
2965 inode: entry.inode,
2966 mtime: mtime.into(),
2967 is_symlink: entry.is_symlink,
2968 is_ignored: entry.is_ignored,
2969 })
2970 } else {
2971 Err(anyhow!(
2972 "missing mtime in remote worktree entry {:?}",
2973 entry.path
2974 ))
2975 }
2976 }
2977}
2978
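/// Sends an `UpdateWorktree` message to the server, splitting it into bounded chunks
/// (deliberately tiny under test) and sending each chunk as a separate request.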
2979async fn send_worktree_update(client: &Arc<Client>, update: proto::UpdateWorktree) -> Result<()> {
2980 #[cfg(any(test, feature = "test-support"))]
2981 const MAX_CHUNK_SIZE: usize = 2;
2982 #[cfg(not(any(test, feature = "test-support")))]
2983 const MAX_CHUNK_SIZE: usize = 256;
2984
2985 for update in proto::split_worktree_update(update, MAX_CHUNK_SIZE) {
2986 client.request(update).await?;
2987 }
2988
2989 Ok(())
2990}
2991
2992#[cfg(test)]
2993mod tests {
2994 use super::*;
2995 use anyhow::Result;
2996 use client::test::FakeHttpClient;
2997 use fs::repository::FakeGitRepository;
2998 use fs::{FakeFs, RealFs};
2999 use gpui::{executor::Deterministic, TestAppContext};
3000 use rand::prelude::*;
3001 use serde_json::json;
3002 use std::{
3003 env,
3004 fmt::Write,
3005 time::{SystemTime, UNIX_EPOCH},
3006 };
3007
3008 use util::test::temp_tree;
3009
3010 #[gpui::test]
3011 async fn test_traversal(cx: &mut TestAppContext) {
3012 let fs = FakeFs::new(cx.background());
3013 fs.insert_tree(
3014 "/root",
3015 json!({
3016 ".gitignore": "a/b\n",
3017 "a": {
3018 "b": "",
3019 "c": "",
3020 }
3021 }),
3022 )
3023 .await;
3024
3025 let http_client = FakeHttpClient::with_404_response();
3026 let client = cx.read(|cx| Client::new(http_client, cx));
3027
3028 let tree = Worktree::local(
3029 client,
3030 Arc::from(Path::new("/root")),
3031 true,
3032 fs,
3033 Default::default(),
3034 &mut cx.to_async(),
3035 )
3036 .await
3037 .unwrap();
3038 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3039 .await;
3040
3041 tree.read_with(cx, |tree, _| {
3042 assert_eq!(
3043 tree.entries(false)
3044 .map(|entry| entry.path.as_ref())
3045 .collect::<Vec<_>>(),
3046 vec![
3047 Path::new(""),
3048 Path::new(".gitignore"),
3049 Path::new("a"),
3050 Path::new("a/c"),
3051 ]
3052 );
3053 assert_eq!(
3054 tree.entries(true)
3055 .map(|entry| entry.path.as_ref())
3056 .collect::<Vec<_>>(),
3057 vec![
3058 Path::new(""),
3059 Path::new(".gitignore"),
3060 Path::new("a"),
3061 Path::new("a/b"),
3062 Path::new("a/c"),
3063 ]
3064 );
3065 })
3066 }
3067
3068 #[gpui::test(iterations = 10)]
3069 async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
3070 let fs = FakeFs::new(cx.background());
3071 fs.insert_tree(
3072 "/root",
3073 json!({
3074 "lib": {
3075 "a": {
3076 "a.txt": ""
3077 },
3078 "b": {
3079 "b.txt": ""
3080 }
3081 }
3082 }),
3083 )
3084 .await;
3085 fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
3086 fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
3087
3088 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3089 let tree = Worktree::local(
3090 client,
3091 Arc::from(Path::new("/root")),
3092 true,
3093 fs.clone(),
3094 Default::default(),
3095 &mut cx.to_async(),
3096 )
3097 .await
3098 .unwrap();
3099
3100 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3101 .await;
3102
3103 tree.read_with(cx, |tree, _| {
3104 assert_eq!(
3105 tree.entries(false)
3106 .map(|entry| entry.path.as_ref())
3107 .collect::<Vec<_>>(),
3108 vec![
3109 Path::new(""),
3110 Path::new("lib"),
3111 Path::new("lib/a"),
3112 Path::new("lib/a/a.txt"),
3113 Path::new("lib/a/lib"),
3114 Path::new("lib/b"),
3115 Path::new("lib/b/b.txt"),
3116 Path::new("lib/b/lib"),
3117 ]
3118 );
3119 });
3120
3121 fs.rename(
3122 Path::new("/root/lib/a/lib"),
3123 Path::new("/root/lib/a/lib-2"),
3124 Default::default(),
3125 )
3126 .await
3127 .unwrap();
3128 executor.run_until_parked();
3129 tree.read_with(cx, |tree, _| {
3130 assert_eq!(
3131 tree.entries(false)
3132 .map(|entry| entry.path.as_ref())
3133 .collect::<Vec<_>>(),
3134 vec![
3135 Path::new(""),
3136 Path::new("lib"),
3137 Path::new("lib/a"),
3138 Path::new("lib/a/a.txt"),
3139 Path::new("lib/a/lib-2"),
3140 Path::new("lib/b"),
3141 Path::new("lib/b/b.txt"),
3142 Path::new("lib/b/lib"),
3143 ]
3144 );
3145 });
3146 }
3147
3148 #[gpui::test]
3149 async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
3150 let parent_dir = temp_tree(json!({
3151 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
3152 "tree": {
3153 ".git": {},
3154 ".gitignore": "ignored-dir\n",
3155 "tracked-dir": {
3156 "tracked-file1": "",
3157 "ancestor-ignored-file1": "",
3158 },
3159 "ignored-dir": {
3160 "ignored-file1": ""
3161 }
3162 }
3163 }));
3164 let dir = parent_dir.path().join("tree");
3165
3166 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3167
3168 let tree = Worktree::local(
3169 client,
3170 dir.as_path(),
3171 true,
3172 Arc::new(RealFs),
3173 Default::default(),
3174 &mut cx.to_async(),
3175 )
3176 .await
3177 .unwrap();
3178 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3179 .await;
3180 tree.flush_fs_events(cx).await;
3181 cx.read(|cx| {
3182 let tree = tree.read(cx);
3183 assert!(
3184 !tree
3185 .entry_for_path("tracked-dir/tracked-file1")
3186 .unwrap()
3187 .is_ignored
3188 );
3189 assert!(
3190 tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
3191 .unwrap()
3192 .is_ignored
3193 );
3194 assert!(
3195 tree.entry_for_path("ignored-dir/ignored-file1")
3196 .unwrap()
3197 .is_ignored
3198 );
3199 });
3200
3201 std::fs::write(dir.join("tracked-dir/tracked-file2"), "").unwrap();
3202 std::fs::write(dir.join("tracked-dir/ancestor-ignored-file2"), "").unwrap();
3203 std::fs::write(dir.join("ignored-dir/ignored-file2"), "").unwrap();
3204 tree.flush_fs_events(cx).await;
3205 cx.read(|cx| {
3206 let tree = tree.read(cx);
3207 assert!(
3208 !tree
3209 .entry_for_path("tracked-dir/tracked-file2")
3210 .unwrap()
3211 .is_ignored
3212 );
3213 assert!(
3214 tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
3215 .unwrap()
3216 .is_ignored
3217 );
3218 assert!(
3219 tree.entry_for_path("ignored-dir/ignored-file2")
3220 .unwrap()
3221 .is_ignored
3222 );
3223 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
3224 });
3225 }
3226
3227 #[gpui::test]
3228 async fn test_git_repository_for_path(cx: &mut TestAppContext) {
3229 let root = temp_tree(json!({
3230 "dir1": {
3231 ".git": {},
3232 "deps": {
3233 "dep1": {
3234 ".git": {},
3235 "src": {
3236 "a.txt": ""
3237 }
3238 }
3239 },
3240 "src": {
3241 "b.txt": ""
3242 }
3243 },
3244 "c.txt": "",
3245 }));
3246
3247 let http_client = FakeHttpClient::with_404_response();
3248 let client = cx.read(|cx| Client::new(http_client, cx));
3249 let tree = Worktree::local(
3250 client,
3251 root.path(),
3252 true,
3253 Arc::new(RealFs),
3254 Default::default(),
3255 &mut cx.to_async(),
3256 )
3257 .await
3258 .unwrap();
3259
3260 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3261 .await;
3262 tree.flush_fs_events(cx).await;
3263
3264 tree.read_with(cx, |tree, _cx| {
3265 let tree = tree.as_local().unwrap();
3266
3267 assert!(tree.repo_for("c.txt".as_ref()).is_none());
3268
3269 let repo = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap();
3270 assert_eq!(repo.content_path.as_ref(), Path::new("dir1"));
3271 assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/.git"));
3272
3273 let repo = tree.repo_for("dir1/deps/dep1/src/a.txt".as_ref()).unwrap();
3274 assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1"));
3275 assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/deps/dep1/.git"),);
3276 });
3277
3278 let original_scan_id = tree.read_with(cx, |tree, _cx| {
3279 let tree = tree.as_local().unwrap();
3280 tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id
3281 });
3282
3283 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
3284 tree.flush_fs_events(cx).await;
3285
3286 tree.read_with(cx, |tree, _cx| {
3287 let tree = tree.as_local().unwrap();
3288 let new_scan_id = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id;
3289 assert_ne!(
3290 original_scan_id, new_scan_id,
3291 "original {original_scan_id}, new {new_scan_id}"
3292 );
3293 });
3294
3295 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
3296 tree.flush_fs_events(cx).await;
3297
3298 tree.read_with(cx, |tree, _cx| {
3299 let tree = tree.as_local().unwrap();
3300
3301 assert!(tree.repo_for("dir1/src/b.txt".as_ref()).is_none());
3302 });
3303 }
3304
3305 #[test]
3306 fn test_changed_repos() {
3307 fn fake_entry(git_dir_path: impl AsRef<Path>, scan_id: usize) -> GitRepositoryEntry {
3308 GitRepositoryEntry {
3309 repo: Arc::new(Mutex::new(FakeGitRepository::default())),
3310 scan_id,
3311 content_path: git_dir_path.as_ref().parent().unwrap().into(),
3312 git_dir_path: git_dir_path.as_ref().into(),
3313 }
3314 }
3315
3316 let prev_repos: Vec<GitRepositoryEntry> = vec![
3317 fake_entry("/.git", 0),
3318 fake_entry("/a/.git", 0),
3319 fake_entry("/a/b/.git", 0),
3320 ];
3321
3322 let new_repos: Vec<GitRepositoryEntry> = vec![
3323 fake_entry("/a/.git", 1),
3324 fake_entry("/a/b/.git", 0),
3325 fake_entry("/a/c/.git", 0),
3326 ];
3327
3328 let res = LocalWorktree::changed_repos(&prev_repos, &new_repos);
3329
3330 // Deletion retained
3331 assert!(res
3332 .iter()
3333 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/.git") && repo.scan_id == 0)
3334 .is_some());
3335
3336 // Update retained
3337 assert!(res
3338 .iter()
3339 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/.git") && repo.scan_id == 1)
3340 .is_some());
3341
3342 // Addition retained
3343 assert!(res
3344 .iter()
3345 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/c/.git") && repo.scan_id == 0)
3346 .is_some());
3347
        // No change, not retained
3349 assert!(res
3350 .iter()
3351 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/b/.git") && repo.scan_id == 0)
3352 .is_none());
3353 }
3354
3355 #[gpui::test]
3356 async fn test_write_file(cx: &mut TestAppContext) {
3357 let dir = temp_tree(json!({
3358 ".git": {},
3359 ".gitignore": "ignored-dir\n",
3360 "tracked-dir": {},
3361 "ignored-dir": {}
3362 }));
3363
3364 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3365
3366 let tree = Worktree::local(
3367 client,
3368 dir.path(),
3369 true,
3370 Arc::new(RealFs),
3371 Default::default(),
3372 &mut cx.to_async(),
3373 )
3374 .await
3375 .unwrap();
3376 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3377 .await;
3378 tree.flush_fs_events(cx).await;
3379
3380 tree.update(cx, |tree, cx| {
3381 tree.as_local().unwrap().write_file(
3382 Path::new("tracked-dir/file.txt"),
3383 "hello".into(),
3384 Default::default(),
3385 cx,
3386 )
3387 })
3388 .await
3389 .unwrap();
3390 tree.update(cx, |tree, cx| {
3391 tree.as_local().unwrap().write_file(
3392 Path::new("ignored-dir/file.txt"),
3393 "world".into(),
3394 Default::default(),
3395 cx,
3396 )
3397 })
3398 .await
3399 .unwrap();
3400
3401 tree.read_with(cx, |tree, _| {
3402 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
3403 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
3404 assert!(!tracked.is_ignored);
3405 assert!(ignored.is_ignored);
3406 });
3407 }
3408
3409 #[gpui::test(iterations = 30)]
3410 async fn test_create_directory(cx: &mut TestAppContext) {
3411 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3412
3413 let fs = FakeFs::new(cx.background());
3414 fs.insert_tree(
3415 "/a",
3416 json!({
3417 "b": {},
3418 "c": {},
3419 "d": {},
3420 }),
3421 )
3422 .await;
3423
3424 let tree = Worktree::local(
3425 client,
3426 "/a".as_ref(),
3427 true,
3428 fs,
3429 Default::default(),
3430 &mut cx.to_async(),
3431 )
3432 .await
3433 .unwrap();
3434
3435 let entry = tree
3436 .update(cx, |tree, cx| {
3437 tree.as_local_mut()
3438 .unwrap()
3439 .create_entry("a/e".as_ref(), true, cx)
3440 })
3441 .await
3442 .unwrap();
3443 assert!(entry.is_dir());
3444
3445 cx.foreground().run_until_parked();
3446 tree.read_with(cx, |tree, _| {
3447 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
3448 });
3449 }
3450
3451 #[gpui::test(iterations = 100)]
3452 fn test_random(mut rng: StdRng) {
3453 let operations = env::var("OPERATIONS")
3454 .map(|o| o.parse().unwrap())
3455 .unwrap_or(40);
3456 let initial_entries = env::var("INITIAL_ENTRIES")
3457 .map(|o| o.parse().unwrap())
3458 .unwrap_or(20);
3459
3460 let root_dir = tempdir::TempDir::new("worktree-test").unwrap();
3461 for _ in 0..initial_entries {
3462 randomly_mutate_tree(root_dir.path(), 1.0, &mut rng).unwrap();
3463 }
3464 log::info!("Generated initial tree");
3465
3466 let (notify_tx, _notify_rx) = mpsc::unbounded();
3467 let fs = Arc::new(RealFs);
3468 let next_entry_id = Arc::new(AtomicUsize::new(0));
3469 let mut initial_snapshot = LocalSnapshot {
3470 removed_entry_ids: Default::default(),
3471 ignores_by_parent_abs_path: Default::default(),
3472 git_repositories: Default::default(),
3473 next_entry_id: next_entry_id.clone(),
3474 snapshot: Snapshot {
3475 id: WorktreeId::from_usize(0),
3476 entries_by_path: Default::default(),
3477 entries_by_id: Default::default(),
3478 abs_path: root_dir.path().into(),
3479 root_name: Default::default(),
3480 root_char_bag: Default::default(),
3481 scan_id: 0,
3482 is_complete: true,
3483 },
3484 extension_counts: Default::default(),
3485 };
3486 initial_snapshot.insert_entry(
3487 Entry::new(
3488 Path::new("").into(),
3489 &smol::block_on(fs.metadata(root_dir.path()))
3490 .unwrap()
3491 .unwrap(),
3492 &next_entry_id,
3493 Default::default(),
3494 ),
3495 fs.as_ref(),
3496 );
3497 let mut scanner = BackgroundScanner::new(
3498 Arc::new(Mutex::new(initial_snapshot.clone())),
3499 notify_tx,
3500 fs.clone(),
3501 Arc::new(gpui::executor::Background::new()),
3502 );
3503 smol::block_on(scanner.scan_dirs()).unwrap();
3504 scanner.snapshot().check_invariants();
3505
3506 let mut events = Vec::new();
3507 let mut snapshots = Vec::new();
3508 let mut mutations_len = operations;
3509 while mutations_len > 1 {
3510 if !events.is_empty() && rng.gen_bool(0.4) {
3511 let len = rng.gen_range(0..=events.len());
3512 let to_deliver = events.drain(0..len).collect::<Vec<_>>();
3513 log::info!("Delivering events: {:#?}", to_deliver);
3514 smol::block_on(scanner.process_events(to_deliver));
3515 scanner.snapshot().check_invariants();
3516 } else {
3517 events.extend(randomly_mutate_tree(root_dir.path(), 0.6, &mut rng).unwrap());
3518 mutations_len -= 1;
3519 }
3520
3521 if rng.gen_bool(0.2) {
3522 snapshots.push(scanner.snapshot());
3523 }
3524 }
3525 log::info!("Quiescing: {:#?}", events);
3526 smol::block_on(scanner.process_events(events));
3527 scanner.snapshot().check_invariants();
3528
3529 let (notify_tx, _notify_rx) = mpsc::unbounded();
3530 let mut new_scanner = BackgroundScanner::new(
3531 Arc::new(Mutex::new(initial_snapshot)),
3532 notify_tx,
3533 scanner.fs.clone(),
3534 scanner.executor.clone(),
3535 );
3536 smol::block_on(new_scanner.scan_dirs()).unwrap();
3537 assert_eq!(
3538 scanner.snapshot().to_vec(true),
3539 new_scanner.snapshot().to_vec(true)
3540 );
3541
3542 for mut prev_snapshot in snapshots {
3543 let include_ignored = rng.gen::<bool>();
3544 if !include_ignored {
3545 let mut entries_by_path_edits = Vec::new();
3546 let mut entries_by_id_edits = Vec::new();
3547 for entry in prev_snapshot
3548 .entries_by_id
3549 .cursor::<()>()
3550 .filter(|e| e.is_ignored)
3551 {
3552 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
3553 entries_by_id_edits.push(Edit::Remove(entry.id));
3554 }
3555
3556 prev_snapshot
3557 .entries_by_path
3558 .edit(entries_by_path_edits, &());
3559 prev_snapshot.entries_by_id.edit(entries_by_id_edits, &());
3560 }
3561
3562 let update = scanner
3563 .snapshot()
3564 .build_update(&prev_snapshot, 0, 0, include_ignored);
3565 prev_snapshot.apply_remote_update(update).unwrap();
3566 assert_eq!(
3567 prev_snapshot.to_vec(true),
3568 scanner.snapshot().to_vec(include_ignored)
3569 );
3570 }
3571 }
3572
3573 fn randomly_mutate_tree(
3574 root_path: &Path,
3575 insertion_probability: f64,
3576 rng: &mut impl Rng,
3577 ) -> Result<Vec<fsevent::Event>> {
3578 let root_path = root_path.canonicalize().unwrap();
3579 let (dirs, files) = read_dir_recursive(root_path.clone());
3580
3581 let mut events = Vec::new();
3582 let mut record_event = |path: PathBuf| {
3583 events.push(fsevent::Event {
3584 event_id: SystemTime::now()
3585 .duration_since(UNIX_EPOCH)
3586 .unwrap()
3587 .as_secs(),
3588 flags: fsevent::StreamFlags::empty(),
3589 path,
3590 });
3591 };
3592
3593 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
3594 let path = dirs.choose(rng).unwrap();
3595 let new_path = path.join(gen_name(rng));
3596
3597 if rng.gen() {
3598 log::info!("Creating dir {:?}", new_path.strip_prefix(root_path)?);
3599 std::fs::create_dir(&new_path)?;
3600 } else {
3601 log::info!("Creating file {:?}", new_path.strip_prefix(root_path)?);
3602 std::fs::write(&new_path, "")?;
3603 }
3604 record_event(new_path);
3605 } else if rng.gen_bool(0.05) {
3606 let ignore_dir_path = dirs.choose(rng).unwrap();
3607 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
3608
3609 let (subdirs, subfiles) = read_dir_recursive(ignore_dir_path.clone());
3610 let files_to_ignore = {
3611 let len = rng.gen_range(0..=subfiles.len());
3612 subfiles.choose_multiple(rng, len)
3613 };
3614 let dirs_to_ignore = {
3615 let len = rng.gen_range(0..subdirs.len());
3616 subdirs.choose_multiple(rng, len)
3617 };
3618
3619 let mut ignore_contents = String::new();
3620 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
3621 writeln!(
3622 ignore_contents,
3623 "{}",
3624 path_to_ignore
3625 .strip_prefix(&ignore_dir_path)?
3626 .to_str()
3627 .unwrap()
3628 )
3629 .unwrap();
3630 }
3631 log::info!(
3632 "Creating {:?} with contents:\n{}",
3633 ignore_path.strip_prefix(&root_path)?,
3634 ignore_contents
3635 );
3636 std::fs::write(&ignore_path, ignore_contents).unwrap();
3637 record_event(ignore_path);
3638 } else {
3639 let old_path = {
3640 let file_path = files.choose(rng);
3641 let dir_path = dirs[1..].choose(rng);
3642 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
3643 };
3644
3645 let is_rename = rng.gen();
3646 if is_rename {
3647 let new_path_parent = dirs
3648 .iter()
3649 .filter(|d| !d.starts_with(old_path))
3650 .choose(rng)
3651 .unwrap();
3652
3653 let overwrite_existing_dir =
3654 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
3655 let new_path = if overwrite_existing_dir {
3656 std::fs::remove_dir_all(&new_path_parent).ok();
3657 new_path_parent.to_path_buf()
3658 } else {
3659 new_path_parent.join(gen_name(rng))
3660 };
3661
3662 log::info!(
3663 "Renaming {:?} to {}{:?}",
3664 old_path.strip_prefix(&root_path)?,
3665 if overwrite_existing_dir {
3666 "overwrite "
3667 } else {
3668 ""
3669 },
3670 new_path.strip_prefix(&root_path)?
3671 );
3672 std::fs::rename(&old_path, &new_path)?;
3673 record_event(old_path.clone());
3674 record_event(new_path);
3675 } else if old_path.is_dir() {
3676 let (dirs, files) = read_dir_recursive(old_path.clone());
3677
3678 log::info!("Deleting dir {:?}", old_path.strip_prefix(&root_path)?);
3679 std::fs::remove_dir_all(&old_path).unwrap();
3680 for file in files {
3681 record_event(file);
3682 }
3683 for dir in dirs {
3684 record_event(dir);
3685 }
3686 } else {
3687 log::info!("Deleting file {:?}", old_path.strip_prefix(&root_path)?);
3688 std::fs::remove_file(old_path).unwrap();
3689 record_event(old_path.clone());
3690 }
3691 }
3692
3693 Ok(events)
3694 }
3695
3696 fn read_dir_recursive(path: PathBuf) -> (Vec<PathBuf>, Vec<PathBuf>) {
3697 let child_entries = std::fs::read_dir(&path).unwrap();
3698 let mut dirs = vec![path];
3699 let mut files = Vec::new();
3700 for child_entry in child_entries {
3701 let child_path = child_entry.unwrap().path();
3702 if child_path.is_dir() {
3703 let (child_dirs, child_files) = read_dir_recursive(child_path);
3704 dirs.extend(child_dirs);
3705 files.extend(child_files);
3706 } else {
3707 files.push(child_path);
3708 }
3709 }
3710 (dirs, files)
3711 }
3712
3713 fn gen_name(rng: &mut impl Rng) -> String {
3714 (0..6)
3715 .map(|_| rng.sample(rand::distributions::Alphanumeric))
3716 .map(char::from)
3717 .collect()
3718 }
3719
3720 impl LocalSnapshot {
3721 fn check_invariants(&self) {
3722 let mut files = self.files(true, 0);
3723 let mut visible_files = self.files(false, 0);
3724 for entry in self.entries_by_path.cursor::<()>() {
3725 if entry.is_file() {
3726 assert_eq!(files.next().unwrap().inode, entry.inode);
3727 if !entry.is_ignored {
3728 assert_eq!(visible_files.next().unwrap().inode, entry.inode);
3729 }
3730 }
3731 }
3732 assert!(files.next().is_none());
3733 assert!(visible_files.next().is_none());
3734
3735 let mut bfs_paths = Vec::new();
3736 let mut stack = vec![Path::new("")];
3737 while let Some(path) = stack.pop() {
3738 bfs_paths.push(path);
3739 let ix = stack.len();
3740 for child_entry in self.child_entries(path) {
3741 stack.insert(ix, &child_entry.path);
3742 }
3743 }
3744
3745 let dfs_paths_via_iter = self
3746 .entries_by_path
3747 .cursor::<()>()
3748 .map(|e| e.path.as_ref())
3749 .collect::<Vec<_>>();
3750 assert_eq!(bfs_paths, dfs_paths_via_iter);
3751
3752 let dfs_paths_via_traversal = self
3753 .entries(true)
3754 .map(|e| e.path.as_ref())
3755 .collect::<Vec<_>>();
3756 assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);
3757
3758 for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
3759 let ignore_parent_path =
3760 ignore_parent_abs_path.strip_prefix(&self.abs_path).unwrap();
3761 assert!(self.entry_for_path(&ignore_parent_path).is_some());
3762 assert!(self
3763 .entry_for_path(ignore_parent_path.join(&*GITIGNORE))
3764 .is_some());
3765 }
3766
3767 // Ensure extension counts are correct.
3768 let mut expected_extension_counts = HashMap::default();
3769 for extension in self.entries(false).filter_map(|e| e.path.extension()) {
3770 *expected_extension_counts
3771 .entry(extension.into())
3772 .or_insert(0) += 1;
3773 }
3774 assert_eq!(self.extension_counts, expected_extension_counts);
3775 }
3776
3777 fn to_vec(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> {
3778 let mut paths = Vec::new();
3779 for entry in self.entries_by_path.cursor::<()>() {
3780 if include_ignored || !entry.is_ignored {
3781 paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
3782 }
3783 }
3784 paths.sort_by(|a, b| a.0.cmp(b.0));
3785 paths
3786 }
3787 }
3788}