1use super::{
2 fs::{self, Fs},
3 ignore::IgnoreStack,
4 DiagnosticSummary,
5};
6use crate::{copy_recursive, ProjectEntryId, RemoveOptions};
7use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client};
10use clock::ReplicaId;
11use collections::{HashMap, VecDeque};
12use futures::{
13 channel::{
14 mpsc::{self, UnboundedSender},
15 oneshot,
16 },
17 Stream, StreamExt,
18};
19use fuzzy::CharBag;
20use git::repository::GitRepository;
21use git::{DOT_GIT, GITIGNORE};
22use gpui::{
23 executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
24 Task,
25};
26use language::{
27 proto::{deserialize_version, serialize_line_ending, serialize_version},
28 Buffer, DiagnosticEntry, LineEnding, PointUtf16, Rope,
29};
30use parking_lot::Mutex;
31use postage::{
32 prelude::{Sink as _, Stream as _},
33 watch,
34};
35use settings::Settings;
36use smol::channel::{self, Sender};
37use std::{
38 any::Any,
39 cmp::{self, Ordering},
40 convert::TryFrom,
41 ffi::{OsStr, OsString},
42 fmt,
43 future::Future,
44 mem,
45 ops::{Deref, DerefMut},
46 os::unix::prelude::{OsStrExt, OsStringExt},
47 path::{Path, PathBuf},
48 sync::{atomic::AtomicUsize, Arc},
49 task::Poll,
50 time::{Duration, SystemTime},
51};
52use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
53use util::{ResultExt, TryFutureExt};
54
55#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
56pub struct WorktreeId(usize);
57
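// A worktree is one root directory tracked by a project: either a directory on the local
// filesystem, or a replica of a collaborator's worktree kept in sync over RPC.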
58#[allow(clippy::large_enum_variant)]
59pub enum Worktree {
60 Local(LocalWorktree),
61 Remote(RemoteWorktree),
62}
63
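// A worktree backed by the local filesystem. A background scanner task keeps
// `background_snapshot` up to date, and `poll_snapshot` copies it into `snapshot` on the
// foreground.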
64pub struct LocalWorktree {
65 snapshot: LocalSnapshot,
66 background_snapshot: Arc<Mutex<LocalSnapshot>>,
67 last_scan_state_rx: watch::Receiver<ScanState>,
68 _background_scanner_task: Option<Task<()>>,
69 poll_task: Option<Task<()>>,
70 share: Option<ShareState>,
71 diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<PointUtf16>>>,
72 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
73 client: Arc<Client>,
74 fs: Arc<dyn Fs>,
75 visible: bool,
76}
77
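// A worktree hosted by a remote collaborator. `proto::UpdateWorktree` messages received from
// the host are applied to `background_snapshot` and then polled into `snapshot`.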
78pub struct RemoteWorktree {
79 pub snapshot: Snapshot,
80 pub(crate) background_snapshot: Arc<Mutex<Snapshot>>,
81 project_id: u64,
82 client: Arc<Client>,
83 updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
84 snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
85 replica_id: ReplicaId,
86 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
87 visible: bool,
88}
89
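// A point-in-time view of a worktree's entries, indexed both by path (`entries_by_path`)
// and by entry id (`entries_by_id`).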
90#[derive(Clone)]
91pub struct Snapshot {
92 id: WorktreeId,
93 root_name: String,
94 root_char_bag: CharBag,
95 entries_by_path: SumTree<Entry>,
96 entries_by_id: SumTree<PathEntry>,
97 scan_id: usize,
98 is_complete: bool,
99}
100
101#[derive(Clone)]
102pub struct GitRepositoryEntry {
103 pub(crate) repo: Arc<Mutex<dyn GitRepository>>,
104
105 pub(crate) scan_id: usize,
106 // Path to folder containing the .git file or directory
107 pub(crate) content_path: Arc<Path>,
108 // Path to the actual .git folder.
109 // Note: if .git is a file, this points to the folder indicated by the .git file
110 pub(crate) git_dir_path: Arc<Path>,
111}
112
113impl std::fmt::Debug for GitRepositoryEntry {
114 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
115 f.debug_struct("GitRepositoryEntry")
116 .field("content_path", &self.content_path)
117 .field("git_dir_path", &self.git_dir_path)
118 .field("libgit_repository", &"LibGitRepository")
119 .finish()
120 }
121}
122
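// A `Snapshot` plus the local-only state needed to keep it current: loaded `.gitignore`
// files, discovered git repositories, recycled entry ids, and per-extension file counts.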
123pub struct LocalSnapshot {
124 abs_path: Arc<Path>,
125 ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
126 git_repositories: Vec<GitRepositoryEntry>,
127 removed_entry_ids: HashMap<u64, ProjectEntryId>,
128 next_entry_id: Arc<AtomicUsize>,
129 snapshot: Snapshot,
130 extension_counts: HashMap<OsString, usize>,
131}
132
133impl Clone for LocalSnapshot {
134 fn clone(&self) -> Self {
135 Self {
136 abs_path: self.abs_path.clone(),
137 ignores_by_parent_abs_path: self.ignores_by_parent_abs_path.clone(),
138 git_repositories: self.git_repositories.iter().cloned().collect(),
139 removed_entry_ids: self.removed_entry_ids.clone(),
140 next_entry_id: self.next_entry_id.clone(),
141 snapshot: self.snapshot.clone(),
142 extension_counts: self.extension_counts.clone(),
143 }
144 }
145}
146
147impl Deref for LocalSnapshot {
148 type Target = Snapshot;
149
150 fn deref(&self) -> &Self::Target {
151 &self.snapshot
152 }
153}
154
155impl DerefMut for LocalSnapshot {
156 fn deref_mut(&mut self) -> &mut Self::Target {
157 &mut self.snapshot
158 }
159}
160
161#[derive(Clone, Debug)]
162enum ScanState {
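    /// The worktree is not currently scanning; its snapshot is up to date.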
163 Idle,
164 /// The worktree is performing its initial scan of the filesystem.
165 Initializing,
166 /// The worktree is updating in response to filesystem events.
167 Updating,
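    /// The most recent scan or update failed.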
168 Err(Arc<anyhow::Error>),
169}
170
171struct ShareState {
172 project_id: u64,
173 snapshots_tx: watch::Sender<LocalSnapshot>,
174 _maintain_remote_snapshot: Option<Task<Option<()>>>,
175}
176
177pub enum Event {
178 UpdatedEntries,
179 UpdatedGitRepositories(Vec<GitRepositoryEntry>),
180}
181
182impl Entity for Worktree {
183 type Event = Event;
184}
185
186impl Worktree {
187 pub async fn local(
188 client: Arc<Client>,
189 path: impl Into<Arc<Path>>,
190 visible: bool,
191 fs: Arc<dyn Fs>,
192 next_entry_id: Arc<AtomicUsize>,
193 cx: &mut AsyncAppContext,
194 ) -> Result<ModelHandle<Self>> {
195 let (tree, scan_states_tx) =
196 LocalWorktree::create(client, path, visible, fs.clone(), next_entry_id, cx).await?;
197 tree.update(cx, |tree, cx| {
198 let tree = tree.as_local_mut().unwrap();
199 let abs_path = tree.abs_path().clone();
200 let background_snapshot = tree.background_snapshot.clone();
201 let background = cx.background().clone();
202 tree._background_scanner_task = Some(cx.background().spawn(async move {
203 let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
204 let scanner =
205 BackgroundScanner::new(background_snapshot, scan_states_tx, fs, background);
206 scanner.run(events).await;
207 }));
208 });
209 Ok(tree)
210 }
211
212 pub fn remote(
213 project_remote_id: u64,
214 replica_id: ReplicaId,
215 worktree: proto::WorktreeMetadata,
216 client: Arc<Client>,
217 cx: &mut MutableAppContext,
218 ) -> ModelHandle<Self> {
219 let remote_id = worktree.id;
220 let root_char_bag: CharBag = worktree
221 .root_name
222 .chars()
223 .map(|c| c.to_ascii_lowercase())
224 .collect();
225 let root_name = worktree.root_name.clone();
226 let visible = worktree.visible;
227 let snapshot = Snapshot {
228 id: WorktreeId(remote_id as usize),
229 root_name,
230 root_char_bag,
231 entries_by_path: Default::default(),
232 entries_by_id: Default::default(),
233 scan_id: 0,
234 is_complete: false,
235 };
236
237 let (updates_tx, mut updates_rx) = mpsc::unbounded();
238 let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
239 let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
240 let worktree_handle = cx.add_model(|_: &mut ModelContext<Worktree>| {
241 Worktree::Remote(RemoteWorktree {
242 project_id: project_remote_id,
243 replica_id,
244 snapshot: snapshot.clone(),
245 background_snapshot: background_snapshot.clone(),
246 updates_tx: Some(updates_tx),
247 snapshot_subscriptions: Default::default(),
248 client: client.clone(),
249 diagnostic_summaries: Default::default(),
250 visible,
251 })
252 });
253
254 cx.background()
255 .spawn(async move {
256 while let Some(update) = updates_rx.next().await {
257 if let Err(error) = background_snapshot.lock().apply_remote_update(update) {
258 log::error!("error applying worktree update: {}", error);
259 }
260 snapshot_updated_tx.send(()).await.ok();
261 }
262 })
263 .detach();
264
265 cx.spawn(|mut cx| {
266 let this = worktree_handle.downgrade();
267 async move {
268 while (snapshot_updated_rx.recv().await).is_some() {
269 if let Some(this) = this.upgrade(&cx) {
270 this.update(&mut cx, |this, cx| {
271 this.poll_snapshot(cx);
272 let this = this.as_remote_mut().unwrap();
273 while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
274 if this.observed_snapshot(*scan_id) {
275 let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
276 let _ = tx.send(());
277 } else {
278 break;
279 }
280 }
281 });
282 } else {
283 break;
284 }
285 }
286 }
287 })
288 .detach();
289
290 worktree_handle
291 }
292
293 pub fn as_local(&self) -> Option<&LocalWorktree> {
294 if let Worktree::Local(worktree) = self {
295 Some(worktree)
296 } else {
297 None
298 }
299 }
300
301 pub fn as_remote(&self) -> Option<&RemoteWorktree> {
302 if let Worktree::Remote(worktree) = self {
303 Some(worktree)
304 } else {
305 None
306 }
307 }
308
309 pub fn as_local_mut(&mut self) -> Option<&mut LocalWorktree> {
310 if let Worktree::Local(worktree) = self {
311 Some(worktree)
312 } else {
313 None
314 }
315 }
316
317 pub fn as_remote_mut(&mut self) -> Option<&mut RemoteWorktree> {
318 if let Worktree::Remote(worktree) = self {
319 Some(worktree)
320 } else {
321 None
322 }
323 }
324
325 pub fn is_local(&self) -> bool {
326 matches!(self, Worktree::Local(_))
327 }
328
329 pub fn is_remote(&self) -> bool {
330 !self.is_local()
331 }
332
333 pub fn snapshot(&self) -> Snapshot {
334 match self {
335 Worktree::Local(worktree) => worktree.snapshot().snapshot,
336 Worktree::Remote(worktree) => worktree.snapshot(),
337 }
338 }
339
340 pub fn scan_id(&self) -> usize {
341 match self {
342 Worktree::Local(worktree) => worktree.snapshot.scan_id,
343 Worktree::Remote(worktree) => worktree.snapshot.scan_id,
344 }
345 }
346
347 pub fn is_visible(&self) -> bool {
348 match self {
349 Worktree::Local(worktree) => worktree.visible,
350 Worktree::Remote(worktree) => worktree.visible,
351 }
352 }
353
354 pub fn replica_id(&self) -> ReplicaId {
355 match self {
356 Worktree::Local(_) => 0,
357 Worktree::Remote(worktree) => worktree.replica_id,
358 }
359 }
360
361 pub fn diagnostic_summaries(
362 &self,
363 ) -> impl Iterator<Item = (Arc<Path>, DiagnosticSummary)> + '_ {
364 match self {
365 Worktree::Local(worktree) => &worktree.diagnostic_summaries,
366 Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
367 }
368 .iter()
369 .map(|(path, summary)| (path.0.clone(), *summary))
370 }
371
372 fn poll_snapshot(&mut self, cx: &mut ModelContext<Self>) {
373 match self {
374 Self::Local(worktree) => worktree.poll_snapshot(false, cx),
375 Self::Remote(worktree) => worktree.poll_snapshot(cx),
376 };
377 }
378}
379
380impl LocalWorktree {
381 async fn create(
382 client: Arc<Client>,
383 path: impl Into<Arc<Path>>,
384 visible: bool,
385 fs: Arc<dyn Fs>,
386 next_entry_id: Arc<AtomicUsize>,
387 cx: &mut AsyncAppContext,
388 ) -> Result<(ModelHandle<Worktree>, UnboundedSender<ScanState>)> {
389 let abs_path = path.into();
390 let path: Arc<Path> = Arc::from(Path::new(""));
391
        // After determining whether the root entry is a file or a directory, populate the
        // snapshot's "root name", which is used for fuzzy matching.
394 let root_name = abs_path
395 .file_name()
396 .map_or(String::new(), |f| f.to_string_lossy().to_string());
397 let root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect();
398 let metadata = fs
399 .metadata(&abs_path)
400 .await
401 .context("failed to stat worktree path")?;
402
403 let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
404 let (mut last_scan_state_tx, last_scan_state_rx) =
405 watch::channel_with(ScanState::Initializing);
406 let tree = cx.add_model(move |cx: &mut ModelContext<Worktree>| {
407 let mut snapshot = LocalSnapshot {
408 abs_path,
409 ignores_by_parent_abs_path: Default::default(),
410 git_repositories: Default::default(),
411 removed_entry_ids: Default::default(),
412 next_entry_id,
413 snapshot: Snapshot {
414 id: WorktreeId::from_usize(cx.model_id()),
415 root_name: root_name.clone(),
416 root_char_bag,
417 entries_by_path: Default::default(),
418 entries_by_id: Default::default(),
419 scan_id: 0,
420 is_complete: true,
421 },
422 extension_counts: Default::default(),
423 };
424 if let Some(metadata) = metadata {
425 let entry = Entry::new(
426 path,
427 &metadata,
428 &snapshot.next_entry_id,
429 snapshot.root_char_bag,
430 );
431 snapshot.insert_entry(entry, fs.as_ref());
432 }
433
434 let tree = Self {
435 snapshot: snapshot.clone(),
436 background_snapshot: Arc::new(Mutex::new(snapshot)),
437 last_scan_state_rx,
438 _background_scanner_task: None,
439 share: None,
440 poll_task: None,
441 diagnostics: Default::default(),
442 diagnostic_summaries: Default::default(),
443 client,
444 fs,
445 visible,
446 };
447
448 cx.spawn_weak(|this, mut cx| async move {
449 while let Some(scan_state) = scan_states_rx.next().await {
450 if let Some(this) = this.upgrade(&cx) {
451 last_scan_state_tx.blocking_send(scan_state).ok();
452 this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
453 } else {
454 break;
455 }
456 }
457 })
458 .detach();
459
460 Worktree::Local(tree)
461 });
462
463 Ok((tree, scan_states_tx))
464 }
465
466 pub fn contains_abs_path(&self, path: &Path) -> bool {
467 path.starts_with(&self.abs_path)
468 }
469
470 fn absolutize(&self, path: &Path) -> PathBuf {
471 if path.file_name().is_some() {
472 self.abs_path.join(path)
473 } else {
474 self.abs_path.to_path_buf()
475 }
476 }
477
478 pub(crate) fn load_buffer(
479 &mut self,
480 path: &Path,
481 cx: &mut ModelContext<Worktree>,
482 ) -> Task<Result<ModelHandle<Buffer>>> {
483 let path = Arc::from(path);
484 cx.spawn(move |this, mut cx| async move {
485 let (file, contents, diff_base) = this
486 .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
487 .await?;
488 Ok(cx.add_model(|cx| {
489 let mut buffer = Buffer::from_file(0, contents, diff_base, Arc::new(file), cx);
490 buffer.git_diff_recalc(cx);
491 buffer
492 }))
493 })
494 }
495
496 pub fn diagnostics_for_path(&self, path: &Path) -> Option<Vec<DiagnosticEntry<PointUtf16>>> {
497 self.diagnostics.get(path).cloned()
498 }
499
500 pub fn update_diagnostics(
501 &mut self,
502 language_server_id: usize,
503 worktree_path: Arc<Path>,
504 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
505 _: &mut ModelContext<Worktree>,
506 ) -> Result<bool> {
507 self.diagnostics.remove(&worktree_path);
508 let old_summary = self
509 .diagnostic_summaries
510 .remove(&PathKey(worktree_path.clone()))
511 .unwrap_or_default();
512 let new_summary = DiagnosticSummary::new(language_server_id, &diagnostics);
513 if !new_summary.is_empty() {
514 self.diagnostic_summaries
515 .insert(PathKey(worktree_path.clone()), new_summary);
516 self.diagnostics.insert(worktree_path.clone(), diagnostics);
517 }
518
519 let updated = !old_summary.is_empty() || !new_summary.is_empty();
520 if updated {
521 if let Some(share) = self.share.as_ref() {
522 self.client
523 .send(proto::UpdateDiagnosticSummary {
524 project_id: share.project_id,
525 worktree_id: self.id().to_proto(),
526 summary: Some(proto::DiagnosticSummary {
527 path: worktree_path.to_string_lossy().to_string(),
528 language_server_id: language_server_id as u64,
529 error_count: new_summary.error_count as u32,
530 warning_count: new_summary.warning_count as u32,
531 }),
532 })
533 .log_err();
534 }
535 }
536
537 Ok(updated)
538 }
539
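    // Copy the latest background snapshot into the foreground snapshot. While the initial
    // scan is still running, this schedules another poll after a short delay.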
540 fn poll_snapshot(&mut self, force: bool, cx: &mut ModelContext<Worktree>) {
541 self.poll_task.take();
542
543 match self.scan_state() {
544 ScanState::Idle => {
545 let new_snapshot = self.background_snapshot.lock().clone();
546 let updated_repos = Self::changed_repos(
547 &self.snapshot.git_repositories,
548 &new_snapshot.git_repositories,
549 );
550 self.snapshot = new_snapshot;
551
552 if let Some(share) = self.share.as_mut() {
553 *share.snapshots_tx.borrow_mut() = self.snapshot.clone();
554 }
555
556 cx.emit(Event::UpdatedEntries);
557
558 if !updated_repos.is_empty() {
559 cx.emit(Event::UpdatedGitRepositories(updated_repos));
560 }
561 }
562
563 ScanState::Initializing => {
564 let is_fake_fs = self.fs.is_fake();
565
566 let new_snapshot = self.background_snapshot.lock().clone();
567 let updated_repos = Self::changed_repos(
568 &self.snapshot.git_repositories,
569 &new_snapshot.git_repositories,
570 );
571 self.snapshot = new_snapshot;
572
573 self.poll_task = Some(cx.spawn_weak(|this, mut cx| async move {
574 if is_fake_fs {
575 #[cfg(any(test, feature = "test-support"))]
576 cx.background().simulate_random_delay().await;
577 } else {
578 smol::Timer::after(Duration::from_millis(100)).await;
579 }
580 if let Some(this) = this.upgrade(&cx) {
581 this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
582 }
583 }));
584
585 cx.emit(Event::UpdatedEntries);
586
587 if !updated_repos.is_empty() {
588 cx.emit(Event::UpdatedGitRepositories(updated_repos));
589 }
590 }
591
592 _ => {
593 if force {
594 self.snapshot = self.background_snapshot.lock().clone();
595 }
596 }
597 }
598
599 cx.notify();
600 }
601
602 fn changed_repos(
603 old_repos: &[GitRepositoryEntry],
604 new_repos: &[GitRepositoryEntry],
605 ) -> Vec<GitRepositoryEntry> {
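        // A repository counts as changed if no repository with the same `git_dir_path` and
        // `scan_id` exists on the other side; diffing in both directions catches additions,
        // removals, and rescans.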
606 fn diff<'a>(
607 a: &'a [GitRepositoryEntry],
608 b: &'a [GitRepositoryEntry],
609 updated: &mut HashMap<&'a Path, GitRepositoryEntry>,
610 ) {
611 for a_repo in a {
612 let matched = b.iter().find(|b_repo| {
613 a_repo.git_dir_path == b_repo.git_dir_path && a_repo.scan_id == b_repo.scan_id
614 });
615
616 if matched.is_none() {
617 updated.insert(a_repo.git_dir_path.as_ref(), a_repo.clone());
618 }
619 }
620 }
621
622 let mut updated = HashMap::<&Path, GitRepositoryEntry>::default();
623
624 diff(old_repos, new_repos, &mut updated);
625 diff(new_repos, old_repos, &mut updated);
626
627 updated.into_values().collect()
628 }
629
630 pub fn scan_complete(&self) -> impl Future<Output = ()> {
631 let mut scan_state_rx = self.last_scan_state_rx.clone();
632 async move {
633 let mut scan_state = Some(scan_state_rx.borrow().clone());
634 while let Some(ScanState::Initializing | ScanState::Updating) = scan_state {
635 scan_state = scan_state_rx.recv().await;
636 }
637 }
638 }
639
640 fn scan_state(&self) -> ScanState {
641 self.last_scan_state_rx.borrow().clone()
642 }
643
644 pub fn snapshot(&self) -> LocalSnapshot {
645 self.snapshot.clone()
646 }
647
648 pub fn metadata_proto(&self) -> proto::WorktreeMetadata {
649 proto::WorktreeMetadata {
650 id: self.id().to_proto(),
651 root_name: self.root_name().to_string(),
652 visible: self.visible,
653 }
654 }
655
656 fn load(
657 &self,
658 path: &Path,
659 cx: &mut ModelContext<Worktree>,
660 ) -> Task<Result<(File, String, Option<String>)>> {
661 let handle = cx.handle();
662 let path = Arc::from(path);
663 let abs_path = self.absolutize(&path);
664 let fs = self.fs.clone();
665 let snapshot = self.snapshot();
666
667 let settings = cx.global::<Settings>();
668
        // Stopgap so we can ship: whether git index text is loaded is gated on the
        // `files_included` setting.
        // TODO:
        // - Rename the setting to control showing/hiding git gutters
        // - Unconditionally load index text for all files, then choose at rendering
        //   time based on settings
674
675 let files_included = settings.git_gutter().files_included(settings);
676
677 cx.spawn(|this, mut cx| async move {
678 let text = fs.load(&abs_path).await?;
679
680 let diff_base = match files_included {
681 settings::GitFilesIncluded::All | settings::GitFilesIncluded::OnlyTracked => {
682 let results = if let Some(repo) = snapshot.repo_for(&abs_path) {
683 cx.background()
684 .spawn({
685 let path = path.clone();
686 async move { repo.repo.lock().load_index(&path) }
687 })
688 .await
689 } else {
690 None
691 };
692
693 if files_included == settings::GitFilesIncluded::All {
694 results.or_else(|| Some(text.clone()))
695 } else {
696 results
697 }
698 }
699
700 settings::GitFilesIncluded::None => None,
701 };
702
703 // Eagerly populate the snapshot with an updated entry for the loaded file
704 let entry = this
705 .update(&mut cx, |this, cx| {
706 this.as_local()
707 .unwrap()
708 .refresh_entry(path, abs_path, None, cx)
709 })
710 .await?;
711
712 Ok((
713 File {
714 entry_id: Some(entry.id),
715 worktree: handle,
716 path: entry.path,
717 mtime: entry.mtime,
718 is_local: true,
719 },
720 text,
721 diff_base,
722 ))
723 })
724 }
725
726 pub fn save_buffer_as(
727 &self,
728 buffer_handle: ModelHandle<Buffer>,
729 path: impl Into<Arc<Path>>,
730 cx: &mut ModelContext<Worktree>,
731 ) -> Task<Result<()>> {
732 let buffer = buffer_handle.read(cx);
733 let text = buffer.as_rope().clone();
734 let fingerprint = text.fingerprint();
735 let version = buffer.version();
736 let save = self.write_file(path, text, buffer.line_ending(), cx);
737 let handle = cx.handle();
738 cx.as_mut().spawn(|mut cx| async move {
739 let entry = save.await?;
740 let file = File {
741 entry_id: Some(entry.id),
742 worktree: handle,
743 path: entry.path,
744 mtime: entry.mtime,
745 is_local: true,
746 };
747
748 buffer_handle.update(&mut cx, |buffer, cx| {
749 buffer.did_save(version, fingerprint, file.mtime, Some(Arc::new(file)), cx);
750 });
751
752 Ok(())
753 })
754 }
755
756 pub fn create_entry(
757 &self,
758 path: impl Into<Arc<Path>>,
759 is_dir: bool,
760 cx: &mut ModelContext<Worktree>,
761 ) -> Task<Result<Entry>> {
762 self.write_entry_internal(
763 path,
764 if is_dir {
765 None
766 } else {
767 Some(Default::default())
768 },
769 cx,
770 )
771 }
772
773 pub fn write_file(
774 &self,
775 path: impl Into<Arc<Path>>,
776 text: Rope,
777 line_ending: LineEnding,
778 cx: &mut ModelContext<Worktree>,
779 ) -> Task<Result<Entry>> {
780 self.write_entry_internal(path, Some((text, line_ending)), cx)
781 }
782
783 pub fn delete_entry(
784 &self,
785 entry_id: ProjectEntryId,
786 cx: &mut ModelContext<Worktree>,
787 ) -> Option<Task<Result<()>>> {
788 let entry = self.entry_for_id(entry_id)?.clone();
789 let abs_path = self.absolutize(&entry.path);
790 let delete = cx.background().spawn({
791 let fs = self.fs.clone();
792 let abs_path = abs_path;
793 async move {
794 if entry.is_file() {
795 fs.remove_file(&abs_path, Default::default()).await
796 } else {
797 fs.remove_dir(
798 &abs_path,
799 RemoveOptions {
800 recursive: true,
801 ignore_if_not_exists: false,
802 },
803 )
804 .await
805 }
806 }
807 });
808
809 Some(cx.spawn(|this, mut cx| async move {
810 delete.await?;
811 this.update(&mut cx, |this, cx| {
812 let this = this.as_local_mut().unwrap();
813 {
814 let mut snapshot = this.background_snapshot.lock();
815 snapshot.delete_entry(entry_id);
816 }
817 this.poll_snapshot(true, cx);
818 });
819 Ok(())
820 }))
821 }
822
823 pub fn rename_entry(
824 &self,
825 entry_id: ProjectEntryId,
826 new_path: impl Into<Arc<Path>>,
827 cx: &mut ModelContext<Worktree>,
828 ) -> Option<Task<Result<Entry>>> {
829 let old_path = self.entry_for_id(entry_id)?.path.clone();
830 let new_path = new_path.into();
831 let abs_old_path = self.absolutize(&old_path);
832 let abs_new_path = self.absolutize(&new_path);
833 let rename = cx.background().spawn({
834 let fs = self.fs.clone();
835 let abs_new_path = abs_new_path.clone();
836 async move {
837 fs.rename(&abs_old_path, &abs_new_path, Default::default())
838 .await
839 }
840 });
841
842 Some(cx.spawn(|this, mut cx| async move {
843 rename.await?;
844 let entry = this
845 .update(&mut cx, |this, cx| {
846 this.as_local_mut().unwrap().refresh_entry(
847 new_path.clone(),
848 abs_new_path,
849 Some(old_path),
850 cx,
851 )
852 })
853 .await?;
854 Ok(entry)
855 }))
856 }
857
858 pub fn copy_entry(
859 &self,
860 entry_id: ProjectEntryId,
861 new_path: impl Into<Arc<Path>>,
862 cx: &mut ModelContext<Worktree>,
863 ) -> Option<Task<Result<Entry>>> {
864 let old_path = self.entry_for_id(entry_id)?.path.clone();
865 let new_path = new_path.into();
866 let abs_old_path = self.absolutize(&old_path);
867 let abs_new_path = self.absolutize(&new_path);
868 let copy = cx.background().spawn({
869 let fs = self.fs.clone();
870 let abs_new_path = abs_new_path.clone();
871 async move {
872 copy_recursive(
873 fs.as_ref(),
874 &abs_old_path,
875 &abs_new_path,
876 Default::default(),
877 )
878 .await
879 }
880 });
881
882 Some(cx.spawn(|this, mut cx| async move {
883 copy.await?;
884 let entry = this
885 .update(&mut cx, |this, cx| {
886 this.as_local_mut().unwrap().refresh_entry(
887 new_path.clone(),
888 abs_new_path,
889 None,
890 cx,
891 )
892 })
893 .await?;
894 Ok(entry)
895 }))
896 }
897
898 fn write_entry_internal(
899 &self,
900 path: impl Into<Arc<Path>>,
901 text_if_file: Option<(Rope, LineEnding)>,
902 cx: &mut ModelContext<Worktree>,
903 ) -> Task<Result<Entry>> {
904 let path = path.into();
905 let abs_path = self.absolutize(&path);
906 let write = cx.background().spawn({
907 let fs = self.fs.clone();
908 let abs_path = abs_path.clone();
909 async move {
910 if let Some((text, line_ending)) = text_if_file {
911 fs.save(&abs_path, &text, line_ending).await
912 } else {
913 fs.create_dir(&abs_path).await
914 }
915 }
916 });
917
918 cx.spawn(|this, mut cx| async move {
919 write.await?;
920 let entry = this
921 .update(&mut cx, |this, cx| {
922 this.as_local_mut()
923 .unwrap()
924 .refresh_entry(path, abs_path, None, cx)
925 })
926 .await?;
927 Ok(entry)
928 })
929 }
930
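    // Re-stat `path` and insert an up-to-date entry for it into the background snapshot,
    // bumping the snapshot's scan id.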
931 fn refresh_entry(
932 &self,
933 path: Arc<Path>,
934 abs_path: PathBuf,
935 old_path: Option<Arc<Path>>,
936 cx: &mut ModelContext<Worktree>,
937 ) -> Task<Result<Entry>> {
938 let fs = self.fs.clone();
939 let root_char_bag;
940 let next_entry_id;
941 {
942 let snapshot = self.background_snapshot.lock();
943 root_char_bag = snapshot.root_char_bag;
944 next_entry_id = snapshot.next_entry_id.clone();
945 }
946 cx.spawn_weak(|this, mut cx| async move {
947 let metadata = fs
948 .metadata(&abs_path)
949 .await?
950 .ok_or_else(|| anyhow!("could not read saved file metadata"))?;
951 let this = this
952 .upgrade(&cx)
953 .ok_or_else(|| anyhow!("worktree was dropped"))?;
954 this.update(&mut cx, |this, cx| {
955 let this = this.as_local_mut().unwrap();
956 let inserted_entry;
957 {
958 let mut snapshot = this.background_snapshot.lock();
959 let mut entry = Entry::new(path, &metadata, &next_entry_id, root_char_bag);
960 entry.is_ignored = snapshot
961 .ignore_stack_for_abs_path(&abs_path, entry.is_dir())
962 .is_abs_path_ignored(&abs_path, entry.is_dir());
963 if let Some(old_path) = old_path {
964 snapshot.remove_path(&old_path);
965 }
966 inserted_entry = snapshot.insert_entry(entry, fs.as_ref());
967 snapshot.scan_id += 1;
968 }
969 this.poll_snapshot(true, cx);
970 Ok(inserted_entry)
971 })
972 })
973 }
974
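    // Start streaming this worktree's snapshots and diagnostic summaries to the server for
    // the given project. After the initial snapshot, later snapshots are sent as incremental
    // updates against the previously sent one.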
975 pub fn share(&mut self, project_id: u64, cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
976 let (share_tx, share_rx) = oneshot::channel();
977
978 if self.share.is_some() {
979 let _ = share_tx.send(Ok(()));
980 } else {
981 let (snapshots_tx, mut snapshots_rx) = watch::channel_with(self.snapshot());
982 let rpc = self.client.clone();
983 let worktree_id = cx.model_id() as u64;
984 let maintain_remote_snapshot = cx.background().spawn({
985 let rpc = rpc;
986 let diagnostic_summaries = self.diagnostic_summaries.clone();
987 async move {
988 let mut prev_snapshot = match snapshots_rx.recv().await {
989 Some(snapshot) => {
990 let update = proto::UpdateWorktree {
991 project_id,
992 worktree_id,
993 root_name: snapshot.root_name().to_string(),
994 updated_entries: snapshot
995 .entries_by_path
996 .iter()
997 .map(Into::into)
998 .collect(),
999 removed_entries: Default::default(),
1000 scan_id: snapshot.scan_id as u64,
1001 is_last_update: true,
1002 };
1003 if let Err(error) = send_worktree_update(&rpc, update).await {
1004 let _ = share_tx.send(Err(error));
                            return Err(anyhow!("failed to send initial worktree update"));
1006 } else {
1007 let _ = share_tx.send(Ok(()));
1008 snapshot
1009 }
1010 }
1011 None => {
1012 share_tx
1013 .send(Err(anyhow!("worktree dropped before share completed")))
1014 .ok();
                            return Err(anyhow!("failed to send initial worktree update"));
1016 }
1017 };
1018
1019 for (path, summary) in diagnostic_summaries.iter() {
1020 rpc.send(proto::UpdateDiagnosticSummary {
1021 project_id,
1022 worktree_id,
1023 summary: Some(summary.to_proto(&path.0)),
1024 })?;
1025 }
1026
1027 while let Some(snapshot) = snapshots_rx.recv().await {
1028 send_worktree_update(
1029 &rpc,
1030 snapshot.build_update(&prev_snapshot, project_id, worktree_id, true),
1031 )
1032 .await?;
1033 prev_snapshot = snapshot;
1034 }
1035
1036 Ok::<_, anyhow::Error>(())
1037 }
1038 .log_err()
1039 });
1040 self.share = Some(ShareState {
1041 project_id,
1042 snapshots_tx,
1043 _maintain_remote_snapshot: Some(maintain_remote_snapshot),
1044 });
1045 }
1046
1047 cx.foreground().spawn(async move {
1048 share_rx
1049 .await
1050 .unwrap_or_else(|_| Err(anyhow!("share ended")))
1051 })
1052 }
1053
1054 pub fn unshare(&mut self) {
1055 self.share.take();
1056 }
1057
1058 pub fn is_shared(&self) -> bool {
1059 self.share.is_some()
1060 }
1061
1062 pub fn send_extension_counts(&self, project_id: u64) {
1063 let mut extensions = Vec::new();
1064 let mut counts = Vec::new();
1065
1066 for (extension, count) in self.extension_counts() {
1067 extensions.push(extension.to_string_lossy().to_string());
1068 counts.push(*count as u32);
1069 }
1070
1071 self.client
1072 .send(proto::UpdateWorktreeExtensions {
1073 project_id,
1074 worktree_id: self.id().to_proto(),
1075 extensions,
1076 counts,
1077 })
1078 .log_err();
1079 }
1080}
1081
1082impl RemoteWorktree {
1083 fn snapshot(&self) -> Snapshot {
1084 self.snapshot.clone()
1085 }
1086
1087 fn poll_snapshot(&mut self, cx: &mut ModelContext<Worktree>) {
1088 self.snapshot = self.background_snapshot.lock().clone();
1089 cx.emit(Event::UpdatedEntries);
1090 cx.notify();
1091 }
1092
1093 pub fn disconnected_from_host(&mut self) {
1094 self.updates_tx.take();
1095 self.snapshot_subscriptions.clear();
1096 }
1097
1098 pub fn update_from_remote(&mut self, update: proto::UpdateWorktree) {
1099 if let Some(updates_tx) = &self.updates_tx {
1100 updates_tx
1101 .unbounded_send(update)
1102 .expect("consumer runs to completion");
1103 }
1104 }
1105
1106 fn observed_snapshot(&self, scan_id: usize) -> bool {
1107 self.scan_id > scan_id || (self.scan_id == scan_id && self.is_complete)
1108 }
1109
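    // Returns a future that resolves once the worktree's snapshot has caught up to the given
    // scan id. Pending waiters are kept sorted by scan id.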
1110 fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = ()> {
1111 let (tx, rx) = oneshot::channel();
1112 if self.observed_snapshot(scan_id) {
1113 let _ = tx.send(());
1114 } else {
1115 match self
1116 .snapshot_subscriptions
1117 .binary_search_by_key(&scan_id, |probe| probe.0)
1118 {
1119 Ok(ix) | Err(ix) => self.snapshot_subscriptions.insert(ix, (scan_id, tx)),
1120 }
1121 }
1122
1123 async move {
1124 let _ = rx.await;
1125 }
1126 }
1127
1128 pub fn update_diagnostic_summary(
1129 &mut self,
1130 path: Arc<Path>,
1131 summary: &proto::DiagnosticSummary,
1132 ) {
1133 let summary = DiagnosticSummary {
1134 language_server_id: summary.language_server_id as usize,
1135 error_count: summary.error_count as usize,
1136 warning_count: summary.warning_count as usize,
1137 };
1138 if summary.is_empty() {
1139 self.diagnostic_summaries.remove(&PathKey(path));
1140 } else {
1141 self.diagnostic_summaries.insert(PathKey(path), summary);
1142 }
1143 }
1144
1145 pub fn insert_entry(
1146 &mut self,
1147 entry: proto::Entry,
1148 scan_id: usize,
1149 cx: &mut ModelContext<Worktree>,
1150 ) -> Task<Result<Entry>> {
1151 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1152 cx.spawn(|this, mut cx| async move {
1153 wait_for_snapshot.await;
1154 this.update(&mut cx, |worktree, _| {
1155 let worktree = worktree.as_remote_mut().unwrap();
1156 let mut snapshot = worktree.background_snapshot.lock();
1157 let entry = snapshot.insert_entry(entry);
1158 worktree.snapshot = snapshot.clone();
1159 entry
1160 })
1161 })
1162 }
1163
1164 pub(crate) fn delete_entry(
1165 &mut self,
1166 id: ProjectEntryId,
1167 scan_id: usize,
1168 cx: &mut ModelContext<Worktree>,
1169 ) -> Task<Result<()>> {
1170 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1171 cx.spawn(|this, mut cx| async move {
1172 wait_for_snapshot.await;
1173 this.update(&mut cx, |worktree, _| {
1174 let worktree = worktree.as_remote_mut().unwrap();
1175 let mut snapshot = worktree.background_snapshot.lock();
1176 snapshot.delete_entry(id);
1177 worktree.snapshot = snapshot.clone();
1178 });
1179 Ok(())
1180 })
1181 }
1182}
1183
1184impl Snapshot {
1185 pub fn id(&self) -> WorktreeId {
1186 self.id
1187 }
1188
1189 pub fn contains_entry(&self, entry_id: ProjectEntryId) -> bool {
1190 self.entries_by_id.get(&entry_id, &()).is_some()
1191 }
1192
1193 pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
1194 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1195 let old_entry = self.entries_by_id.insert_or_replace(
1196 PathEntry {
1197 id: entry.id,
1198 path: entry.path.clone(),
1199 is_ignored: entry.is_ignored,
1200 scan_id: 0,
1201 },
1202 &(),
1203 );
1204 if let Some(old_entry) = old_entry {
1205 self.entries_by_path.remove(&PathKey(old_entry.path), &());
1206 }
1207 self.entries_by_path.insert_or_replace(entry.clone(), &());
1208 Ok(entry)
1209 }
1210
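    // Remove the entry and everything beneath its path from both indices, returning whether
    // the entry existed.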
1211 fn delete_entry(&mut self, entry_id: ProjectEntryId) -> bool {
1212 if let Some(removed_entry) = self.entries_by_id.remove(&entry_id, &()) {
1213 self.entries_by_path = {
1214 let mut cursor = self.entries_by_path.cursor();
1215 let mut new_entries_by_path =
1216 cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &());
1217 while let Some(entry) = cursor.item() {
1218 if entry.path.starts_with(&removed_entry.path) {
1219 self.entries_by_id.remove(&entry.id, &());
1220 cursor.next(&());
1221 } else {
1222 break;
1223 }
1224 }
1225 new_entries_by_path.push_tree(cursor.suffix(&()), &());
1226 new_entries_by_path
1227 };
1228
1229 true
1230 } else {
1231 false
1232 }
1233 }
1234
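    // Apply an `UpdateWorktree` message received from the host, performing all removals and
    // insertions as batched edits on both indices.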
1235 pub(crate) fn apply_remote_update(&mut self, update: proto::UpdateWorktree) -> Result<()> {
1236 let mut entries_by_path_edits = Vec::new();
1237 let mut entries_by_id_edits = Vec::new();
1238 for entry_id in update.removed_entries {
1239 let entry = self
1240 .entry_for_id(ProjectEntryId::from_proto(entry_id))
1241 .ok_or_else(|| anyhow!("unknown entry {}", entry_id))?;
1242 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
1243 entries_by_id_edits.push(Edit::Remove(entry.id));
1244 }
1245
1246 for entry in update.updated_entries {
1247 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1248 if let Some(PathEntry { path, .. }) = self.entries_by_id.get(&entry.id, &()) {
1249 entries_by_path_edits.push(Edit::Remove(PathKey(path.clone())));
1250 }
1251 entries_by_id_edits.push(Edit::Insert(PathEntry {
1252 id: entry.id,
1253 path: entry.path.clone(),
1254 is_ignored: entry.is_ignored,
1255 scan_id: 0,
1256 }));
1257 entries_by_path_edits.push(Edit::Insert(entry));
1258 }
1259
1260 self.entries_by_path.edit(entries_by_path_edits, &());
1261 self.entries_by_id.edit(entries_by_id_edits, &());
1262 self.scan_id = update.scan_id as usize;
1263 self.is_complete = update.is_last_update;
1264
1265 Ok(())
1266 }
1267
1268 pub fn file_count(&self) -> usize {
1269 self.entries_by_path.summary().file_count
1270 }
1271
1272 pub fn visible_file_count(&self) -> usize {
1273 self.entries_by_path.summary().visible_file_count
1274 }
1275
1276 fn traverse_from_offset(
1277 &self,
1278 include_dirs: bool,
1279 include_ignored: bool,
1280 start_offset: usize,
1281 ) -> Traversal {
1282 let mut cursor = self.entries_by_path.cursor();
1283 cursor.seek(
1284 &TraversalTarget::Count {
1285 count: start_offset,
1286 include_dirs,
1287 include_ignored,
1288 },
1289 Bias::Right,
1290 &(),
1291 );
1292 Traversal {
1293 cursor,
1294 include_dirs,
1295 include_ignored,
1296 }
1297 }
1298
1299 fn traverse_from_path(
1300 &self,
1301 include_dirs: bool,
1302 include_ignored: bool,
1303 path: &Path,
1304 ) -> Traversal {
1305 let mut cursor = self.entries_by_path.cursor();
1306 cursor.seek(&TraversalTarget::Path(path), Bias::Left, &());
1307 Traversal {
1308 cursor,
1309 include_dirs,
1310 include_ignored,
1311 }
1312 }
1313
1314 pub fn files(&self, include_ignored: bool, start: usize) -> Traversal {
1315 self.traverse_from_offset(false, include_ignored, start)
1316 }
1317
1318 pub fn entries(&self, include_ignored: bool) -> Traversal {
1319 self.traverse_from_offset(true, include_ignored, 0)
1320 }
1321
1322 pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
1323 let empty_path = Path::new("");
1324 self.entries_by_path
1325 .cursor::<()>()
1326 .filter(move |entry| entry.path.as_ref() != empty_path)
1327 .map(|entry| &entry.path)
1328 }
1329
1330 fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> {
1331 let mut cursor = self.entries_by_path.cursor();
1332 cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &());
1333 let traversal = Traversal {
1334 cursor,
1335 include_dirs: true,
1336 include_ignored: true,
1337 };
1338 ChildEntriesIter {
1339 traversal,
1340 parent_path,
1341 }
1342 }
1343
1344 pub fn root_entry(&self) -> Option<&Entry> {
1345 self.entry_for_path("")
1346 }
1347
1348 pub fn root_name(&self) -> &str {
1349 &self.root_name
1350 }
1351
1352 pub fn scan_id(&self) -> usize {
1353 self.scan_id
1354 }
1355
1356 pub fn entry_for_path(&self, path: impl AsRef<Path>) -> Option<&Entry> {
1357 let path = path.as_ref();
1358 self.traverse_from_path(true, true, path)
1359 .entry()
1360 .and_then(|entry| {
1361 if entry.path.as_ref() == path {
1362 Some(entry)
1363 } else {
1364 None
1365 }
1366 })
1367 }
1368
1369 pub fn entry_for_id(&self, id: ProjectEntryId) -> Option<&Entry> {
1370 let entry = self.entries_by_id.get(&id, &())?;
1371 self.entry_for_path(&entry.path)
1372 }
1373
1374 pub fn inode_for_path(&self, path: impl AsRef<Path>) -> Option<u64> {
1375 self.entry_for_path(path.as_ref()).map(|e| e.inode)
1376 }
1377}
1378
1379impl LocalSnapshot {
1380 pub fn abs_path(&self) -> &Arc<Path> {
1381 &self.abs_path
1382 }
1383
1384 pub fn extension_counts(&self) -> &HashMap<OsString, usize> {
1385 &self.extension_counts
1386 }
1387
    // Gives the most specific git repository for a given path.
    pub(crate) fn repo_for(&self, path: &Path) -> Option<GitRepositoryEntry> {
        self.git_repositories
            .iter()
            .rev() // `git_repositories` is ordered lexicographically by content path
            .find(|repo| repo.manages(path))
            .cloned()
    }
1397
1398 pub(crate) fn in_dot_git(&mut self, path: &Path) -> Option<&mut GitRepositoryEntry> {
1399 // Git repositories cannot be nested, so we don't need to reverse the order
1400 self.git_repositories
1401 .iter_mut()
1402 .find(|repo| repo.in_dot_git(path))
1403 }
1404
1405 #[cfg(test)]
1406 pub(crate) fn build_initial_update(&self, project_id: u64) -> proto::UpdateWorktree {
1407 let root_name = self.root_name.clone();
1408 proto::UpdateWorktree {
1409 project_id,
1410 worktree_id: self.id().to_proto(),
1411 root_name,
1412 updated_entries: self.entries_by_path.iter().map(Into::into).collect(),
1413 removed_entries: Default::default(),
1414 scan_id: self.scan_id as u64,
1415 is_last_update: true,
1416 }
1417 }
1418
1419 pub(crate) fn build_update(
1420 &self,
1421 other: &Self,
1422 project_id: u64,
1423 worktree_id: u64,
1424 include_ignored: bool,
1425 ) -> proto::UpdateWorktree {
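        // Walk both id-ordered entry lists in lockstep: ids present only in `self` are new
        // entries, ids present only in `other` have been removed, and ids present in both are
        // re-sent only if their scan id changed.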
1426 let mut updated_entries = Vec::new();
1427 let mut removed_entries = Vec::new();
1428 let mut self_entries = self
1429 .entries_by_id
1430 .cursor::<()>()
1431 .filter(|e| include_ignored || !e.is_ignored)
1432 .peekable();
1433 let mut other_entries = other
1434 .entries_by_id
1435 .cursor::<()>()
1436 .filter(|e| include_ignored || !e.is_ignored)
1437 .peekable();
1438 loop {
1439 match (self_entries.peek(), other_entries.peek()) {
1440 (Some(self_entry), Some(other_entry)) => {
1441 match Ord::cmp(&self_entry.id, &other_entry.id) {
1442 Ordering::Less => {
1443 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1444 updated_entries.push(entry);
1445 self_entries.next();
1446 }
1447 Ordering::Equal => {
1448 if self_entry.scan_id != other_entry.scan_id {
1449 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1450 updated_entries.push(entry);
1451 }
1452
1453 self_entries.next();
1454 other_entries.next();
1455 }
1456 Ordering::Greater => {
1457 removed_entries.push(other_entry.id.to_proto());
1458 other_entries.next();
1459 }
1460 }
1461 }
1462 (Some(self_entry), None) => {
1463 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1464 updated_entries.push(entry);
1465 self_entries.next();
1466 }
1467 (None, Some(other_entry)) => {
1468 removed_entries.push(other_entry.id.to_proto());
1469 other_entries.next();
1470 }
1471 (None, None) => break,
1472 }
1473 }
1474
1475 proto::UpdateWorktree {
1476 project_id,
1477 worktree_id,
1478 root_name: self.root_name().to_string(),
1479 updated_entries,
1480 removed_entries,
1481 scan_id: self.scan_id as u64,
1482 is_last_update: true,
1483 }
1484 }
1485
1486 fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
1487 if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) {
1488 let abs_path = self.abs_path.join(&entry.path);
1489 match smol::block_on(build_gitignore(&abs_path, fs)) {
1490 Ok(ignore) => {
1491 self.ignores_by_parent_abs_path.insert(
1492 abs_path.parent().unwrap().into(),
1493 (Arc::new(ignore), self.scan_id),
1494 );
1495 }
1496 Err(error) => {
1497 log::error!(
1498 "error loading .gitignore file {:?} - {:?}",
1499 &entry.path,
1500 error
1501 );
1502 }
1503 }
1504 }
1505
1506 self.reuse_entry_id(&mut entry);
1507
1508 if entry.kind == EntryKind::PendingDir {
1509 if let Some(existing_entry) =
1510 self.entries_by_path.get(&PathKey(entry.path.clone()), &())
1511 {
1512 entry.kind = existing_entry.kind;
1513 }
1514 }
1515
1516 self.entries_by_path.insert_or_replace(entry.clone(), &());
1517 let scan_id = self.scan_id;
1518 let removed_entry = self.entries_by_id.insert_or_replace(
1519 PathEntry {
1520 id: entry.id,
1521 path: entry.path.clone(),
1522 is_ignored: entry.is_ignored,
1523 scan_id,
1524 },
1525 &(),
1526 );
1527
1528 if let Some(removed_entry) = removed_entry {
1529 self.dec_extension_count(&removed_entry.path, removed_entry.is_ignored);
1530 }
1531 self.inc_extension_count(&entry.path, entry.is_ignored);
1532
1533 entry
1534 }
1535
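    // Mark a pending directory as scanned and insert its children, registering any
    // `.gitignore` or git repository found inside it.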
1536 fn populate_dir(
1537 &mut self,
1538 parent_path: Arc<Path>,
1539 entries: impl IntoIterator<Item = Entry>,
1540 ignore: Option<Arc<Gitignore>>,
1541 fs: &dyn Fs,
1542 ) {
1543 let mut parent_entry = if let Some(parent_entry) =
1544 self.entries_by_path.get(&PathKey(parent_path.clone()), &())
1545 {
1546 parent_entry.clone()
1547 } else {
1548 log::warn!(
1549 "populating a directory {:?} that has been removed",
1550 parent_path
1551 );
1552 return;
1553 };
1554
1555 if let Some(ignore) = ignore {
1556 self.ignores_by_parent_abs_path.insert(
1557 self.abs_path.join(&parent_path).into(),
1558 (ignore, self.scan_id),
1559 );
1560 }
1561 if matches!(parent_entry.kind, EntryKind::PendingDir) {
1562 parent_entry.kind = EntryKind::Dir;
1563 } else {
1564 unreachable!();
1565 }
1566
1567 if parent_path.file_name() == Some(&DOT_GIT) {
1568 let abs_path = self.abs_path.join(&parent_path);
1569 let content_path: Arc<Path> = parent_path.parent().unwrap().into();
1570 if let Err(ix) = self
1571 .git_repositories
1572 .binary_search_by_key(&&content_path, |repo| &repo.content_path)
1573 {
1574 if let Some(repo) = fs.open_repo(abs_path.as_path()) {
1575 self.git_repositories.insert(
1576 ix,
1577 GitRepositoryEntry {
1578 repo,
1579 scan_id: 0,
1580 content_path,
1581 git_dir_path: parent_path,
1582 },
1583 );
1584 }
1585 }
1586 }
1587
1588 let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
1589 let mut entries_by_id_edits = Vec::new();
1590
1591 for mut entry in entries {
1592 self.reuse_entry_id(&mut entry);
1593 self.inc_extension_count(&entry.path, entry.is_ignored);
1594 entries_by_id_edits.push(Edit::Insert(PathEntry {
1595 id: entry.id,
1596 path: entry.path.clone(),
1597 is_ignored: entry.is_ignored,
1598 scan_id: self.scan_id,
1599 }));
1600 entries_by_path_edits.push(Edit::Insert(entry));
1601 }
1602
1603 self.entries_by_path.edit(entries_by_path_edits, &());
1604 let removed_entries = self.entries_by_id.edit(entries_by_id_edits, &());
1605
1606 for removed_entry in removed_entries {
1607 self.dec_extension_count(&removed_entry.path, removed_entry.is_ignored);
1608 }
1609 }
1610
1611 fn inc_extension_count(&mut self, path: &Path, ignored: bool) {
1612 if !ignored {
1613 if let Some(extension) = path.extension() {
1614 if let Some(count) = self.extension_counts.get_mut(extension) {
1615 *count += 1;
1616 } else {
1617 self.extension_counts.insert(extension.into(), 1);
1618 }
1619 }
1620 }
1621 }
1622
1623 fn dec_extension_count(&mut self, path: &Path, ignored: bool) {
1624 if !ignored {
1625 if let Some(extension) = path.extension() {
1626 if let Some(count) = self.extension_counts.get_mut(extension) {
1627 *count -= 1;
1628 }
1629 }
1630 }
1631 }
1632
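    // Keep `ProjectEntryId`s stable across removals and re-creations by reusing the id
    // recorded for this inode, or the id of an existing entry at the same path.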
1633 fn reuse_entry_id(&mut self, entry: &mut Entry) {
1634 if let Some(removed_entry_id) = self.removed_entry_ids.remove(&entry.inode) {
1635 entry.id = removed_entry_id;
1636 } else if let Some(existing_entry) = self.entry_for_path(&entry.path) {
1637 entry.id = existing_entry.id;
1638 }
1639 }
1640
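    // Remove `path` and all of its descendants from both indices, remembering the removed
    // ids by inode so they can be reused if the same files reappear.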
1641 fn remove_path(&mut self, path: &Path) {
1642 let mut new_entries;
1643 let removed_entries;
1644 {
1645 let mut cursor = self.entries_by_path.cursor::<TraversalProgress>();
1646 new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
1647 removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
1648 new_entries.push_tree(cursor.suffix(&()), &());
1649 }
1650 self.entries_by_path = new_entries;
1651
1652 let mut entries_by_id_edits = Vec::new();
1653 for entry in removed_entries.cursor::<()>() {
1654 let removed_entry_id = self
1655 .removed_entry_ids
1656 .entry(entry.inode)
1657 .or_insert(entry.id);
1658 *removed_entry_id = cmp::max(*removed_entry_id, entry.id);
1659 entries_by_id_edits.push(Edit::Remove(entry.id));
1660 self.dec_extension_count(&entry.path, entry.is_ignored);
1661 }
1662 self.entries_by_id.edit(entries_by_id_edits, &());
1663
1664 if path.file_name() == Some(&GITIGNORE) {
1665 let abs_parent_path = self.abs_path.join(path.parent().unwrap());
1666 if let Some((_, scan_id)) = self
1667 .ignores_by_parent_abs_path
1668 .get_mut(abs_parent_path.as_path())
1669 {
1670 *scan_id = self.snapshot.scan_id;
1671 }
1672 } else if path.file_name() == Some(&DOT_GIT) {
1673 let parent_path = path.parent().unwrap();
1674 if let Ok(ix) = self
1675 .git_repositories
1676 .binary_search_by_key(&parent_path, |repo| repo.git_dir_path.as_ref())
1677 {
1678 self.git_repositories[ix].scan_id = self.snapshot.scan_id;
1679 }
1680 }
1681 }
1682
1683 fn ancestor_inodes_for_path(&self, path: &Path) -> TreeSet<u64> {
1684 let mut inodes = TreeSet::default();
1685 for ancestor in path.ancestors().skip(1) {
1686 if let Some(entry) = self.entry_for_path(ancestor) {
1687 inodes.insert(entry.inode);
1688 }
1689 }
1690 inodes
1691 }
1692
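    // Build the stack of `.gitignore` files that apply to `abs_path`, walking from the
    // outermost ancestor inward; if any ancestor is itself ignored, everything below it
    // is ignored.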
1693 fn ignore_stack_for_abs_path(&self, abs_path: &Path, is_dir: bool) -> Arc<IgnoreStack> {
1694 let mut new_ignores = Vec::new();
1695 for ancestor in abs_path.ancestors().skip(1) {
1696 if let Some((ignore, _)) = self.ignores_by_parent_abs_path.get(ancestor) {
1697 new_ignores.push((ancestor, Some(ignore.clone())));
1698 } else {
1699 new_ignores.push((ancestor, None));
1700 }
1701 }
1702
1703 let mut ignore_stack = IgnoreStack::none();
1704 for (parent_abs_path, ignore) in new_ignores.into_iter().rev() {
1705 if ignore_stack.is_abs_path_ignored(parent_abs_path, true) {
1706 ignore_stack = IgnoreStack::all();
1707 break;
1708 } else if let Some(ignore) = ignore {
1709 ignore_stack = ignore_stack.append(parent_abs_path.into(), ignore);
1710 }
1711 }
1712
1713 if ignore_stack.is_abs_path_ignored(abs_path, is_dir) {
1714 ignore_stack = IgnoreStack::all();
1715 }
1716
1717 ignore_stack
1718 }
1719
1720 pub fn git_repo_entries(&self) -> &[GitRepositoryEntry] {
1721 &self.git_repositories
1722 }
1723}
1724
1725impl GitRepositoryEntry {
    // Note that this path should be relative to the worktree root.
    pub(crate) fn manages(&self, path: &Path) -> bool {
        path.starts_with(self.content_path.as_ref())
    }
1731
    // Note that this path should be relative to the worktree root.
1733 pub(crate) fn in_dot_git(&self, path: &Path) -> bool {
1734 path.starts_with(self.git_dir_path.as_ref())
1735 }
1736}
1737
1738async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
1739 let contents = fs.load(abs_path).await?;
1740 let parent = abs_path.parent().unwrap_or_else(|| Path::new("/"));
1741 let mut builder = GitignoreBuilder::new(parent);
1742 for line in contents.lines() {
1743 builder.add_line(Some(abs_path.into()), line)?;
1744 }
1745 Ok(builder.build()?)
1746}
1747
1748impl WorktreeId {
1749 pub fn from_usize(handle_id: usize) -> Self {
1750 Self(handle_id)
1751 }
1752
1753 pub(crate) fn from_proto(id: u64) -> Self {
1754 Self(id as usize)
1755 }
1756
1757 pub fn to_proto(&self) -> u64 {
1758 self.0 as u64
1759 }
1760
1761 pub fn to_usize(&self) -> usize {
1762 self.0
1763 }
1764}
1765
1766impl fmt::Display for WorktreeId {
1767 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1768 self.0.fmt(f)
1769 }
1770}
1771
1772impl Deref for Worktree {
1773 type Target = Snapshot;
1774
1775 fn deref(&self) -> &Self::Target {
1776 match self {
1777 Worktree::Local(worktree) => &worktree.snapshot,
1778 Worktree::Remote(worktree) => &worktree.snapshot,
1779 }
1780 }
1781}
1782
1783impl Deref for LocalWorktree {
1784 type Target = LocalSnapshot;
1785
1786 fn deref(&self) -> &Self::Target {
1787 &self.snapshot
1788 }
1789}
1790
1791impl Deref for RemoteWorktree {
1792 type Target = Snapshot;
1793
1794 fn deref(&self) -> &Self::Target {
1795 &self.snapshot
1796 }
1797}
1798
1799impl fmt::Debug for LocalWorktree {
1800 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1801 self.snapshot.fmt(f)
1802 }
1803}
1804
1805impl fmt::Debug for Snapshot {
1806 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1807 struct EntriesById<'a>(&'a SumTree<PathEntry>);
1808 struct EntriesByPath<'a>(&'a SumTree<Entry>);
1809
1810 impl<'a> fmt::Debug for EntriesByPath<'a> {
1811 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1812 f.debug_map()
1813 .entries(self.0.iter().map(|entry| (&entry.path, entry.id)))
1814 .finish()
1815 }
1816 }
1817
1818 impl<'a> fmt::Debug for EntriesById<'a> {
1819 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1820 f.debug_list().entries(self.0.iter()).finish()
1821 }
1822 }
1823
1824 f.debug_struct("Snapshot")
1825 .field("id", &self.id)
1826 .field("root_name", &self.root_name)
1827 .field("entries_by_path", &EntriesByPath(&self.entries_by_path))
1828 .field("entries_by_id", &EntriesById(&self.entries_by_id))
1829 .finish()
1830 }
1831}
1832
1833#[derive(Clone, PartialEq)]
1834pub struct File {
1835 pub worktree: ModelHandle<Worktree>,
1836 pub path: Arc<Path>,
1837 pub mtime: SystemTime,
1838 pub(crate) entry_id: Option<ProjectEntryId>,
1839 pub(crate) is_local: bool,
1840}
1841
1842impl language::File for File {
1843 fn as_local(&self) -> Option<&dyn language::LocalFile> {
1844 if self.is_local {
1845 Some(self)
1846 } else {
1847 None
1848 }
1849 }
1850
1851 fn mtime(&self) -> SystemTime {
1852 self.mtime
1853 }
1854
1855 fn path(&self) -> &Arc<Path> {
1856 &self.path
1857 }
1858
1859 fn full_path(&self, cx: &AppContext) -> PathBuf {
1860 let mut full_path = PathBuf::new();
1861 full_path.push(self.worktree.read(cx).root_name());
1862 if self.path.components().next().is_some() {
1863 full_path.push(&self.path);
1864 }
1865 full_path
1866 }
1867
1868 /// Returns the last component of this handle's absolute path. If this handle refers to the root
1869 /// of its worktree, then this method will return the name of the worktree itself.
1870 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr {
1871 self.path
1872 .file_name()
1873 .unwrap_or_else(|| OsStr::new(&self.worktree.read(cx).root_name))
1874 }
1875
1876 fn is_deleted(&self) -> bool {
1877 self.entry_id.is_none()
1878 }
1879
1880 fn save(
1881 &self,
1882 buffer_id: u64,
1883 text: Rope,
1884 version: clock::Global,
1885 line_ending: LineEnding,
1886 cx: &mut MutableAppContext,
1887 ) -> Task<Result<(clock::Global, String, SystemTime)>> {
1888 self.worktree.update(cx, |worktree, cx| match worktree {
1889 Worktree::Local(worktree) => {
1890 let rpc = worktree.client.clone();
1891 let project_id = worktree.share.as_ref().map(|share| share.project_id);
1892 let fingerprint = text.fingerprint();
1893 let save = worktree.write_file(self.path.clone(), text, line_ending, cx);
1894 cx.background().spawn(async move {
1895 let entry = save.await?;
1896 if let Some(project_id) = project_id {
1897 rpc.send(proto::BufferSaved {
1898 project_id,
1899 buffer_id,
1900 version: serialize_version(&version),
1901 mtime: Some(entry.mtime.into()),
1902 fingerprint: fingerprint.clone(),
1903 })?;
1904 }
1905 Ok((version, fingerprint, entry.mtime))
1906 })
1907 }
1908 Worktree::Remote(worktree) => {
1909 let rpc = worktree.client.clone();
1910 let project_id = worktree.project_id;
1911 cx.foreground().spawn(async move {
1912 let response = rpc
1913 .request(proto::SaveBuffer {
1914 project_id,
1915 buffer_id,
1916 version: serialize_version(&version),
1917 })
1918 .await?;
1919 let version = deserialize_version(response.version);
1920 let mtime = response
1921 .mtime
1922 .ok_or_else(|| anyhow!("missing mtime"))?
1923 .into();
1924 Ok((version, response.fingerprint, mtime))
1925 })
1926 }
1927 })
1928 }
1929
1930 fn as_any(&self) -> &dyn Any {
1931 self
1932 }
1933
1934 fn to_proto(&self) -> rpc::proto::File {
1935 rpc::proto::File {
1936 worktree_id: self.worktree.id() as u64,
1937 entry_id: self.entry_id.map(|entry_id| entry_id.to_proto()),
1938 path: self.path.to_string_lossy().into(),
1939 mtime: Some(self.mtime.into()),
1940 }
1941 }
1942}
1943
1944impl language::LocalFile for File {
1945 fn abs_path(&self, cx: &AppContext) -> PathBuf {
1946 self.worktree
1947 .read(cx)
1948 .as_local()
1949 .unwrap()
1950 .abs_path
1951 .join(&self.path)
1952 }
1953
1954 fn load(&self, cx: &AppContext) -> Task<Result<String>> {
1955 let worktree = self.worktree.read(cx).as_local().unwrap();
1956 let abs_path = worktree.absolutize(&self.path);
1957 let fs = worktree.fs.clone();
1958 cx.background()
1959 .spawn(async move { fs.load(&abs_path).await })
1960 }
1961
1962 fn buffer_reloaded(
1963 &self,
1964 buffer_id: u64,
1965 version: &clock::Global,
1966 fingerprint: String,
1967 line_ending: LineEnding,
1968 mtime: SystemTime,
1969 cx: &mut MutableAppContext,
1970 ) {
1971 let worktree = self.worktree.read(cx).as_local().unwrap();
1972 if let Some(project_id) = worktree.share.as_ref().map(|share| share.project_id) {
1973 worktree
1974 .client
1975 .send(proto::BufferReloaded {
1976 project_id,
1977 buffer_id,
1978 version: serialize_version(version),
1979 mtime: Some(mtime.into()),
1980 fingerprint,
1981 line_ending: serialize_line_ending(line_ending) as i32,
1982 })
1983 .log_err();
1984 }
1985 }
1986}
1987
1988impl File {
1989 pub fn from_proto(
1990 proto: rpc::proto::File,
1991 worktree: ModelHandle<Worktree>,
1992 cx: &AppContext,
1993 ) -> Result<Self> {
1994 let worktree_id = worktree
1995 .read(cx)
1996 .as_remote()
1997 .ok_or_else(|| anyhow!("not remote"))?
1998 .id();
1999
2000 if worktree_id.to_proto() != proto.worktree_id {
2001 return Err(anyhow!("worktree id does not match file"));
2002 }
2003
2004 Ok(Self {
2005 worktree,
2006 path: Path::new(&proto.path).into(),
2007 mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
2008 entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
2009 is_local: false,
2010 })
2011 }
2012
2013 pub fn from_dyn(file: Option<&dyn language::File>) -> Option<&Self> {
2014 file.and_then(|f| f.as_any().downcast_ref())
2015 }
2016
2017 pub fn worktree_id(&self, cx: &AppContext) -> WorktreeId {
2018 self.worktree.read(cx).id()
2019 }
2020
2021 pub fn project_entry_id(&self, _: &AppContext) -> Option<ProjectEntryId> {
2022 self.entry_id
2023 }
2024}
2025
2026#[derive(Clone, Debug, PartialEq, Eq)]
2027pub struct Entry {
2028 pub id: ProjectEntryId,
2029 pub kind: EntryKind,
2030 pub path: Arc<Path>,
2031 pub inode: u64,
2032 pub mtime: SystemTime,
2033 pub is_symlink: bool,
2034 pub is_ignored: bool,
2035}
2036
2037#[derive(Clone, Copy, Debug, PartialEq, Eq)]
2038pub enum EntryKind {
2039 PendingDir,
2040 Dir,
2041 File(CharBag),
2042}
2043
2044impl Entry {
2045 fn new(
2046 path: Arc<Path>,
2047 metadata: &fs::Metadata,
2048 next_entry_id: &AtomicUsize,
2049 root_char_bag: CharBag,
2050 ) -> Self {
2051 Self {
2052 id: ProjectEntryId::new(next_entry_id),
2053 kind: if metadata.is_dir {
2054 EntryKind::PendingDir
2055 } else {
2056 EntryKind::File(char_bag_for_path(root_char_bag, &path))
2057 },
2058 path,
2059 inode: metadata.inode,
2060 mtime: metadata.mtime,
2061 is_symlink: metadata.is_symlink,
2062 is_ignored: false,
2063 }
2064 }
2065
2066 pub fn is_dir(&self) -> bool {
2067 matches!(self.kind, EntryKind::Dir | EntryKind::PendingDir)
2068 }
2069
2070 pub fn is_file(&self) -> bool {
2071 matches!(self.kind, EntryKind::File(_))
2072 }
2073}
2074
2075impl sum_tree::Item for Entry {
2076 type Summary = EntrySummary;
2077
2078 fn summary(&self) -> Self::Summary {
2079 let visible_count = if self.is_ignored { 0 } else { 1 };
2080 let file_count;
2081 let visible_file_count;
2082 if self.is_file() {
2083 file_count = 1;
2084 visible_file_count = visible_count;
2085 } else {
2086 file_count = 0;
2087 visible_file_count = 0;
2088 }
2089
2090 EntrySummary {
2091 max_path: self.path.clone(),
2092 count: 1,
2093 visible_count,
2094 file_count,
2095 visible_file_count,
2096 }
2097 }
2098}
2099
2100impl sum_tree::KeyedItem for Entry {
2101 type Key = PathKey;
2102
2103 fn key(&self) -> Self::Key {
2104 PathKey(self.path.clone())
2105 }
2106}
2107
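/// Aggregated data for a range of entries in the path-ordered sum tree.
///
/// The counts allow a `Traversal` to seek directly to the nth entry, visible entry,
/// file, or visible file without walking individual entries.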
2108#[derive(Clone, Debug)]
2109pub struct EntrySummary {
2110 max_path: Arc<Path>,
2111 count: usize,
2112 visible_count: usize,
2113 file_count: usize,
2114 visible_file_count: usize,
2115}
2116
2117impl Default for EntrySummary {
2118 fn default() -> Self {
2119 Self {
2120 max_path: Arc::from(Path::new("")),
2121 count: 0,
2122 visible_count: 0,
2123 file_count: 0,
2124 visible_file_count: 0,
2125 }
2126 }
2127}
2128
2129impl sum_tree::Summary for EntrySummary {
2130 type Context = ();
2131
2132 fn add_summary(&mut self, rhs: &Self, _: &()) {
2133 self.max_path = rhs.max_path.clone();
2134 self.count += rhs.count;
2135 self.visible_count += rhs.visible_count;
2136 self.file_count += rhs.file_count;
2137 self.visible_file_count += rhs.visible_file_count;
2138 }
2139}
2140
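/// An entry record keyed by `ProjectEntryId`, stored in `entries_by_id` so that an
/// entry's path and ignore status can be looked up by id.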
2141#[derive(Clone, Debug)]
2142struct PathEntry {
2143 id: ProjectEntryId,
2144 path: Arc<Path>,
2145 is_ignored: bool,
2146 scan_id: usize,
2147}
2148
2149impl sum_tree::Item for PathEntry {
2150 type Summary = PathEntrySummary;
2151
2152 fn summary(&self) -> Self::Summary {
2153 PathEntrySummary { max_id: self.id }
2154 }
2155}
2156
2157impl sum_tree::KeyedItem for PathEntry {
2158 type Key = ProjectEntryId;
2159
2160 fn key(&self) -> Self::Key {
2161 self.id
2162 }
2163}
2164
2165#[derive(Clone, Debug, Default)]
2166struct PathEntrySummary {
2167 max_id: ProjectEntryId,
2168}
2169
2170impl sum_tree::Summary for PathEntrySummary {
2171 type Context = ();
2172
2173 fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
2174 self.max_id = summary.max_id;
2175 }
2176}
2177
2178impl<'a> sum_tree::Dimension<'a, PathEntrySummary> for ProjectEntryId {
2179 fn add_summary(&mut self, summary: &'a PathEntrySummary, _: &()) {
2180 *self = summary.max_id;
2181 }
2182}
2183
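/// The key by which entries are ordered in the path-ordered sum tree: the entry's path.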
2184#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
2185pub struct PathKey(Arc<Path>);
2186
2187impl Default for PathKey {
2188 fn default() -> Self {
2189 Self(Path::new("").into())
2190 }
2191}
2192
2193impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey {
2194 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2195 self.0 = summary.max_path.clone();
2196 }
2197}
2198
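/// Scans a worktree's directory on the background executor, keeping the shared
/// `LocalSnapshot` up to date and reporting progress to the worktree through
/// `ScanState` notifications. After the initial scan it reacts to batches of
/// file-system events (see `run` and `process_events`).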
2199struct BackgroundScanner {
2200 fs: Arc<dyn Fs>,
2201 snapshot: Arc<Mutex<LocalSnapshot>>,
2202 notify: UnboundedSender<ScanState>,
2203 executor: Arc<executor::Background>,
2204}
2205
2206impl BackgroundScanner {
2207 fn new(
2208 snapshot: Arc<Mutex<LocalSnapshot>>,
2209 notify: UnboundedSender<ScanState>,
2210 fs: Arc<dyn Fs>,
2211 executor: Arc<executor::Background>,
2212 ) -> Self {
2213 Self {
2214 fs,
2215 snapshot,
2216 notify,
2217 executor,
2218 }
2219 }
2220
2221 fn abs_path(&self) -> Arc<Path> {
2222 self.snapshot.lock().abs_path.clone()
2223 }
2224
2225 fn snapshot(&self) -> LocalSnapshot {
2226 self.snapshot.lock().clone()
2227 }
2228
2229 async fn run(mut self, events_rx: impl Stream<Item = Vec<fsevent::Event>>) {
2230 if self.notify.unbounded_send(ScanState::Initializing).is_err() {
2231 return;
2232 }
2233
2234 if let Err(err) = self.scan_dirs().await {
2235 if self
2236 .notify
2237 .unbounded_send(ScanState::Err(Arc::new(err)))
2238 .is_err()
2239 {
2240 return;
2241 }
2242 }
2243
2244 if self.notify.unbounded_send(ScanState::Idle).is_err() {
2245 return;
2246 }
2247
2248 futures::pin_mut!(events_rx);
2249
2250 while let Some(mut events) = events_rx.next().await {
2251 while let Poll::Ready(Some(additional_events)) = futures::poll!(events_rx.next()) {
2252 events.extend(additional_events);
2253 }
2254
2255 if self.notify.unbounded_send(ScanState::Updating).is_err() {
2256 break;
2257 }
2258
2259 if !self.process_events(events).await {
2260 break;
2261 }
2262
2263 if self.notify.unbounded_send(ScanState::Idle).is_err() {
2264 break;
2265 }
2266 }
2267 }
2268
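    /// Performs the initial scan: loads `.gitignore` files from ancestors of the
    /// root, then scans the root directory tree in parallel, one task per CPU.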
2269 async fn scan_dirs(&mut self) -> Result<()> {
2270 let root_char_bag;
2271 let root_abs_path;
2272 let root_inode;
2273 let is_dir;
2274 let next_entry_id;
2275 {
2276 let snapshot = self.snapshot.lock();
2277 root_char_bag = snapshot.root_char_bag;
2278 root_abs_path = snapshot.abs_path.clone();
2279 root_inode = snapshot.root_entry().map(|e| e.inode);
2280 is_dir = snapshot.root_entry().map_or(false, |e| e.is_dir());
2281 next_entry_id = snapshot.next_entry_id.clone();
2282 };
2283
2284 // Populate ignores above the root.
2285 for ancestor in root_abs_path.ancestors().skip(1) {
2286 if let Ok(ignore) = build_gitignore(&ancestor.join(&*GITIGNORE), self.fs.as_ref()).await
2287 {
2288 self.snapshot
2289 .lock()
2290 .ignores_by_parent_abs_path
2291 .insert(ancestor.into(), (ignore.into(), 0));
2292 }
2293 }
2294
2295 let ignore_stack = {
2296 let mut snapshot = self.snapshot.lock();
2297 let ignore_stack = snapshot.ignore_stack_for_abs_path(&root_abs_path, true);
2298 if ignore_stack.is_all() {
2299 if let Some(mut root_entry) = snapshot.root_entry().cloned() {
2300 root_entry.is_ignored = true;
2301 snapshot.insert_entry(root_entry, self.fs.as_ref());
2302 }
2303 }
2304 ignore_stack
2305 };
2306
2307 if is_dir {
2308 let path: Arc<Path> = Arc::from(Path::new(""));
2309 let mut ancestor_inodes = TreeSet::default();
2310 if let Some(root_inode) = root_inode {
2311 ancestor_inodes.insert(root_inode);
2312 }
2313
2314 let (tx, rx) = channel::unbounded();
2315 self.executor
2316 .block(tx.send(ScanJob {
2317 abs_path: root_abs_path.to_path_buf(),
2318 path,
2319 ignore_stack,
2320 ancestor_inodes,
2321 scan_queue: tx.clone(),
2322 }))
2323 .unwrap();
2324 drop(tx);
2325
2326 self.executor
2327 .scoped(|scope| {
2328 for _ in 0..self.executor.num_cpus() {
2329 scope.spawn(async {
2330 while let Ok(job) = rx.recv().await {
2331 if let Err(err) = self
2332 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2333 .await
2334 {
2335 log::error!("error scanning {:?}: {}", job.abs_path, err);
2336 }
2337 }
2338 });
2339 }
2340 })
2341 .await;
2342 }
2343
2344 Ok(())
2345 }
2346
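    /// Scans a single directory: reads its children, applies any `.gitignore` found
    /// there, records the new entries, and enqueues jobs for child directories.
    /// Directories whose inode already appears in `ancestor_inodes` are skipped,
    /// which prevents infinite recursion through symlink cycles.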
2347 async fn scan_dir(
2348 &self,
2349 root_char_bag: CharBag,
2350 next_entry_id: Arc<AtomicUsize>,
2351 job: &ScanJob,
2352 ) -> Result<()> {
2353 let mut new_entries: Vec<Entry> = Vec::new();
2354 let mut new_jobs: Vec<ScanJob> = Vec::new();
2355 let mut ignore_stack = job.ignore_stack.clone();
2356 let mut new_ignore = None;
2357
2358 let mut child_paths = self.fs.read_dir(&job.abs_path).await?;
2359 while let Some(child_abs_path) = child_paths.next().await {
2360 let child_abs_path = match child_abs_path {
2361 Ok(child_abs_path) => child_abs_path,
2362 Err(error) => {
2363 log::error!("error processing entry {:?}", error);
2364 continue;
2365 }
2366 };
2367 let child_name = child_abs_path.file_name().unwrap();
2368 let child_path: Arc<Path> = job.path.join(child_name).into();
2369 let child_metadata = match self.fs.metadata(&child_abs_path).await {
2370 Ok(Some(metadata)) => metadata,
2371 Ok(None) => continue,
2372 Err(err) => {
2373 log::error!("error processing {:?}: {:?}", child_abs_path, err);
2374 continue;
2375 }
2376 };
2377
2378 // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
2379 if child_name == *GITIGNORE {
2380 match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
2381 Ok(ignore) => {
2382 let ignore = Arc::new(ignore);
2383 ignore_stack =
2384 ignore_stack.append(job.abs_path.as_path().into(), ignore.clone());
2385 new_ignore = Some(ignore);
2386 }
2387 Err(error) => {
2388 log::error!(
2389 "error loading .gitignore file {:?} - {:?}",
2390 child_name,
2391 error
2392 );
2393 }
2394 }
2395
                // Update the ignore status of any child entries we've already processed to reflect
                // the ignore file in the current directory. Because `.gitignore` starts with a `.`,
                // it tends to be one of the first children processed, so there should rarely be
                // many earlier entries to update. Also update the ignore stack associated with any
                // new jobs created so far.
2400 let mut new_jobs = new_jobs.iter_mut();
2401 for entry in &mut new_entries {
2402 let entry_abs_path = self.abs_path().join(&entry.path);
2403 entry.is_ignored =
2404 ignore_stack.is_abs_path_ignored(&entry_abs_path, entry.is_dir());
2405 if entry.is_dir() {
2406 new_jobs.next().unwrap().ignore_stack = if entry.is_ignored {
2407 IgnoreStack::all()
2408 } else {
2409 ignore_stack.clone()
2410 };
2411 }
2412 }
2413 }
2414
2415 let mut child_entry = Entry::new(
2416 child_path.clone(),
2417 &child_metadata,
2418 &next_entry_id,
2419 root_char_bag,
2420 );
2421
2422 if child_entry.is_dir() {
2423 let is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true);
2424 child_entry.is_ignored = is_ignored;
2425
2426 if !job.ancestor_inodes.contains(&child_entry.inode) {
2427 let mut ancestor_inodes = job.ancestor_inodes.clone();
2428 ancestor_inodes.insert(child_entry.inode);
2429 new_jobs.push(ScanJob {
2430 abs_path: child_abs_path,
2431 path: child_path,
2432 ignore_stack: if is_ignored {
2433 IgnoreStack::all()
2434 } else {
2435 ignore_stack.clone()
2436 },
2437 ancestor_inodes,
2438 scan_queue: job.scan_queue.clone(),
2439 });
2440 }
2441 } else {
2442 child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
2443 }
2444
2445 new_entries.push(child_entry);
2446 }
2447
2448 self.snapshot.lock().populate_dir(
2449 job.path.clone(),
2450 new_entries,
2451 new_ignore,
2452 self.fs.as_ref(),
2453 );
2454 for new_job in new_jobs {
2455 job.scan_queue.send(new_job).await.unwrap();
2456 }
2457
2458 Ok(())
2459 }
2460
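    /// Applies a batch of file-system events to the snapshot: removes the affected
    /// paths, re-inserts entries that still exist on disk, rescans any newly created
    /// directories, and then refreshes ignore statuses and git repositories.
    /// Returns `false` if the scanner should stop (e.g. the root path can no longer
    /// be canonicalized).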
2461 async fn process_events(&mut self, mut events: Vec<fsevent::Event>) -> bool {
2462 events.sort_unstable_by(|a, b| a.path.cmp(&b.path));
2463 events.dedup_by(|a, b| a.path.starts_with(&b.path));
2464
2465 let root_char_bag;
2466 let root_abs_path;
2467 let next_entry_id;
2468 {
2469 let snapshot = self.snapshot.lock();
2470 root_char_bag = snapshot.root_char_bag;
2471 root_abs_path = snapshot.abs_path.clone();
2472 next_entry_id = snapshot.next_entry_id.clone();
2473 }
2474
2475 let root_canonical_path = if let Ok(path) = self.fs.canonicalize(&root_abs_path).await {
2476 path
2477 } else {
2478 return false;
2479 };
2480 let metadata = futures::future::join_all(
2481 events
2482 .iter()
2483 .map(|event| self.fs.metadata(&event.path))
2484 .collect::<Vec<_>>(),
2485 )
2486 .await;
2487
        // Hold the snapshot lock while removing and re-inserting the entries affected
        // by each event. This way, partially applied changes are not observable to the
        // foreground thread while this operation is in progress.
2491 let (scan_queue_tx, scan_queue_rx) = channel::unbounded();
2492 {
2493 let mut snapshot = self.snapshot.lock();
2494 snapshot.scan_id += 1;
2495 for event in &events {
2496 if let Ok(path) = event.path.strip_prefix(&root_canonical_path) {
2497 snapshot.remove_path(path);
2498 }
2499 }
2500
2501 for (event, metadata) in events.into_iter().zip(metadata.into_iter()) {
2502 let path: Arc<Path> = match event.path.strip_prefix(&root_canonical_path) {
2503 Ok(path) => Arc::from(path.to_path_buf()),
2504 Err(_) => {
2505 log::error!(
2506 "unexpected event {:?} for root path {:?}",
2507 event.path,
2508 root_canonical_path
2509 );
2510 continue;
2511 }
2512 };
2513 let abs_path = root_abs_path.join(&path);
2514
2515 match metadata {
2516 Ok(Some(metadata)) => {
2517 let ignore_stack =
2518 snapshot.ignore_stack_for_abs_path(&abs_path, metadata.is_dir);
2519 let mut fs_entry = Entry::new(
2520 path.clone(),
2521 &metadata,
2522 snapshot.next_entry_id.as_ref(),
2523 snapshot.root_char_bag,
2524 );
2525 fs_entry.is_ignored = ignore_stack.is_all();
2526 snapshot.insert_entry(fs_entry, self.fs.as_ref());
2527
2528 let scan_id = snapshot.scan_id;
2529 if let Some(repo) = snapshot.in_dot_git(&path) {
2530 repo.scan_id = scan_id;
2531 }
2532
2533 let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path);
2534 if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) {
2535 ancestor_inodes.insert(metadata.inode);
2536 self.executor
2537 .block(scan_queue_tx.send(ScanJob {
2538 abs_path,
2539 path,
2540 ignore_stack,
2541 ancestor_inodes,
2542 scan_queue: scan_queue_tx.clone(),
2543 }))
2544 .unwrap();
2545 }
2546 }
2547 Ok(None) => {}
2548 Err(err) => {
2549 // TODO - create a special 'error' entry in the entries tree to mark this
2550 log::error!("error reading file on event {:?}", err);
2551 }
2552 }
2553 }
2554 drop(scan_queue_tx);
2555 }
2556
2557 // Scan any directories that were created as part of this event batch.
2558 self.executor
2559 .scoped(|scope| {
2560 for _ in 0..self.executor.num_cpus() {
2561 scope.spawn(async {
2562 while let Ok(job) = scan_queue_rx.recv().await {
2563 if let Err(err) = self
2564 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2565 .await
2566 {
2567 log::error!("error scanning {:?}: {}", job.abs_path, err);
2568 }
2569 }
2570 });
2571 }
2572 })
2573 .await;
2574
2575 // Attempt to detect renames only over a single batch of file-system events.
2576 self.snapshot.lock().removed_entry_ids.clear();
2577
2578 self.update_ignore_statuses().await;
2579 self.update_git_repositories();
2580 true
2581 }
2582
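    /// Recomputes ignore statuses for entries beneath `.gitignore` files that changed
    /// during the current scan, and removes records for `.gitignore` files that no
    /// longer exist.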
2583 async fn update_ignore_statuses(&self) {
2584 let mut snapshot = self.snapshot();
2585
2586 let mut ignores_to_update = Vec::new();
2587 let mut ignores_to_delete = Vec::new();
2588 for (parent_abs_path, (_, scan_id)) in &snapshot.ignores_by_parent_abs_path {
2589 if let Ok(parent_path) = parent_abs_path.strip_prefix(&snapshot.abs_path) {
2590 if *scan_id == snapshot.scan_id && snapshot.entry_for_path(parent_path).is_some() {
2591 ignores_to_update.push(parent_abs_path.clone());
2592 }
2593
2594 let ignore_path = parent_path.join(&*GITIGNORE);
2595 if snapshot.entry_for_path(ignore_path).is_none() {
2596 ignores_to_delete.push(parent_abs_path.clone());
2597 }
2598 }
2599 }
2600
2601 for parent_abs_path in ignores_to_delete {
2602 snapshot.ignores_by_parent_abs_path.remove(&parent_abs_path);
2603 self.snapshot
2604 .lock()
2605 .ignores_by_parent_abs_path
2606 .remove(&parent_abs_path);
2607 }
2608
2609 let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded();
2610 ignores_to_update.sort_unstable();
2611 let mut ignores_to_update = ignores_to_update.into_iter().peekable();
2612 while let Some(parent_abs_path) = ignores_to_update.next() {
2613 while ignores_to_update
2614 .peek()
2615 .map_or(false, |p| p.starts_with(&parent_abs_path))
2616 {
2617 ignores_to_update.next().unwrap();
2618 }
2619
2620 let ignore_stack = snapshot.ignore_stack_for_abs_path(&parent_abs_path, true);
2621 ignore_queue_tx
2622 .send(UpdateIgnoreStatusJob {
2623 abs_path: parent_abs_path,
2624 ignore_stack,
2625 ignore_queue: ignore_queue_tx.clone(),
2626 })
2627 .await
2628 .unwrap();
2629 }
2630 drop(ignore_queue_tx);
2631
2632 self.executor
2633 .scoped(|scope| {
2634 for _ in 0..self.executor.num_cpus() {
2635 scope.spawn(async {
2636 while let Ok(job) = ignore_queue_rx.recv().await {
2637 self.update_ignore_status(job, &snapshot).await;
2638 }
2639 });
2640 }
2641 })
2642 .await;
2643 }
2644
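    /// Drops git repository entries whose `.git` directory is no longer present in
    /// the snapshot.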
2645 fn update_git_repositories(&self) {
2646 let mut snapshot = self.snapshot.lock();
2647 let mut git_repositories = mem::take(&mut snapshot.git_repositories);
2648 git_repositories.retain(|repo| snapshot.entry_for_path(&repo.git_dir_path).is_some());
2649 snapshot.git_repositories = git_repositories;
2650 }
2651
2652 async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) {
2653 let mut ignore_stack = job.ignore_stack;
2654 if let Some((ignore, _)) = snapshot.ignores_by_parent_abs_path.get(&job.abs_path) {
2655 ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone());
2656 }
2657
2658 let mut entries_by_id_edits = Vec::new();
2659 let mut entries_by_path_edits = Vec::new();
2660 let path = job.abs_path.strip_prefix(&snapshot.abs_path).unwrap();
2661 for mut entry in snapshot.child_entries(path).cloned() {
2662 let was_ignored = entry.is_ignored;
2663 let abs_path = self.abs_path().join(&entry.path);
2664 entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir());
2665 if entry.is_dir() {
2666 let child_ignore_stack = if entry.is_ignored {
2667 IgnoreStack::all()
2668 } else {
2669 ignore_stack.clone()
2670 };
2671 job.ignore_queue
2672 .send(UpdateIgnoreStatusJob {
2673 abs_path: abs_path.into(),
2674 ignore_stack: child_ignore_stack,
2675 ignore_queue: job.ignore_queue.clone(),
2676 })
2677 .await
2678 .unwrap();
2679 }
2680
2681 if entry.is_ignored != was_ignored {
2682 let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone();
2683 path_entry.scan_id = snapshot.scan_id;
2684 path_entry.is_ignored = entry.is_ignored;
2685 entries_by_id_edits.push(Edit::Insert(path_entry));
2686 entries_by_path_edits.push(Edit::Insert(entry));
2687 }
2688 }
2689
2690 let mut snapshot = self.snapshot.lock();
2691 snapshot.entries_by_path.edit(entries_by_path_edits, &());
2692 snapshot.entries_by_id.edit(entries_by_id_edits, &());
2693 }
2694}
2695
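/// Extends the root's character bag with the lowercased characters of `path`,
/// producing the bag used to fuzzy-match against this entry.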
2696fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
2697 let mut result = root_char_bag;
2698 result.extend(
2699 path.to_string_lossy()
2700 .chars()
2701 .map(|c| c.to_ascii_lowercase()),
2702 );
2703 result
2704}
2705
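/// A unit of work for the directory scan: a directory to read, along with the
/// ignore stack and ancestor inodes in effect at that directory.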
2706struct ScanJob {
2707 abs_path: PathBuf,
2708 path: Arc<Path>,
2709 ignore_stack: Arc<IgnoreStack>,
2710 scan_queue: Sender<ScanJob>,
2711 ancestor_inodes: TreeSet<u64>,
2712}
2713
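/// A unit of work for `update_ignore_statuses`: re-evaluates the ignore status of
/// the children of `abs_path` under the given ignore stack, enqueueing further jobs
/// for subdirectories.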
2714struct UpdateIgnoreStatusJob {
2715 abs_path: Arc<Path>,
2716 ignore_stack: Arc<IgnoreStack>,
2717 ignore_queue: Sender<UpdateIgnoreStatusJob>,
2718}
2719
2720pub trait WorktreeHandle {
2721 #[cfg(any(test, feature = "test-support"))]
2722 fn flush_fs_events<'a>(
2723 &self,
2724 cx: &'a gpui::TestAppContext,
2725 ) -> futures::future::LocalBoxFuture<'a, ()>;
2726}
2727
2728impl WorktreeHandle for ModelHandle<Worktree> {
    // The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
    // occurred before the worktree was constructed. These events can cause the worktree to perform
    // extra directory scans and emit extra scan-state notifications.
2732 //
2733 // This function mutates the worktree's directory and waits for those mutations to be picked up,
2734 // to ensure that all redundant FS events have already been processed.
2735 #[cfg(any(test, feature = "test-support"))]
2736 fn flush_fs_events<'a>(
2737 &self,
2738 cx: &'a gpui::TestAppContext,
2739 ) -> futures::future::LocalBoxFuture<'a, ()> {
2740 use smol::future::FutureExt;
2741
2742 let filename = "fs-event-sentinel";
2743 let tree = self.clone();
2744 let (fs, root_path) = self.read_with(cx, |tree, _| {
2745 let tree = tree.as_local().unwrap();
2746 (tree.fs.clone(), tree.abs_path().clone())
2747 });
2748
2749 async move {
2750 fs.create_file(&root_path.join(filename), Default::default())
2751 .await
2752 .unwrap();
2753 tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_some())
2754 .await;
2755
2756 fs.remove_file(&root_path.join(filename), Default::default())
2757 .await
2758 .unwrap();
2759 tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_none())
2760 .await;
2761
2762 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2763 .await;
2764 }
2765 .boxed_local()
2766 }
2767}
2768
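/// A seek dimension accumulated while walking `entries_by_path`, tracking the path
/// reached so far and how many entries, visible entries, files, and visible files
/// precede it.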
2769#[derive(Clone, Debug)]
2770struct TraversalProgress<'a> {
2771 max_path: &'a Path,
2772 count: usize,
2773 visible_count: usize,
2774 file_count: usize,
2775 visible_file_count: usize,
2776}
2777
2778impl<'a> TraversalProgress<'a> {
2779 fn count(&self, include_dirs: bool, include_ignored: bool) -> usize {
2780 match (include_ignored, include_dirs) {
2781 (true, true) => self.count,
2782 (true, false) => self.file_count,
2783 (false, true) => self.visible_count,
2784 (false, false) => self.visible_file_count,
2785 }
2786 }
2787}
2788
2789impl<'a> sum_tree::Dimension<'a, EntrySummary> for TraversalProgress<'a> {
2790 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2791 self.max_path = summary.max_path.as_ref();
2792 self.count += summary.count;
2793 self.visible_count += summary.visible_count;
2794 self.file_count += summary.file_count;
2795 self.visible_file_count += summary.visible_file_count;
2796 }
2797}
2798
2799impl<'a> Default for TraversalProgress<'a> {
2800 fn default() -> Self {
2801 Self {
2802 max_path: Path::new(""),
2803 count: 0,
2804 visible_count: 0,
2805 file_count: 0,
2806 visible_file_count: 0,
2807 }
2808 }
2809}
2810
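/// An iterator over a snapshot's entries in path order, optionally skipping ignored
/// entries and/or directories.
///
/// A rough usage sketch, mirroring the tests below (`snapshot` is assumed to be a
/// `Snapshot`):
///
/// ```ignore
/// // Collect the paths of all non-ignored entries.
/// let visible_paths: Vec<&Path> = snapshot
///     .entries(false) // `false` skips ignored entries
///     .map(|entry| entry.path.as_ref())
///     .collect();
/// ```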
2811pub struct Traversal<'a> {
2812 cursor: sum_tree::Cursor<'a, Entry, TraversalProgress<'a>>,
2813 include_ignored: bool,
2814 include_dirs: bool,
2815}
2816
2817impl<'a> Traversal<'a> {
2818 pub fn advance(&mut self) -> bool {
2819 self.advance_to_offset(self.offset() + 1)
2820 }
2821
2822 pub fn advance_to_offset(&mut self, offset: usize) -> bool {
2823 self.cursor.seek_forward(
2824 &TraversalTarget::Count {
2825 count: offset,
2826 include_dirs: self.include_dirs,
2827 include_ignored: self.include_ignored,
2828 },
2829 Bias::Right,
2830 &(),
2831 )
2832 }
2833
2834 pub fn advance_to_sibling(&mut self) -> bool {
2835 while let Some(entry) = self.cursor.item() {
2836 self.cursor.seek_forward(
2837 &TraversalTarget::PathSuccessor(&entry.path),
2838 Bias::Left,
2839 &(),
2840 );
2841 if let Some(entry) = self.cursor.item() {
2842 if (self.include_dirs || !entry.is_dir())
2843 && (self.include_ignored || !entry.is_ignored)
2844 {
2845 return true;
2846 }
2847 }
2848 }
2849 false
2850 }
2851
2852 pub fn entry(&self) -> Option<&'a Entry> {
2853 self.cursor.item()
2854 }
2855
2856 pub fn offset(&self) -> usize {
2857 self.cursor
2858 .start()
2859 .count(self.include_dirs, self.include_ignored)
2860 }
2861}
2862
2863impl<'a> Iterator for Traversal<'a> {
2864 type Item = &'a Entry;
2865
2866 fn next(&mut self) -> Option<Self::Item> {
2867 if let Some(item) = self.entry() {
2868 self.advance();
2869 Some(item)
2870 } else {
2871 None
2872 }
2873 }
2874}
2875
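/// A seek target for `Traversal`: an exact path, the first path that is not a
/// descendant of the given path, or the nth entry counted according to the
/// traversal's `include_dirs`/`include_ignored` filters.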
2876#[derive(Debug)]
2877enum TraversalTarget<'a> {
2878 Path(&'a Path),
2879 PathSuccessor(&'a Path),
2880 Count {
2881 count: usize,
2882 include_ignored: bool,
2883 include_dirs: bool,
2884 },
2885}
2886
2887impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> {
2888 fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering {
2889 match self {
2890 TraversalTarget::Path(path) => path.cmp(&cursor_location.max_path),
2891 TraversalTarget::PathSuccessor(path) => {
2892 if !cursor_location.max_path.starts_with(path) {
2893 Ordering::Equal
2894 } else {
2895 Ordering::Greater
2896 }
2897 }
2898 TraversalTarget::Count {
2899 count,
2900 include_dirs,
2901 include_ignored,
2902 } => Ord::cmp(
2903 count,
2904 &cursor_location.count(*include_dirs, *include_ignored),
2905 ),
2906 }
2907 }
2908}
2909
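/// Iterates over the immediate children of `parent_path`, advancing to the next
/// sibling after yielding each child so that grandchildren are skipped.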
2910struct ChildEntriesIter<'a> {
2911 parent_path: &'a Path,
2912 traversal: Traversal<'a>,
2913}
2914
2915impl<'a> Iterator for ChildEntriesIter<'a> {
2916 type Item = &'a Entry;
2917
2918 fn next(&mut self) -> Option<Self::Item> {
2919 if let Some(item) = self.traversal.entry() {
2920 if item.path.starts_with(&self.parent_path) {
2921 self.traversal.advance_to_sibling();
2922 return Some(item);
2923 }
2924 }
2925 None
2926 }
2927}
2928
2929impl<'a> From<&'a Entry> for proto::Entry {
2930 fn from(entry: &'a Entry) -> Self {
2931 Self {
2932 id: entry.id.to_proto(),
2933 is_dir: entry.is_dir(),
2934 path: entry.path.as_os_str().as_bytes().to_vec(),
2935 inode: entry.inode,
2936 mtime: Some(entry.mtime.into()),
2937 is_symlink: entry.is_symlink,
2938 is_ignored: entry.is_ignored,
2939 }
2940 }
2941}
2942
2943impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
2944 type Error = anyhow::Error;
2945
2946 fn try_from((root_char_bag, entry): (&'a CharBag, proto::Entry)) -> Result<Self> {
2947 if let Some(mtime) = entry.mtime {
2948 let kind = if entry.is_dir {
2949 EntryKind::Dir
2950 } else {
2951 let mut char_bag = *root_char_bag;
2952 char_bag.extend(
2953 String::from_utf8_lossy(&entry.path)
2954 .chars()
2955 .map(|c| c.to_ascii_lowercase()),
2956 );
2957 EntryKind::File(char_bag)
2958 };
2959 let path: Arc<Path> = PathBuf::from(OsString::from_vec(entry.path)).into();
2960 Ok(Entry {
2961 id: ProjectEntryId::from_proto(entry.id),
2962 kind,
2963 path,
2964 inode: entry.inode,
2965 mtime: mtime.into(),
2966 is_symlink: entry.is_symlink,
2967 is_ignored: entry.is_ignored,
2968 })
2969 } else {
2970 Err(anyhow!(
2971 "missing mtime in remote worktree entry {:?}",
2972 entry.path
2973 ))
2974 }
2975 }
2976}
2977
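/// Sends a worktree update to the server, splitting it into chunks via
/// `proto::split_worktree_update` and sending each chunk as a separate request.
/// A deliberately tiny chunk size is used under test so that chunking is exercised.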
2978async fn send_worktree_update(client: &Arc<Client>, update: proto::UpdateWorktree) -> Result<()> {
2979 #[cfg(any(test, feature = "test-support"))]
2980 const MAX_CHUNK_SIZE: usize = 2;
2981 #[cfg(not(any(test, feature = "test-support")))]
2982 const MAX_CHUNK_SIZE: usize = 256;
2983
2984 for update in proto::split_worktree_update(update, MAX_CHUNK_SIZE) {
2985 client.request(update).await?;
2986 }
2987
2988 Ok(())
2989}
2990
2991#[cfg(test)]
2992mod tests {
2993 use super::*;
2994 use crate::fs::FakeFs;
2995 use anyhow::Result;
2996 use client::test::FakeHttpClient;
2997 use fs::RealFs;
2998 use git::repository::FakeGitRepository;
2999 use gpui::{executor::Deterministic, TestAppContext};
3000 use rand::prelude::*;
3001 use serde_json::json;
3002 use std::{
3003 env,
3004 fmt::Write,
3005 time::{SystemTime, UNIX_EPOCH},
3006 };
3007
3008 use util::test::temp_tree;
3009
3010 #[gpui::test]
3011 async fn test_traversal(cx: &mut TestAppContext) {
3012 let fs = FakeFs::new(cx.background());
3013 fs.insert_tree(
3014 "/root",
3015 json!({
3016 ".gitignore": "a/b\n",
3017 "a": {
3018 "b": "",
3019 "c": "",
3020 }
3021 }),
3022 )
3023 .await;
3024
3025 let http_client = FakeHttpClient::with_404_response();
3026 let client = cx.read(|cx| Client::new(http_client, cx));
3027
3028 let tree = Worktree::local(
3029 client,
3030 Arc::from(Path::new("/root")),
3031 true,
3032 fs,
3033 Default::default(),
3034 &mut cx.to_async(),
3035 )
3036 .await
3037 .unwrap();
3038 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3039 .await;
3040
3041 tree.read_with(cx, |tree, _| {
3042 assert_eq!(
3043 tree.entries(false)
3044 .map(|entry| entry.path.as_ref())
3045 .collect::<Vec<_>>(),
3046 vec![
3047 Path::new(""),
3048 Path::new(".gitignore"),
3049 Path::new("a"),
3050 Path::new("a/c"),
3051 ]
3052 );
3053 assert_eq!(
3054 tree.entries(true)
3055 .map(|entry| entry.path.as_ref())
3056 .collect::<Vec<_>>(),
3057 vec![
3058 Path::new(""),
3059 Path::new(".gitignore"),
3060 Path::new("a"),
3061 Path::new("a/b"),
3062 Path::new("a/c"),
3063 ]
3064 );
3065 })
3066 }
3067
3068 #[gpui::test(iterations = 10)]
3069 async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
3070 let fs = FakeFs::new(cx.background());
3071 fs.insert_tree(
3072 "/root",
3073 json!({
3074 "lib": {
3075 "a": {
3076 "a.txt": ""
3077 },
3078 "b": {
3079 "b.txt": ""
3080 }
3081 }
3082 }),
3083 )
3084 .await;
3085 fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
3086 fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
3087
3088 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3089 let tree = Worktree::local(
3090 client,
3091 Arc::from(Path::new("/root")),
3092 true,
3093 fs.clone(),
3094 Default::default(),
3095 &mut cx.to_async(),
3096 )
3097 .await
3098 .unwrap();
3099
3100 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3101 .await;
3102
3103 tree.read_with(cx, |tree, _| {
3104 assert_eq!(
3105 tree.entries(false)
3106 .map(|entry| entry.path.as_ref())
3107 .collect::<Vec<_>>(),
3108 vec![
3109 Path::new(""),
3110 Path::new("lib"),
3111 Path::new("lib/a"),
3112 Path::new("lib/a/a.txt"),
3113 Path::new("lib/a/lib"),
3114 Path::new("lib/b"),
3115 Path::new("lib/b/b.txt"),
3116 Path::new("lib/b/lib"),
3117 ]
3118 );
3119 });
3120
3121 fs.rename(
3122 Path::new("/root/lib/a/lib"),
3123 Path::new("/root/lib/a/lib-2"),
3124 Default::default(),
3125 )
3126 .await
3127 .unwrap();
3128 executor.run_until_parked();
3129 tree.read_with(cx, |tree, _| {
3130 assert_eq!(
3131 tree.entries(false)
3132 .map(|entry| entry.path.as_ref())
3133 .collect::<Vec<_>>(),
3134 vec![
3135 Path::new(""),
3136 Path::new("lib"),
3137 Path::new("lib/a"),
3138 Path::new("lib/a/a.txt"),
3139 Path::new("lib/a/lib-2"),
3140 Path::new("lib/b"),
3141 Path::new("lib/b/b.txt"),
3142 Path::new("lib/b/lib"),
3143 ]
3144 );
3145 });
3146 }
3147
3148 #[gpui::test]
3149 async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
3150 let parent_dir = temp_tree(json!({
3151 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
3152 "tree": {
3153 ".git": {},
3154 ".gitignore": "ignored-dir\n",
3155 "tracked-dir": {
3156 "tracked-file1": "",
3157 "ancestor-ignored-file1": "",
3158 },
3159 "ignored-dir": {
3160 "ignored-file1": ""
3161 }
3162 }
3163 }));
3164 let dir = parent_dir.path().join("tree");
3165
3166 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3167
3168 let tree = Worktree::local(
3169 client,
3170 dir.as_path(),
3171 true,
3172 Arc::new(RealFs),
3173 Default::default(),
3174 &mut cx.to_async(),
3175 )
3176 .await
3177 .unwrap();
3178 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3179 .await;
3180 tree.flush_fs_events(cx).await;
3181 cx.read(|cx| {
3182 let tree = tree.read(cx);
3183 assert!(
3184 !tree
3185 .entry_for_path("tracked-dir/tracked-file1")
3186 .unwrap()
3187 .is_ignored
3188 );
3189 assert!(
3190 tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
3191 .unwrap()
3192 .is_ignored
3193 );
3194 assert!(
3195 tree.entry_for_path("ignored-dir/ignored-file1")
3196 .unwrap()
3197 .is_ignored
3198 );
3199 });
3200
3201 std::fs::write(dir.join("tracked-dir/tracked-file2"), "").unwrap();
3202 std::fs::write(dir.join("tracked-dir/ancestor-ignored-file2"), "").unwrap();
3203 std::fs::write(dir.join("ignored-dir/ignored-file2"), "").unwrap();
3204 tree.flush_fs_events(cx).await;
3205 cx.read(|cx| {
3206 let tree = tree.read(cx);
3207 assert!(
3208 !tree
3209 .entry_for_path("tracked-dir/tracked-file2")
3210 .unwrap()
3211 .is_ignored
3212 );
3213 assert!(
3214 tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
3215 .unwrap()
3216 .is_ignored
3217 );
3218 assert!(
3219 tree.entry_for_path("ignored-dir/ignored-file2")
3220 .unwrap()
3221 .is_ignored
3222 );
3223 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
3224 });
3225 }
3226
3227 #[gpui::test]
3228 async fn test_git_repository_for_path(cx: &mut TestAppContext) {
3229 let root = temp_tree(json!({
3230 "dir1": {
3231 ".git": {},
3232 "deps": {
3233 "dep1": {
3234 ".git": {},
3235 "src": {
3236 "a.txt": ""
3237 }
3238 }
3239 },
3240 "src": {
3241 "b.txt": ""
3242 }
3243 },
3244 "c.txt": "",
3245
3246 }));
3247
3248 let http_client = FakeHttpClient::with_404_response();
3249 let client = cx.read(|cx| Client::new(http_client, cx));
3250 let tree = Worktree::local(
3251 client,
3252 root.path(),
3253 true,
3254 Arc::new(RealFs),
3255 Default::default(),
3256 &mut cx.to_async(),
3257 )
3258 .await
3259 .unwrap();
3260
3261 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3262 .await;
3263 tree.flush_fs_events(cx).await;
3264
3265 tree.read_with(cx, |tree, _cx| {
3266 let tree = tree.as_local().unwrap();
3267
3268 assert!(tree.repo_for("c.txt".as_ref()).is_none());
3269
3270 let repo = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap();
3271 assert_eq!(repo.content_path.as_ref(), Path::new("dir1"));
3272 assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/.git"));
3273
3274 let repo = tree.repo_for("dir1/deps/dep1/src/a.txt".as_ref()).unwrap();
3275 assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1"));
3276 assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/deps/dep1/.git"),);
3277 });
3278
3279 let original_scan_id = tree.read_with(cx, |tree, _cx| {
3280 let tree = tree.as_local().unwrap();
3281 tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id
3282 });
3283
3284 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
3285 tree.flush_fs_events(cx).await;
3286
3287 tree.read_with(cx, |tree, _cx| {
3288 let tree = tree.as_local().unwrap();
3289 let new_scan_id = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id;
3290 assert_ne!(
3291 original_scan_id, new_scan_id,
3292 "original {original_scan_id}, new {new_scan_id}"
3293 );
3294 });
3295
3296 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
3297 tree.flush_fs_events(cx).await;
3298
3299 tree.read_with(cx, |tree, _cx| {
3300 let tree = tree.as_local().unwrap();
3301
3302 assert!(tree.repo_for("dir1/src/b.txt".as_ref()).is_none());
3303 });
3304 }
3305
3306 #[test]
3307 fn test_changed_repos() {
3308 fn fake_entry(git_dir_path: impl AsRef<Path>, scan_id: usize) -> GitRepositoryEntry {
3309 GitRepositoryEntry {
3310 repo: Arc::new(Mutex::new(FakeGitRepository::default())),
3311 scan_id,
3312 content_path: git_dir_path.as_ref().parent().unwrap().into(),
3313 git_dir_path: git_dir_path.as_ref().into(),
3314 }
3315 }
3316
3317 let prev_repos: Vec<GitRepositoryEntry> = vec![
3318 fake_entry("/.git", 0),
3319 fake_entry("/a/.git", 0),
3320 fake_entry("/a/b/.git", 0),
3321 ];
3322
3323 let new_repos: Vec<GitRepositoryEntry> = vec![
3324 fake_entry("/a/.git", 1),
3325 fake_entry("/a/b/.git", 0),
3326 fake_entry("/a/c/.git", 0),
3327 ];
3328
3329 let res = LocalWorktree::changed_repos(&prev_repos, &new_repos);
3330
3331 // Deletion retained
3332 assert!(res
3333 .iter()
3334 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/.git") && repo.scan_id == 0)
3335 .is_some());
3336
3337 // Update retained
3338 assert!(res
3339 .iter()
3340 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/.git") && repo.scan_id == 1)
3341 .is_some());
3342
3343 // Addition retained
3344 assert!(res
3345 .iter()
3346 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/c/.git") && repo.scan_id == 0)
3347 .is_some());
3348
        // No change, not retained
3350 assert!(res
3351 .iter()
3352 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/b/.git") && repo.scan_id == 0)
3353 .is_none());
3354 }
3355
3356 #[gpui::test]
3357 async fn test_write_file(cx: &mut TestAppContext) {
3358 let dir = temp_tree(json!({
3359 ".git": {},
3360 ".gitignore": "ignored-dir\n",
3361 "tracked-dir": {},
3362 "ignored-dir": {}
3363 }));
3364
3365 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3366
3367 let tree = Worktree::local(
3368 client,
3369 dir.path(),
3370 true,
3371 Arc::new(RealFs),
3372 Default::default(),
3373 &mut cx.to_async(),
3374 )
3375 .await
3376 .unwrap();
3377 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3378 .await;
3379 tree.flush_fs_events(cx).await;
3380
3381 tree.update(cx, |tree, cx| {
3382 tree.as_local().unwrap().write_file(
3383 Path::new("tracked-dir/file.txt"),
3384 "hello".into(),
3385 Default::default(),
3386 cx,
3387 )
3388 })
3389 .await
3390 .unwrap();
3391 tree.update(cx, |tree, cx| {
3392 tree.as_local().unwrap().write_file(
3393 Path::new("ignored-dir/file.txt"),
3394 "world".into(),
3395 Default::default(),
3396 cx,
3397 )
3398 })
3399 .await
3400 .unwrap();
3401
3402 tree.read_with(cx, |tree, _| {
3403 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
3404 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
3405 assert!(!tracked.is_ignored);
3406 assert!(ignored.is_ignored);
3407 });
3408 }
3409
3410 #[gpui::test(iterations = 30)]
3411 async fn test_create_directory(cx: &mut TestAppContext) {
3412 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3413
3414 let fs = FakeFs::new(cx.background());
3415 fs.insert_tree(
3416 "/a",
3417 json!({
3418 "b": {},
3419 "c": {},
3420 "d": {},
3421 }),
3422 )
3423 .await;
3424
3425 let tree = Worktree::local(
3426 client,
3427 "/a".as_ref(),
3428 true,
3429 fs,
3430 Default::default(),
3431 &mut cx.to_async(),
3432 )
3433 .await
3434 .unwrap();
3435
3436 let entry = tree
3437 .update(cx, |tree, cx| {
3438 tree.as_local_mut()
3439 .unwrap()
3440 .create_entry("a/e".as_ref(), true, cx)
3441 })
3442 .await
3443 .unwrap();
3444 assert!(entry.is_dir());
3445
3446 cx.foreground().run_until_parked();
3447 tree.read_with(cx, |tree, _| {
3448 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
3449 });
3450 }
3451
3452 #[gpui::test(iterations = 100)]
3453 fn test_random(mut rng: StdRng) {
3454 let operations = env::var("OPERATIONS")
3455 .map(|o| o.parse().unwrap())
3456 .unwrap_or(40);
3457 let initial_entries = env::var("INITIAL_ENTRIES")
3458 .map(|o| o.parse().unwrap())
3459 .unwrap_or(20);
3460
3461 let root_dir = tempdir::TempDir::new("worktree-test").unwrap();
3462 for _ in 0..initial_entries {
3463 randomly_mutate_tree(root_dir.path(), 1.0, &mut rng).unwrap();
3464 }
3465 log::info!("Generated initial tree");
3466
3467 let (notify_tx, _notify_rx) = mpsc::unbounded();
3468 let fs = Arc::new(RealFs);
3469 let next_entry_id = Arc::new(AtomicUsize::new(0));
3470 let mut initial_snapshot = LocalSnapshot {
3471 abs_path: root_dir.path().into(),
3472 removed_entry_ids: Default::default(),
3473 ignores_by_parent_abs_path: Default::default(),
3474 git_repositories: Default::default(),
3475 next_entry_id: next_entry_id.clone(),
3476 snapshot: Snapshot {
3477 id: WorktreeId::from_usize(0),
3478 entries_by_path: Default::default(),
3479 entries_by_id: Default::default(),
3480 root_name: Default::default(),
3481 root_char_bag: Default::default(),
3482 scan_id: 0,
3483 is_complete: true,
3484 },
3485 extension_counts: Default::default(),
3486 };
3487 initial_snapshot.insert_entry(
3488 Entry::new(
3489 Path::new("").into(),
3490 &smol::block_on(fs.metadata(root_dir.path()))
3491 .unwrap()
3492 .unwrap(),
3493 &next_entry_id,
3494 Default::default(),
3495 ),
3496 fs.as_ref(),
3497 );
3498 let mut scanner = BackgroundScanner::new(
3499 Arc::new(Mutex::new(initial_snapshot.clone())),
3500 notify_tx,
3501 fs.clone(),
3502 Arc::new(gpui::executor::Background::new()),
3503 );
3504 smol::block_on(scanner.scan_dirs()).unwrap();
3505 scanner.snapshot().check_invariants();
3506
3507 let mut events = Vec::new();
3508 let mut snapshots = Vec::new();
3509 let mut mutations_len = operations;
3510 while mutations_len > 1 {
3511 if !events.is_empty() && rng.gen_bool(0.4) {
3512 let len = rng.gen_range(0..=events.len());
3513 let to_deliver = events.drain(0..len).collect::<Vec<_>>();
3514 log::info!("Delivering events: {:#?}", to_deliver);
3515 smol::block_on(scanner.process_events(to_deliver));
3516 scanner.snapshot().check_invariants();
3517 } else {
3518 events.extend(randomly_mutate_tree(root_dir.path(), 0.6, &mut rng).unwrap());
3519 mutations_len -= 1;
3520 }
3521
3522 if rng.gen_bool(0.2) {
3523 snapshots.push(scanner.snapshot());
3524 }
3525 }
3526 log::info!("Quiescing: {:#?}", events);
3527 smol::block_on(scanner.process_events(events));
3528 scanner.snapshot().check_invariants();
3529
3530 let (notify_tx, _notify_rx) = mpsc::unbounded();
3531 let mut new_scanner = BackgroundScanner::new(
3532 Arc::new(Mutex::new(initial_snapshot)),
3533 notify_tx,
3534 scanner.fs.clone(),
3535 scanner.executor.clone(),
3536 );
3537 smol::block_on(new_scanner.scan_dirs()).unwrap();
3538 assert_eq!(
3539 scanner.snapshot().to_vec(true),
3540 new_scanner.snapshot().to_vec(true)
3541 );
3542
3543 for mut prev_snapshot in snapshots {
3544 let include_ignored = rng.gen::<bool>();
3545 if !include_ignored {
3546 let mut entries_by_path_edits = Vec::new();
3547 let mut entries_by_id_edits = Vec::new();
3548 for entry in prev_snapshot
3549 .entries_by_id
3550 .cursor::<()>()
3551 .filter(|e| e.is_ignored)
3552 {
3553 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
3554 entries_by_id_edits.push(Edit::Remove(entry.id));
3555 }
3556
3557 prev_snapshot
3558 .entries_by_path
3559 .edit(entries_by_path_edits, &());
3560 prev_snapshot.entries_by_id.edit(entries_by_id_edits, &());
3561 }
3562
3563 let update = scanner
3564 .snapshot()
3565 .build_update(&prev_snapshot, 0, 0, include_ignored);
3566 prev_snapshot.apply_remote_update(update).unwrap();
3567 assert_eq!(
3568 prev_snapshot.to_vec(true),
3569 scanner.snapshot().to_vec(include_ignored)
3570 );
3571 }
3572 }
3573
3574 fn randomly_mutate_tree(
3575 root_path: &Path,
3576 insertion_probability: f64,
3577 rng: &mut impl Rng,
3578 ) -> Result<Vec<fsevent::Event>> {
3579 let root_path = root_path.canonicalize().unwrap();
3580 let (dirs, files) = read_dir_recursive(root_path.clone());
3581
3582 let mut events = Vec::new();
3583 let mut record_event = |path: PathBuf| {
3584 events.push(fsevent::Event {
3585 event_id: SystemTime::now()
3586 .duration_since(UNIX_EPOCH)
3587 .unwrap()
3588 .as_secs(),
3589 flags: fsevent::StreamFlags::empty(),
3590 path,
3591 });
3592 };
3593
3594 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
3595 let path = dirs.choose(rng).unwrap();
3596 let new_path = path.join(gen_name(rng));
3597
3598 if rng.gen() {
3599 log::info!("Creating dir {:?}", new_path.strip_prefix(root_path)?);
3600 std::fs::create_dir(&new_path)?;
3601 } else {
3602 log::info!("Creating file {:?}", new_path.strip_prefix(root_path)?);
3603 std::fs::write(&new_path, "")?;
3604 }
3605 record_event(new_path);
3606 } else if rng.gen_bool(0.05) {
3607 let ignore_dir_path = dirs.choose(rng).unwrap();
3608 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
3609
3610 let (subdirs, subfiles) = read_dir_recursive(ignore_dir_path.clone());
3611 let files_to_ignore = {
3612 let len = rng.gen_range(0..=subfiles.len());
3613 subfiles.choose_multiple(rng, len)
3614 };
3615 let dirs_to_ignore = {
3616 let len = rng.gen_range(0..subdirs.len());
3617 subdirs.choose_multiple(rng, len)
3618 };
3619
3620 let mut ignore_contents = String::new();
3621 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
3622 writeln!(
3623 ignore_contents,
3624 "{}",
3625 path_to_ignore
3626 .strip_prefix(&ignore_dir_path)?
3627 .to_str()
3628 .unwrap()
3629 )
3630 .unwrap();
3631 }
3632 log::info!(
3633 "Creating {:?} with contents:\n{}",
3634 ignore_path.strip_prefix(&root_path)?,
3635 ignore_contents
3636 );
3637 std::fs::write(&ignore_path, ignore_contents).unwrap();
3638 record_event(ignore_path);
3639 } else {
3640 let old_path = {
3641 let file_path = files.choose(rng);
3642 let dir_path = dirs[1..].choose(rng);
3643 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
3644 };
3645
3646 let is_rename = rng.gen();
3647 if is_rename {
3648 let new_path_parent = dirs
3649 .iter()
3650 .filter(|d| !d.starts_with(old_path))
3651 .choose(rng)
3652 .unwrap();
3653
3654 let overwrite_existing_dir =
3655 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
3656 let new_path = if overwrite_existing_dir {
3657 std::fs::remove_dir_all(&new_path_parent).ok();
3658 new_path_parent.to_path_buf()
3659 } else {
3660 new_path_parent.join(gen_name(rng))
3661 };
3662
3663 log::info!(
3664 "Renaming {:?} to {}{:?}",
3665 old_path.strip_prefix(&root_path)?,
3666 if overwrite_existing_dir {
3667 "overwrite "
3668 } else {
3669 ""
3670 },
3671 new_path.strip_prefix(&root_path)?
3672 );
3673 std::fs::rename(&old_path, &new_path)?;
3674 record_event(old_path.clone());
3675 record_event(new_path);
3676 } else if old_path.is_dir() {
3677 let (dirs, files) = read_dir_recursive(old_path.clone());
3678
3679 log::info!("Deleting dir {:?}", old_path.strip_prefix(&root_path)?);
3680 std::fs::remove_dir_all(&old_path).unwrap();
3681 for file in files {
3682 record_event(file);
3683 }
3684 for dir in dirs {
3685 record_event(dir);
3686 }
3687 } else {
3688 log::info!("Deleting file {:?}", old_path.strip_prefix(&root_path)?);
3689 std::fs::remove_file(old_path).unwrap();
3690 record_event(old_path.clone());
3691 }
3692 }
3693
3694 Ok(events)
3695 }
3696
3697 fn read_dir_recursive(path: PathBuf) -> (Vec<PathBuf>, Vec<PathBuf>) {
3698 let child_entries = std::fs::read_dir(&path).unwrap();
3699 let mut dirs = vec![path];
3700 let mut files = Vec::new();
3701 for child_entry in child_entries {
3702 let child_path = child_entry.unwrap().path();
3703 if child_path.is_dir() {
3704 let (child_dirs, child_files) = read_dir_recursive(child_path);
3705 dirs.extend(child_dirs);
3706 files.extend(child_files);
3707 } else {
3708 files.push(child_path);
3709 }
3710 }
3711 (dirs, files)
3712 }
3713
3714 fn gen_name(rng: &mut impl Rng) -> String {
3715 (0..6)
3716 .map(|_| rng.sample(rand::distributions::Alphanumeric))
3717 .map(char::from)
3718 .collect()
3719 }
3720
3721 impl LocalSnapshot {
3722 fn check_invariants(&self) {
3723 let mut files = self.files(true, 0);
3724 let mut visible_files = self.files(false, 0);
3725 for entry in self.entries_by_path.cursor::<()>() {
3726 if entry.is_file() {
3727 assert_eq!(files.next().unwrap().inode, entry.inode);
3728 if !entry.is_ignored {
3729 assert_eq!(visible_files.next().unwrap().inode, entry.inode);
3730 }
3731 }
3732 }
3733 assert!(files.next().is_none());
3734 assert!(visible_files.next().is_none());
3735
3736 let mut bfs_paths = Vec::new();
3737 let mut stack = vec![Path::new("")];
3738 while let Some(path) = stack.pop() {
3739 bfs_paths.push(path);
3740 let ix = stack.len();
3741 for child_entry in self.child_entries(path) {
3742 stack.insert(ix, &child_entry.path);
3743 }
3744 }
3745
3746 let dfs_paths_via_iter = self
3747 .entries_by_path
3748 .cursor::<()>()
3749 .map(|e| e.path.as_ref())
3750 .collect::<Vec<_>>();
3751 assert_eq!(bfs_paths, dfs_paths_via_iter);
3752
3753 let dfs_paths_via_traversal = self
3754 .entries(true)
3755 .map(|e| e.path.as_ref())
3756 .collect::<Vec<_>>();
3757 assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);
3758
3759 for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
3760 let ignore_parent_path =
3761 ignore_parent_abs_path.strip_prefix(&self.abs_path).unwrap();
3762 assert!(self.entry_for_path(&ignore_parent_path).is_some());
3763 assert!(self
3764 .entry_for_path(ignore_parent_path.join(&*GITIGNORE))
3765 .is_some());
3766 }
3767
3768 // Ensure extension counts are correct.
3769 let mut expected_extension_counts = HashMap::default();
3770 for extension in self.entries(false).filter_map(|e| e.path.extension()) {
3771 *expected_extension_counts
3772 .entry(extension.into())
3773 .or_insert(0) += 1;
3774 }
3775 assert_eq!(self.extension_counts, expected_extension_counts);
3776 }
3777
3778 fn to_vec(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> {
3779 let mut paths = Vec::new();
3780 for entry in self.entries_by_path.cursor::<()>() {
3781 if include_ignored || !entry.is_ignored {
3782 paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
3783 }
3784 }
3785 paths.sort_by(|a, b| a.0.cmp(b.0));
3786 paths
3787 }
3788 }
3789}