1use super::{ignore::IgnoreStack, DiagnosticSummary};
2use crate::{copy_recursive, ProjectEntryId, RemoveOptions};
3use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
4use anyhow::{anyhow, Context, Result};
5use client::{proto, Client};
6use clock::ReplicaId;
7use collections::{HashMap, VecDeque};
8use fs::LineEnding;
9use fs::{repository::GitRepository, Fs};
10use futures::{
11 channel::{
12 mpsc::{self, UnboundedSender},
13 oneshot,
14 },
15 Stream, StreamExt,
16};
17use fuzzy::CharBag;
18use git::{DOT_GIT, GITIGNORE};
19use gpui::{
20 executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
21 Task,
22};
23use language::{
24 proto::{
25 deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
26 serialize_version,
27 },
28 Buffer, DiagnosticEntry, PointUtf16, Rope, RopeFingerprint, Unclipped,
29};
30use parking_lot::Mutex;
31use postage::{
32 prelude::{Sink as _, Stream as _},
33 watch,
34};
35
36use smol::channel::{self, Sender};
37use std::{
38 any::Any,
39 cmp::{self, Ordering},
40 convert::TryFrom,
41 ffi::OsStr,
42 fmt,
43 future::Future,
44 mem,
45 ops::{Deref, DerefMut},
46 path::{Path, PathBuf},
47 sync::{atomic::AtomicUsize, Arc},
48 task::Poll,
49 time::{Duration, SystemTime},
50};
51use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
52use util::paths::HOME;
53use util::{ResultExt, TryFutureExt};
54
55#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
56pub struct WorktreeId(usize);
57
58#[allow(clippy::large_enum_variant)]
59pub enum Worktree {
60 Local(LocalWorktree),
61 Remote(RemoteWorktree),
62}
63
64pub struct LocalWorktree {
65 snapshot: LocalSnapshot,
66 background_snapshot: Arc<Mutex<LocalSnapshot>>,
67 last_scan_state_rx: watch::Receiver<ScanState>,
68 _background_scanner_task: Option<Task<()>>,
69 poll_task: Option<Task<()>>,
70 share: Option<ShareState>,
71 diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<Unclipped<PointUtf16>>>>,
72 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
73 client: Arc<Client>,
74 fs: Arc<dyn Fs>,
75 visible: bool,
76}
77
78pub struct RemoteWorktree {
79 pub snapshot: Snapshot,
80 pub(crate) background_snapshot: Arc<Mutex<Snapshot>>,
81 project_id: u64,
82 client: Arc<Client>,
83 updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
84 snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
85 replica_id: ReplicaId,
86 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
87 visible: bool,
88 disconnected: bool,
89}
90
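/// A point-in-time view of a worktree's entries, indexed both by path and by
/// entry id, along with the scan ids describing how up to date the view is.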
91#[derive(Clone)]
92pub struct Snapshot {
93 id: WorktreeId,
94 abs_path: Arc<Path>,
95 root_name: String,
96 root_char_bag: CharBag,
97 entries_by_path: SumTree<Entry>,
98 entries_by_id: SumTree<PathEntry>,
99 scan_id: usize,
100 completed_scan_id: usize,
101}
102
103#[derive(Clone)]
104pub struct GitRepositoryEntry {
105 pub(crate) repo: Arc<Mutex<dyn GitRepository>>,
106
107 pub(crate) scan_id: usize,
    // Path to the folder containing the .git file or directory.
    pub(crate) content_path: Arc<Path>,
    // Path to the actual .git folder.
    // Note: if .git is a file, this points to the folder indicated by the .git file.
    pub(crate) git_dir_path: Arc<Path>,
113}
114
115impl std::fmt::Debug for GitRepositoryEntry {
116 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
117 f.debug_struct("GitRepositoryEntry")
118 .field("content_path", &self.content_path)
119 .field("git_dir_path", &self.git_dir_path)
120 .field("libgit_repository", &"LibGitRepository")
121 .finish()
122 }
123}
124
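/// A snapshot of a local worktree, extending `Snapshot` with host-only state:
/// parsed `.gitignore` files, discovered git repositories, and bookkeeping for
/// reusing entry ids across scans.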
125pub struct LocalSnapshot {
126 ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
127 git_repositories: Vec<GitRepositoryEntry>,
128 removed_entry_ids: HashMap<u64, ProjectEntryId>,
129 next_entry_id: Arc<AtomicUsize>,
130 snapshot: Snapshot,
131}
132
133impl Clone for LocalSnapshot {
134 fn clone(&self) -> Self {
135 Self {
136 ignores_by_parent_abs_path: self.ignores_by_parent_abs_path.clone(),
137 git_repositories: self.git_repositories.iter().cloned().collect(),
138 removed_entry_ids: self.removed_entry_ids.clone(),
139 next_entry_id: self.next_entry_id.clone(),
140 snapshot: self.snapshot.clone(),
141 }
142 }
143}
144
145impl Deref for LocalSnapshot {
146 type Target = Snapshot;
147
148 fn deref(&self) -> &Self::Target {
149 &self.snapshot
150 }
151}
152
153impl DerefMut for LocalSnapshot {
154 fn deref_mut(&mut self) -> &mut Self::Target {
155 &mut self.snapshot
156 }
157}
158
159#[derive(Clone, Debug)]
enum ScanState {
    /// The worktree is idle and up to date with the filesystem.
    Idle,
    /// The worktree is performing its initial scan of the filesystem.
    Initializing,
    /// The worktree is updating in response to filesystem events.
    Updating,
    /// The most recent scan failed with the given error.
    Err(Arc<anyhow::Error>),
}
168
169struct ShareState {
170 project_id: u64,
171 snapshots_tx: watch::Sender<LocalSnapshot>,
172 resume_updates: watch::Sender<()>,
173 _maintain_remote_snapshot: Task<Option<()>>,
174}
175
176pub enum Event {
177 UpdatedEntries,
178 UpdatedGitRepositories(Vec<GitRepositoryEntry>),
179}
180
181impl Entity for Worktree {
182 type Event = Event;
183}
184
185impl Worktree {
186 pub async fn local(
187 client: Arc<Client>,
188 path: impl Into<Arc<Path>>,
189 visible: bool,
190 fs: Arc<dyn Fs>,
191 next_entry_id: Arc<AtomicUsize>,
192 cx: &mut AsyncAppContext,
193 ) -> Result<ModelHandle<Self>> {
194 let (tree, scan_states_tx) =
195 LocalWorktree::create(client, path, visible, fs.clone(), next_entry_id, cx).await?;
196 tree.update(cx, |tree, cx| {
197 let tree = tree.as_local_mut().unwrap();
198 let abs_path = tree.abs_path().clone();
199 let background_snapshot = tree.background_snapshot.clone();
200 let background = cx.background().clone();
201 tree._background_scanner_task = Some(cx.background().spawn(async move {
202 let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
203 let scanner =
204 BackgroundScanner::new(background_snapshot, scan_states_tx, fs, background);
205 scanner.run(events).await;
206 }));
207 });
208 Ok(tree)
209 }
210
211 pub fn remote(
212 project_remote_id: u64,
213 replica_id: ReplicaId,
214 worktree: proto::WorktreeMetadata,
215 client: Arc<Client>,
216 cx: &mut MutableAppContext,
217 ) -> ModelHandle<Self> {
218 let remote_id = worktree.id;
219 let root_char_bag: CharBag = worktree
220 .root_name
221 .chars()
222 .map(|c| c.to_ascii_lowercase())
223 .collect();
224 let root_name = worktree.root_name.clone();
225 let visible = worktree.visible;
226
227 let abs_path = PathBuf::from(worktree.abs_path);
228 let snapshot = Snapshot {
229 id: WorktreeId(remote_id as usize),
230 abs_path: Arc::from(abs_path.deref()),
231 root_name,
232 root_char_bag,
233 entries_by_path: Default::default(),
234 entries_by_id: Default::default(),
235 scan_id: 0,
236 completed_scan_id: 0,
237 };
238
239 let (updates_tx, mut updates_rx) = mpsc::unbounded();
240 let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
241 let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
242 let worktree_handle = cx.add_model(|_: &mut ModelContext<Worktree>| {
243 Worktree::Remote(RemoteWorktree {
244 project_id: project_remote_id,
245 replica_id,
246 snapshot: snapshot.clone(),
247 background_snapshot: background_snapshot.clone(),
248 updates_tx: Some(updates_tx),
249 snapshot_subscriptions: Default::default(),
250 client: client.clone(),
251 diagnostic_summaries: Default::default(),
252 visible,
253 disconnected: false,
254 })
255 });
256
257 cx.background()
258 .spawn(async move {
259 while let Some(update) = updates_rx.next().await {
260 if let Err(error) = background_snapshot.lock().apply_remote_update(update) {
261 log::error!("error applying worktree update: {}", error);
262 }
263 snapshot_updated_tx.send(()).await.ok();
264 }
265 })
266 .detach();
267
268 cx.spawn(|mut cx| {
269 let this = worktree_handle.downgrade();
270 async move {
271 while (snapshot_updated_rx.recv().await).is_some() {
272 if let Some(this) = this.upgrade(&cx) {
273 this.update(&mut cx, |this, cx| {
274 this.poll_snapshot(cx);
275 let this = this.as_remote_mut().unwrap();
276 while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
277 if this.observed_snapshot(*scan_id) {
278 let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
279 let _ = tx.send(());
280 } else {
281 break;
282 }
283 }
284 });
285 } else {
286 break;
287 }
288 }
289 }
290 })
291 .detach();
292
293 worktree_handle
294 }
295
296 pub fn as_local(&self) -> Option<&LocalWorktree> {
297 if let Worktree::Local(worktree) = self {
298 Some(worktree)
299 } else {
300 None
301 }
302 }
303
304 pub fn as_remote(&self) -> Option<&RemoteWorktree> {
305 if let Worktree::Remote(worktree) = self {
306 Some(worktree)
307 } else {
308 None
309 }
310 }
311
312 pub fn as_local_mut(&mut self) -> Option<&mut LocalWorktree> {
313 if let Worktree::Local(worktree) = self {
314 Some(worktree)
315 } else {
316 None
317 }
318 }
319
320 pub fn as_remote_mut(&mut self) -> Option<&mut RemoteWorktree> {
321 if let Worktree::Remote(worktree) = self {
322 Some(worktree)
323 } else {
324 None
325 }
326 }
327
328 pub fn is_local(&self) -> bool {
329 matches!(self, Worktree::Local(_))
330 }
331
332 pub fn is_remote(&self) -> bool {
333 !self.is_local()
334 }
335
336 pub fn snapshot(&self) -> Snapshot {
337 match self {
338 Worktree::Local(worktree) => worktree.snapshot().snapshot,
339 Worktree::Remote(worktree) => worktree.snapshot(),
340 }
341 }
342
343 pub fn scan_id(&self) -> usize {
344 match self {
345 Worktree::Local(worktree) => worktree.snapshot.scan_id,
346 Worktree::Remote(worktree) => worktree.snapshot.scan_id,
347 }
348 }
349
350 pub fn completed_scan_id(&self) -> usize {
351 match self {
352 Worktree::Local(worktree) => worktree.snapshot.completed_scan_id,
353 Worktree::Remote(worktree) => worktree.snapshot.completed_scan_id,
354 }
355 }
356
357 pub fn is_visible(&self) -> bool {
358 match self {
359 Worktree::Local(worktree) => worktree.visible,
360 Worktree::Remote(worktree) => worktree.visible,
361 }
362 }
363
364 pub fn replica_id(&self) -> ReplicaId {
365 match self {
366 Worktree::Local(_) => 0,
367 Worktree::Remote(worktree) => worktree.replica_id,
368 }
369 }
370
371 pub fn diagnostic_summaries(
372 &self,
373 ) -> impl Iterator<Item = (Arc<Path>, DiagnosticSummary)> + '_ {
374 match self {
375 Worktree::Local(worktree) => &worktree.diagnostic_summaries,
376 Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
377 }
378 .iter()
379 .map(|(path, summary)| (path.0.clone(), *summary))
380 }
381
382 fn poll_snapshot(&mut self, cx: &mut ModelContext<Self>) {
383 match self {
384 Self::Local(worktree) => worktree.poll_snapshot(false, cx),
385 Self::Remote(worktree) => worktree.poll_snapshot(cx),
386 };
387 }
388
389 pub fn abs_path(&self) -> Arc<Path> {
390 match self {
391 Worktree::Local(worktree) => worktree.abs_path.clone(),
392 Worktree::Remote(worktree) => worktree.abs_path.clone(),
393 }
394 }
395}
396
397impl LocalWorktree {
398 async fn create(
399 client: Arc<Client>,
400 path: impl Into<Arc<Path>>,
401 visible: bool,
402 fs: Arc<dyn Fs>,
403 next_entry_id: Arc<AtomicUsize>,
404 cx: &mut AsyncAppContext,
405 ) -> Result<(ModelHandle<Worktree>, UnboundedSender<ScanState>)> {
406 let abs_path = path.into();
407 let path: Arc<Path> = Arc::from(Path::new(""));
408
        // After determining whether the root entry is a file or a directory, populate the
        // snapshot's "root name", which is used for fuzzy matching.
411 let root_name = abs_path
412 .file_name()
413 .map_or(String::new(), |f| f.to_string_lossy().to_string());
414 let root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect();
415 let metadata = fs
416 .metadata(&abs_path)
417 .await
418 .context("failed to stat worktree path")?;
419
420 let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
421 let (mut last_scan_state_tx, last_scan_state_rx) =
422 watch::channel_with(ScanState::Initializing);
423 let tree = cx.add_model(move |cx: &mut ModelContext<Worktree>| {
424 let mut snapshot = LocalSnapshot {
425 ignores_by_parent_abs_path: Default::default(),
426 git_repositories: Default::default(),
427 removed_entry_ids: Default::default(),
428 next_entry_id,
429 snapshot: Snapshot {
430 id: WorktreeId::from_usize(cx.model_id()),
431 abs_path,
432 root_name: root_name.clone(),
433 root_char_bag,
434 entries_by_path: Default::default(),
435 entries_by_id: Default::default(),
436 scan_id: 0,
437 completed_scan_id: 0,
438 },
439 };
440 if let Some(metadata) = metadata {
441 let entry = Entry::new(
442 path,
443 &metadata,
444 &snapshot.next_entry_id,
445 snapshot.root_char_bag,
446 );
447 snapshot.insert_entry(entry, fs.as_ref());
448 }
449
450 let tree = Self {
451 snapshot: snapshot.clone(),
452 background_snapshot: Arc::new(Mutex::new(snapshot)),
453 last_scan_state_rx,
454 _background_scanner_task: None,
455 share: None,
456 poll_task: None,
457 diagnostics: Default::default(),
458 diagnostic_summaries: Default::default(),
459 client,
460 fs,
461 visible,
462 };
463
464 cx.spawn_weak(|this, mut cx| async move {
465 while let Some(scan_state) = scan_states_rx.next().await {
466 if let Some(this) = this.upgrade(&cx) {
467 last_scan_state_tx.blocking_send(scan_state).ok();
468 this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
469 } else {
470 break;
471 }
472 }
473 })
474 .detach();
475
476 Worktree::Local(tree)
477 });
478
479 Ok((tree, scan_states_tx))
480 }
481
482 pub fn contains_abs_path(&self, path: &Path) -> bool {
483 path.starts_with(&self.abs_path)
484 }
485
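    // Converts a worktree-relative path into an absolute path. The empty (root)
    // path maps to the worktree's own absolute path.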
486 fn absolutize(&self, path: &Path) -> PathBuf {
487 if path.file_name().is_some() {
488 self.abs_path.join(path)
489 } else {
490 self.abs_path.to_path_buf()
491 }
492 }
493
494 pub(crate) fn load_buffer(
495 &mut self,
496 path: &Path,
497 cx: &mut ModelContext<Worktree>,
498 ) -> Task<Result<ModelHandle<Buffer>>> {
499 let path = Arc::from(path);
500 cx.spawn(move |this, mut cx| async move {
501 let (file, contents, diff_base) = this
502 .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
503 .await?;
504 Ok(cx.add_model(|cx| {
505 let mut buffer = Buffer::from_file(0, contents, diff_base, Arc::new(file), cx);
506 buffer.git_diff_recalc(cx);
507 buffer
508 }))
509 })
510 }
511
512 pub fn diagnostics_for_path(
513 &self,
514 path: &Path,
515 ) -> Option<Vec<DiagnosticEntry<Unclipped<PointUtf16>>>> {
516 self.diagnostics.get(path).cloned()
517 }
518
519 pub fn update_diagnostics(
520 &mut self,
521 language_server_id: usize,
522 worktree_path: Arc<Path>,
523 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
524 _: &mut ModelContext<Worktree>,
525 ) -> Result<bool> {
526 self.diagnostics.remove(&worktree_path);
527 let old_summary = self
528 .diagnostic_summaries
529 .remove(&PathKey(worktree_path.clone()))
530 .unwrap_or_default();
531 let new_summary = DiagnosticSummary::new(language_server_id, &diagnostics);
532 if !new_summary.is_empty() {
533 self.diagnostic_summaries
534 .insert(PathKey(worktree_path.clone()), new_summary);
535 self.diagnostics.insert(worktree_path.clone(), diagnostics);
536 }
537
538 let updated = !old_summary.is_empty() || !new_summary.is_empty();
539 if updated {
540 if let Some(share) = self.share.as_ref() {
541 self.client
542 .send(proto::UpdateDiagnosticSummary {
543 project_id: share.project_id,
544 worktree_id: self.id().to_proto(),
545 summary: Some(proto::DiagnosticSummary {
546 path: worktree_path.to_string_lossy().to_string(),
547 language_server_id: language_server_id as u64,
548 error_count: new_summary.error_count as u32,
549 warning_count: new_summary.warning_count as u32,
550 }),
551 })
552 .log_err();
553 }
554 }
555
556 Ok(updated)
557 }
558
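    // Pulls the latest snapshot produced by the background scanner into the
    // foreground snapshot, emitting `Event::UpdatedEntries` (and
    // `Event::UpdatedGitRepositories` when repositories changed). While the
    // initial scan is still running, polling is rescheduled after a short delay.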
559 fn poll_snapshot(&mut self, force: bool, cx: &mut ModelContext<Worktree>) {
560 self.poll_task.take();
561
562 match self.scan_state() {
563 ScanState::Idle => {
564 let new_snapshot = self.background_snapshot.lock().clone();
565 let updated_repos = Self::changed_repos(
566 &self.snapshot.git_repositories,
567 &new_snapshot.git_repositories,
568 );
569 self.snapshot = new_snapshot;
570
571 if let Some(share) = self.share.as_mut() {
572 *share.snapshots_tx.borrow_mut() = self.snapshot.clone();
573 }
574
575 cx.emit(Event::UpdatedEntries);
576
577 if !updated_repos.is_empty() {
578 cx.emit(Event::UpdatedGitRepositories(updated_repos));
579 }
580 }
581
582 ScanState::Initializing => {
583 let is_fake_fs = self.fs.is_fake();
584
585 let new_snapshot = self.background_snapshot.lock().clone();
586 let updated_repos = Self::changed_repos(
587 &self.snapshot.git_repositories,
588 &new_snapshot.git_repositories,
589 );
590 self.snapshot = new_snapshot;
591
592 self.poll_task = Some(cx.spawn_weak(|this, mut cx| async move {
593 if is_fake_fs {
594 #[cfg(any(test, feature = "test-support"))]
595 cx.background().simulate_random_delay().await;
596 } else {
597 smol::Timer::after(Duration::from_millis(100)).await;
598 }
599 if let Some(this) = this.upgrade(&cx) {
600 this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
601 }
602 }));
603
604 cx.emit(Event::UpdatedEntries);
605
606 if !updated_repos.is_empty() {
607 cx.emit(Event::UpdatedGitRepositories(updated_repos));
608 }
609 }
610
611 _ => {
612 if force {
613 self.snapshot = self.background_snapshot.lock().clone();
614 }
615 }
616 }
617
618 cx.notify();
619 }
620
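    // Computes the symmetric difference between two repository lists, keyed by
    // `git_dir_path`: a repository is reported if it was added, removed, or
    // rescanned (i.e. its `scan_id` changed).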
621 fn changed_repos(
622 old_repos: &[GitRepositoryEntry],
623 new_repos: &[GitRepositoryEntry],
624 ) -> Vec<GitRepositoryEntry> {
625 fn diff<'a>(
626 a: &'a [GitRepositoryEntry],
627 b: &'a [GitRepositoryEntry],
628 updated: &mut HashMap<&'a Path, GitRepositoryEntry>,
629 ) {
630 for a_repo in a {
631 let matched = b.iter().find(|b_repo| {
632 a_repo.git_dir_path == b_repo.git_dir_path && a_repo.scan_id == b_repo.scan_id
633 });
634
635 if matched.is_none() {
636 updated.insert(a_repo.git_dir_path.as_ref(), a_repo.clone());
637 }
638 }
639 }
640
641 let mut updated = HashMap::<&Path, GitRepositoryEntry>::default();
642
643 diff(old_repos, new_repos, &mut updated);
644 diff(new_repos, old_repos, &mut updated);
645
646 updated.into_values().collect()
647 }
648
649 pub fn scan_complete(&self) -> impl Future<Output = ()> {
650 let mut scan_state_rx = self.last_scan_state_rx.clone();
651 async move {
652 let mut scan_state = Some(scan_state_rx.borrow().clone());
653 while let Some(ScanState::Initializing | ScanState::Updating) = scan_state {
654 scan_state = scan_state_rx.recv().await;
655 }
656 }
657 }
658
659 fn scan_state(&self) -> ScanState {
660 self.last_scan_state_rx.borrow().clone()
661 }
662
663 pub fn snapshot(&self) -> LocalSnapshot {
664 self.snapshot.clone()
665 }
666
667 pub fn metadata_proto(&self) -> proto::WorktreeMetadata {
668 proto::WorktreeMetadata {
669 id: self.id().to_proto(),
670 root_name: self.root_name().to_string(),
671 visible: self.visible,
672 abs_path: self.abs_path().as_os_str().to_string_lossy().into(),
673 }
674 }
675
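    // Loads the file's contents from disk along with its git index text (used as
    // the diff base), and eagerly refreshes the corresponding entry in the snapshot.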
676 fn load(
677 &self,
678 path: &Path,
679 cx: &mut ModelContext<Worktree>,
680 ) -> Task<Result<(File, String, Option<String>)>> {
681 let handle = cx.handle();
682 let path = Arc::from(path);
683 let abs_path = self.absolutize(&path);
684 let fs = self.fs.clone();
685 let snapshot = self.snapshot();
686
687 cx.spawn(|this, mut cx| async move {
688 let text = fs.load(&abs_path).await?;
689
690 let diff_base = if let Some(repo) = snapshot.repo_for(&path) {
691 if let Ok(repo_relative) = path.strip_prefix(repo.content_path) {
692 let repo_relative = repo_relative.to_owned();
693 cx.background()
694 .spawn(async move { repo.repo.lock().load_index_text(&repo_relative) })
695 .await
696 } else {
697 None
698 }
699 } else {
700 None
701 };
702
703 // Eagerly populate the snapshot with an updated entry for the loaded file
704 let entry = this
705 .update(&mut cx, |this, cx| {
706 this.as_local()
707 .unwrap()
708 .refresh_entry(path, abs_path, None, cx)
709 })
710 .await?;
711
712 Ok((
713 File {
714 entry_id: entry.id,
715 worktree: handle,
716 path: entry.path,
717 mtime: entry.mtime,
718 is_local: true,
719 is_deleted: false,
720 },
721 text,
722 diff_base,
723 ))
724 })
725 }
726
727 pub fn save_buffer_as(
728 &self,
729 buffer_handle: ModelHandle<Buffer>,
730 path: impl Into<Arc<Path>>,
731 cx: &mut ModelContext<Worktree>,
732 ) -> Task<Result<()>> {
733 let buffer = buffer_handle.read(cx);
734 let text = buffer.as_rope().clone();
735 let fingerprint = text.fingerprint();
736 let version = buffer.version();
737 let save = self.write_file(path, text, buffer.line_ending(), cx);
738 let handle = cx.handle();
739 cx.as_mut().spawn(|mut cx| async move {
740 let entry = save.await?;
741 let file = File {
742 entry_id: entry.id,
743 worktree: handle,
744 path: entry.path,
745 mtime: entry.mtime,
746 is_local: true,
747 is_deleted: false,
748 };
749
750 buffer_handle.update(&mut cx, |buffer, cx| {
751 buffer.did_save(version, fingerprint, file.mtime, Some(Arc::new(file)), cx);
752 });
753
754 Ok(())
755 })
756 }
757
758 pub fn create_entry(
759 &self,
760 path: impl Into<Arc<Path>>,
761 is_dir: bool,
762 cx: &mut ModelContext<Worktree>,
763 ) -> Task<Result<Entry>> {
764 self.write_entry_internal(
765 path,
766 if is_dir {
767 None
768 } else {
769 Some(Default::default())
770 },
771 cx,
772 )
773 }
774
775 pub fn write_file(
776 &self,
777 path: impl Into<Arc<Path>>,
778 text: Rope,
779 line_ending: LineEnding,
780 cx: &mut ModelContext<Worktree>,
781 ) -> Task<Result<Entry>> {
782 self.write_entry_internal(path, Some((text, line_ending)), cx)
783 }
784
785 pub fn delete_entry(
786 &self,
787 entry_id: ProjectEntryId,
788 cx: &mut ModelContext<Worktree>,
789 ) -> Option<Task<Result<()>>> {
790 let entry = self.entry_for_id(entry_id)?.clone();
791 let abs_path = self.absolutize(&entry.path);
792 let delete = cx.background().spawn({
793 let fs = self.fs.clone();
794 let abs_path = abs_path;
795 async move {
796 if entry.is_file() {
797 fs.remove_file(&abs_path, Default::default()).await
798 } else {
799 fs.remove_dir(
800 &abs_path,
801 RemoveOptions {
802 recursive: true,
803 ignore_if_not_exists: false,
804 },
805 )
806 .await
807 }
808 }
809 });
810
811 Some(cx.spawn(|this, mut cx| async move {
812 delete.await?;
813 this.update(&mut cx, |this, cx| {
814 let this = this.as_local_mut().unwrap();
815 {
816 let mut snapshot = this.background_snapshot.lock();
817 snapshot.delete_entry(entry_id);
818 }
819 this.poll_snapshot(true, cx);
820 });
821 Ok(())
822 }))
823 }
824
825 pub fn rename_entry(
826 &self,
827 entry_id: ProjectEntryId,
828 new_path: impl Into<Arc<Path>>,
829 cx: &mut ModelContext<Worktree>,
830 ) -> Option<Task<Result<Entry>>> {
831 let old_path = self.entry_for_id(entry_id)?.path.clone();
832 let new_path = new_path.into();
833 let abs_old_path = self.absolutize(&old_path);
834 let abs_new_path = self.absolutize(&new_path);
835 let rename = cx.background().spawn({
836 let fs = self.fs.clone();
837 let abs_new_path = abs_new_path.clone();
838 async move {
839 fs.rename(&abs_old_path, &abs_new_path, Default::default())
840 .await
841 }
842 });
843
844 Some(cx.spawn(|this, mut cx| async move {
845 rename.await?;
846 let entry = this
847 .update(&mut cx, |this, cx| {
848 this.as_local_mut().unwrap().refresh_entry(
849 new_path.clone(),
850 abs_new_path,
851 Some(old_path),
852 cx,
853 )
854 })
855 .await?;
856 Ok(entry)
857 }))
858 }
859
860 pub fn copy_entry(
861 &self,
862 entry_id: ProjectEntryId,
863 new_path: impl Into<Arc<Path>>,
864 cx: &mut ModelContext<Worktree>,
865 ) -> Option<Task<Result<Entry>>> {
866 let old_path = self.entry_for_id(entry_id)?.path.clone();
867 let new_path = new_path.into();
868 let abs_old_path = self.absolutize(&old_path);
869 let abs_new_path = self.absolutize(&new_path);
870 let copy = cx.background().spawn({
871 let fs = self.fs.clone();
872 let abs_new_path = abs_new_path.clone();
873 async move {
874 copy_recursive(
875 fs.as_ref(),
876 &abs_old_path,
877 &abs_new_path,
878 Default::default(),
879 )
880 .await
881 }
882 });
883
884 Some(cx.spawn(|this, mut cx| async move {
885 copy.await?;
886 let entry = this
887 .update(&mut cx, |this, cx| {
888 this.as_local_mut().unwrap().refresh_entry(
889 new_path.clone(),
890 abs_new_path,
891 None,
892 cx,
893 )
894 })
895 .await?;
896 Ok(entry)
897 }))
898 }
899
900 fn write_entry_internal(
901 &self,
902 path: impl Into<Arc<Path>>,
903 text_if_file: Option<(Rope, LineEnding)>,
904 cx: &mut ModelContext<Worktree>,
905 ) -> Task<Result<Entry>> {
906 let path = path.into();
907 let abs_path = self.absolutize(&path);
908 let write = cx.background().spawn({
909 let fs = self.fs.clone();
910 let abs_path = abs_path.clone();
911 async move {
912 if let Some((text, line_ending)) = text_if_file {
913 fs.save(&abs_path, &text, line_ending).await
914 } else {
915 fs.create_dir(&abs_path).await
916 }
917 }
918 });
919
920 cx.spawn(|this, mut cx| async move {
921 write.await?;
922 let entry = this
923 .update(&mut cx, |this, cx| {
924 this.as_local_mut()
925 .unwrap()
926 .refresh_entry(path, abs_path, None, cx)
927 })
928 .await?;
929 Ok(entry)
930 })
931 }
932
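    // Re-stats `abs_path` and inserts a fresh entry for `path` into the background
    // snapshot, removing `old_path` first when the refresh is part of a rename.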
933 fn refresh_entry(
934 &self,
935 path: Arc<Path>,
936 abs_path: PathBuf,
937 old_path: Option<Arc<Path>>,
938 cx: &mut ModelContext<Worktree>,
939 ) -> Task<Result<Entry>> {
940 let fs = self.fs.clone();
941 let root_char_bag;
942 let next_entry_id;
943 {
944 let snapshot = self.background_snapshot.lock();
945 root_char_bag = snapshot.root_char_bag;
946 next_entry_id = snapshot.next_entry_id.clone();
947 }
948 cx.spawn_weak(|this, mut cx| async move {
949 let metadata = fs
950 .metadata(&abs_path)
951 .await?
952 .ok_or_else(|| anyhow!("could not read saved file metadata"))?;
953 let this = this
954 .upgrade(&cx)
955 .ok_or_else(|| anyhow!("worktree was dropped"))?;
956 this.update(&mut cx, |this, cx| {
957 let this = this.as_local_mut().unwrap();
958 let inserted_entry;
959 {
960 let mut snapshot = this.background_snapshot.lock();
961 let mut entry = Entry::new(path, &metadata, &next_entry_id, root_char_bag);
962 entry.is_ignored = snapshot
963 .ignore_stack_for_abs_path(&abs_path, entry.is_dir())
964 .is_abs_path_ignored(&abs_path, entry.is_dir());
965 if let Some(old_path) = old_path {
966 snapshot.remove_path(&old_path);
967 }
968 snapshot.scan_started();
969 inserted_entry = snapshot.insert_entry(entry, fs.as_ref());
970 snapshot.scan_completed();
971 }
972 this.poll_snapshot(true, cx);
973 Ok(inserted_entry)
974 })
975 })
976 }
977
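    // Starts (or resumes) streaming this worktree's snapshots to the remote
    // project. The returned task resolves once the first snapshot has been sent.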
978 pub fn share(&mut self, project_id: u64, cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
979 let (share_tx, share_rx) = oneshot::channel();
980
981 if let Some(share) = self.share.as_mut() {
982 let _ = share_tx.send(());
983 *share.resume_updates.borrow_mut() = ();
984 } else {
985 let (snapshots_tx, mut snapshots_rx) = watch::channel_with(self.snapshot());
986 let (resume_updates_tx, mut resume_updates_rx) = watch::channel();
987 let worktree_id = cx.model_id() as u64;
988
989 for (path, summary) in self.diagnostic_summaries.iter() {
990 if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
991 project_id,
992 worktree_id,
993 summary: Some(summary.to_proto(&path.0)),
994 }) {
995 return Task::ready(Err(e));
996 }
997 }
998
999 let _maintain_remote_snapshot = cx.background().spawn({
1000 let client = self.client.clone();
1001 async move {
1002 let mut share_tx = Some(share_tx);
1003 let mut prev_snapshot = LocalSnapshot {
1004 ignores_by_parent_abs_path: Default::default(),
1005 git_repositories: Default::default(),
1006 removed_entry_ids: Default::default(),
1007 next_entry_id: Default::default(),
1008 snapshot: Snapshot {
1009 id: WorktreeId(worktree_id as usize),
1010 abs_path: Path::new("").into(),
1011 root_name: Default::default(),
1012 root_char_bag: Default::default(),
1013 entries_by_path: Default::default(),
1014 entries_by_id: Default::default(),
1015 scan_id: 0,
1016 completed_scan_id: 0,
1017 },
1018 };
1019 while let Some(snapshot) = snapshots_rx.recv().await {
1020 #[cfg(any(test, feature = "test-support"))]
1021 const MAX_CHUNK_SIZE: usize = 2;
1022 #[cfg(not(any(test, feature = "test-support")))]
1023 const MAX_CHUNK_SIZE: usize = 256;
1024
1025 let update =
1026 snapshot.build_update(&prev_snapshot, project_id, worktree_id, true);
1027 for update in proto::split_worktree_update(update, MAX_CHUNK_SIZE) {
1028 let _ = resume_updates_rx.try_recv();
1029 while let Err(error) = client.request(update.clone()).await {
1030 log::error!("failed to send worktree update: {}", error);
1031 log::info!("waiting to resume updates");
1032 if resume_updates_rx.next().await.is_none() {
1033 return Ok(());
1034 }
1035 }
1036 }
1037
1038 if let Some(share_tx) = share_tx.take() {
1039 let _ = share_tx.send(());
1040 }
1041
1042 prev_snapshot = snapshot;
1043 }
1044
1045 Ok::<_, anyhow::Error>(())
1046 }
1047 .log_err()
1048 });
1049
1050 self.share = Some(ShareState {
1051 project_id,
1052 snapshots_tx,
1053 resume_updates: resume_updates_tx,
1054 _maintain_remote_snapshot,
1055 });
1056 }
1057
1058 cx.foreground()
1059 .spawn(async move { share_rx.await.map_err(|_| anyhow!("share ended")) })
1060 }
1061
1062 pub fn unshare(&mut self) {
1063 self.share.take();
1064 }
1065
1066 pub fn is_shared(&self) -> bool {
1067 self.share.is_some()
1068 }
1069}
1070
1071impl RemoteWorktree {
1072 fn snapshot(&self) -> Snapshot {
1073 self.snapshot.clone()
1074 }
1075
1076 fn poll_snapshot(&mut self, cx: &mut ModelContext<Worktree>) {
1077 self.snapshot = self.background_snapshot.lock().clone();
1078 cx.emit(Event::UpdatedEntries);
1079 cx.notify();
1080 }
1081
1082 pub fn disconnected_from_host(&mut self) {
1083 self.updates_tx.take();
1084 self.snapshot_subscriptions.clear();
1085 self.disconnected = true;
1086 }
1087
1088 pub fn update_from_remote(&mut self, update: proto::UpdateWorktree) {
1089 if let Some(updates_tx) = &self.updates_tx {
1090 updates_tx
1091 .unbounded_send(update)
1092 .expect("consumer runs to completion");
1093 }
1094 }
1095
1096 fn observed_snapshot(&self, scan_id: usize) -> bool {
1097 self.completed_scan_id >= scan_id
1098 }
1099
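    // Returns a future that resolves once this worktree has observed a snapshot
    // with at least the given `scan_id`. The future fails if the host disconnects
    // before that happens.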
1100 fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = Result<()>> {
1101 let (tx, rx) = oneshot::channel();
1102 if self.observed_snapshot(scan_id) {
1103 let _ = tx.send(());
1104 } else if self.disconnected {
1105 drop(tx);
1106 } else {
1107 match self
1108 .snapshot_subscriptions
1109 .binary_search_by_key(&scan_id, |probe| probe.0)
1110 {
1111 Ok(ix) | Err(ix) => self.snapshot_subscriptions.insert(ix, (scan_id, tx)),
1112 }
1113 }
1114
1115 async move {
1116 rx.await?;
1117 Ok(())
1118 }
1119 }
1120
1121 pub fn update_diagnostic_summary(
1122 &mut self,
1123 path: Arc<Path>,
1124 summary: &proto::DiagnosticSummary,
1125 ) {
1126 let summary = DiagnosticSummary {
1127 language_server_id: summary.language_server_id as usize,
1128 error_count: summary.error_count as usize,
1129 warning_count: summary.warning_count as usize,
1130 };
1131 if summary.is_empty() {
1132 self.diagnostic_summaries.remove(&PathKey(path));
1133 } else {
1134 self.diagnostic_summaries.insert(PathKey(path), summary);
1135 }
1136 }
1137
1138 pub fn insert_entry(
1139 &mut self,
1140 entry: proto::Entry,
1141 scan_id: usize,
1142 cx: &mut ModelContext<Worktree>,
1143 ) -> Task<Result<Entry>> {
1144 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1145 cx.spawn(|this, mut cx| async move {
1146 wait_for_snapshot.await?;
1147 this.update(&mut cx, |worktree, _| {
1148 let worktree = worktree.as_remote_mut().unwrap();
1149 let mut snapshot = worktree.background_snapshot.lock();
1150 let entry = snapshot.insert_entry(entry);
1151 worktree.snapshot = snapshot.clone();
1152 entry
1153 })
1154 })
1155 }
1156
1157 pub(crate) fn delete_entry(
1158 &mut self,
1159 id: ProjectEntryId,
1160 scan_id: usize,
1161 cx: &mut ModelContext<Worktree>,
1162 ) -> Task<Result<()>> {
1163 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1164 cx.spawn(|this, mut cx| async move {
1165 wait_for_snapshot.await?;
1166 this.update(&mut cx, |worktree, _| {
1167 let worktree = worktree.as_remote_mut().unwrap();
1168 let mut snapshot = worktree.background_snapshot.lock();
1169 snapshot.delete_entry(id);
1170 worktree.snapshot = snapshot.clone();
1171 });
1172 Ok(())
1173 })
1174 }
1175}
1176
1177impl Snapshot {
1178 pub fn id(&self) -> WorktreeId {
1179 self.id
1180 }
1181
1182 pub fn abs_path(&self) -> &Arc<Path> {
1183 &self.abs_path
1184 }
1185
1186 pub fn contains_entry(&self, entry_id: ProjectEntryId) -> bool {
1187 self.entries_by_id.get(&entry_id, &()).is_some()
1188 }
1189
1190 pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
1191 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1192 let old_entry = self.entries_by_id.insert_or_replace(
1193 PathEntry {
1194 id: entry.id,
1195 path: entry.path.clone(),
1196 is_ignored: entry.is_ignored,
1197 scan_id: 0,
1198 },
1199 &(),
1200 );
1201 if let Some(old_entry) = old_entry {
1202 self.entries_by_path.remove(&PathKey(old_entry.path), &());
1203 }
1204 self.entries_by_path.insert_or_replace(entry.clone(), &());
1205 Ok(entry)
1206 }
1207
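    // Removes the entry with the given id, along with all of its descendants,
    // from both the path-ordered and id-ordered trees.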
1208 fn delete_entry(&mut self, entry_id: ProjectEntryId) -> bool {
1209 if let Some(removed_entry) = self.entries_by_id.remove(&entry_id, &()) {
1210 self.entries_by_path = {
1211 let mut cursor = self.entries_by_path.cursor();
1212 let mut new_entries_by_path =
1213 cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &());
1214 while let Some(entry) = cursor.item() {
1215 if entry.path.starts_with(&removed_entry.path) {
1216 self.entries_by_id.remove(&entry.id, &());
1217 cursor.next(&());
1218 } else {
1219 break;
1220 }
1221 }
1222 new_entries_by_path.push_tree(cursor.suffix(&()), &());
1223 new_entries_by_path
1224 };
1225
1226 true
1227 } else {
1228 false
1229 }
1230 }
1231
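    // Applies an `UpdateWorktree` message received from the host: removed entries
    // are dropped from both trees, updated entries are (re)inserted, and the scan
    // ids are advanced.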
1232 pub(crate) fn apply_remote_update(&mut self, update: proto::UpdateWorktree) -> Result<()> {
1233 let mut entries_by_path_edits = Vec::new();
1234 let mut entries_by_id_edits = Vec::new();
1235 for entry_id in update.removed_entries {
1236 let entry = self
1237 .entry_for_id(ProjectEntryId::from_proto(entry_id))
1238 .ok_or_else(|| anyhow!("unknown entry {}", entry_id))?;
1239 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
1240 entries_by_id_edits.push(Edit::Remove(entry.id));
1241 }
1242
1243 for entry in update.updated_entries {
1244 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1245 if let Some(PathEntry { path, .. }) = self.entries_by_id.get(&entry.id, &()) {
1246 entries_by_path_edits.push(Edit::Remove(PathKey(path.clone())));
1247 }
1248 entries_by_id_edits.push(Edit::Insert(PathEntry {
1249 id: entry.id,
1250 path: entry.path.clone(),
1251 is_ignored: entry.is_ignored,
1252 scan_id: 0,
1253 }));
1254 entries_by_path_edits.push(Edit::Insert(entry));
1255 }
1256
1257 self.entries_by_path.edit(entries_by_path_edits, &());
1258 self.entries_by_id.edit(entries_by_id_edits, &());
1259 self.scan_id = update.scan_id as usize;
1260 if update.is_last_update {
1261 self.completed_scan_id = update.scan_id as usize;
1262 }
1263
1264 Ok(())
1265 }
1266
1267 pub fn file_count(&self) -> usize {
1268 self.entries_by_path.summary().file_count
1269 }
1270
1271 pub fn visible_file_count(&self) -> usize {
1272 self.entries_by_path.summary().visible_file_count
1273 }
1274
1275 fn traverse_from_offset(
1276 &self,
1277 include_dirs: bool,
1278 include_ignored: bool,
1279 start_offset: usize,
1280 ) -> Traversal {
1281 let mut cursor = self.entries_by_path.cursor();
1282 cursor.seek(
1283 &TraversalTarget::Count {
1284 count: start_offset,
1285 include_dirs,
1286 include_ignored,
1287 },
1288 Bias::Right,
1289 &(),
1290 );
1291 Traversal {
1292 cursor,
1293 include_dirs,
1294 include_ignored,
1295 }
1296 }
1297
1298 fn traverse_from_path(
1299 &self,
1300 include_dirs: bool,
1301 include_ignored: bool,
1302 path: &Path,
1303 ) -> Traversal {
1304 let mut cursor = self.entries_by_path.cursor();
1305 cursor.seek(&TraversalTarget::Path(path), Bias::Left, &());
1306 Traversal {
1307 cursor,
1308 include_dirs,
1309 include_ignored,
1310 }
1311 }
1312
1313 pub fn files(&self, include_ignored: bool, start: usize) -> Traversal {
1314 self.traverse_from_offset(false, include_ignored, start)
1315 }
1316
1317 pub fn entries(&self, include_ignored: bool) -> Traversal {
1318 self.traverse_from_offset(true, include_ignored, 0)
1319 }
1320
1321 pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
1322 let empty_path = Path::new("");
1323 self.entries_by_path
1324 .cursor::<()>()
1325 .filter(move |entry| entry.path.as_ref() != empty_path)
1326 .map(|entry| &entry.path)
1327 }
1328
1329 fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> {
1330 let mut cursor = self.entries_by_path.cursor();
1331 cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &());
1332 let traversal = Traversal {
1333 cursor,
1334 include_dirs: true,
1335 include_ignored: true,
1336 };
1337 ChildEntriesIter {
1338 traversal,
1339 parent_path,
1340 }
1341 }
1342
1343 pub fn root_entry(&self) -> Option<&Entry> {
1344 self.entry_for_path("")
1345 }
1346
1347 pub fn root_name(&self) -> &str {
1348 &self.root_name
1349 }
1350
1351 pub fn scan_started(&mut self) {
1352 self.scan_id += 1;
1353 }
1354
1355 pub fn scan_completed(&mut self) {
1356 self.completed_scan_id = self.scan_id;
1357 }
1358
1359 pub fn scan_id(&self) -> usize {
1360 self.scan_id
1361 }
1362
1363 pub fn entry_for_path(&self, path: impl AsRef<Path>) -> Option<&Entry> {
1364 let path = path.as_ref();
1365 self.traverse_from_path(true, true, path)
1366 .entry()
1367 .and_then(|entry| {
1368 if entry.path.as_ref() == path {
1369 Some(entry)
1370 } else {
1371 None
1372 }
1373 })
1374 }
1375
1376 pub fn entry_for_id(&self, id: ProjectEntryId) -> Option<&Entry> {
1377 let entry = self.entries_by_id.get(&id, &())?;
1378 self.entry_for_path(&entry.path)
1379 }
1380
1381 pub fn inode_for_path(&self, path: impl AsRef<Path>) -> Option<u64> {
1382 self.entry_for_path(path.as_ref()).map(|e| e.inode)
1383 }
1384}
1385
1386impl LocalSnapshot {
1387 // Gives the most specific git repository for a given path
1388 pub(crate) fn repo_for(&self, path: &Path) -> Option<GitRepositoryEntry> {
1389 self.git_repositories
1390 .iter()
            .rev() // `git_repositories` is sorted lexicographically, so the deepest (most specific) repository comes last
1392 .find(|repo| repo.manages(path))
1393 .cloned()
1394 }
1395
1396 pub(crate) fn in_dot_git(&mut self, path: &Path) -> Option<&mut GitRepositoryEntry> {
1397 // Git repositories cannot be nested, so we don't need to reverse the order
1398 self.git_repositories
1399 .iter_mut()
1400 .find(|repo| repo.in_dot_git(path))
1401 }
1402
1403 #[cfg(test)]
1404 pub(crate) fn build_initial_update(&self, project_id: u64) -> proto::UpdateWorktree {
1405 let root_name = self.root_name.clone();
1406 proto::UpdateWorktree {
1407 project_id,
1408 worktree_id: self.id().to_proto(),
1409 abs_path: self.abs_path().to_string_lossy().into(),
1410 root_name,
1411 updated_entries: self.entries_by_path.iter().map(Into::into).collect(),
1412 removed_entries: Default::default(),
1413 scan_id: self.scan_id as u64,
1414 is_last_update: true,
1415 }
1416 }
1417
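    // Builds an `UpdateWorktree` message describing how `self` differs from
    // `other`. Entries are walked in id order; entries only present in `self` (or
    // whose `scan_id` changed) become `updated_entries`, and entries only present
    // in `other` become `removed_entries`.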
1418 pub(crate) fn build_update(
1419 &self,
1420 other: &Self,
1421 project_id: u64,
1422 worktree_id: u64,
1423 include_ignored: bool,
1424 ) -> proto::UpdateWorktree {
1425 let mut updated_entries = Vec::new();
1426 let mut removed_entries = Vec::new();
1427 let mut self_entries = self
1428 .entries_by_id
1429 .cursor::<()>()
1430 .filter(|e| include_ignored || !e.is_ignored)
1431 .peekable();
1432 let mut other_entries = other
1433 .entries_by_id
1434 .cursor::<()>()
1435 .filter(|e| include_ignored || !e.is_ignored)
1436 .peekable();
1437 loop {
1438 match (self_entries.peek(), other_entries.peek()) {
1439 (Some(self_entry), Some(other_entry)) => {
1440 match Ord::cmp(&self_entry.id, &other_entry.id) {
1441 Ordering::Less => {
1442 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1443 updated_entries.push(entry);
1444 self_entries.next();
1445 }
1446 Ordering::Equal => {
1447 if self_entry.scan_id != other_entry.scan_id {
1448 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1449 updated_entries.push(entry);
1450 }
1451
1452 self_entries.next();
1453 other_entries.next();
1454 }
1455 Ordering::Greater => {
1456 removed_entries.push(other_entry.id.to_proto());
1457 other_entries.next();
1458 }
1459 }
1460 }
1461 (Some(self_entry), None) => {
1462 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1463 updated_entries.push(entry);
1464 self_entries.next();
1465 }
1466 (None, Some(other_entry)) => {
1467 removed_entries.push(other_entry.id.to_proto());
1468 other_entries.next();
1469 }
1470 (None, None) => break,
1471 }
1472 }
1473
1474 proto::UpdateWorktree {
1475 project_id,
1476 worktree_id,
1477 abs_path: self.abs_path().to_string_lossy().into(),
1478 root_name: self.root_name().to_string(),
1479 updated_entries,
1480 removed_entries,
1481 scan_id: self.scan_id as u64,
1482 is_last_update: self.completed_scan_id == self.scan_id,
1483 }
1484 }
1485
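    // Inserts or replaces a single entry. If the entry is a `.gitignore` file, its
    // contents are parsed eagerly; previously assigned entry ids are reused where
    // possible so that ids remain stable across scans.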
1486 fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
1487 if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) {
1488 let abs_path = self.abs_path.join(&entry.path);
1489 match smol::block_on(build_gitignore(&abs_path, fs)) {
1490 Ok(ignore) => {
1491 self.ignores_by_parent_abs_path.insert(
1492 abs_path.parent().unwrap().into(),
1493 (Arc::new(ignore), self.scan_id),
1494 );
1495 }
1496 Err(error) => {
1497 log::error!(
1498 "error loading .gitignore file {:?} - {:?}",
1499 &entry.path,
1500 error
1501 );
1502 }
1503 }
1504 }
1505
1506 self.reuse_entry_id(&mut entry);
1507
1508 if entry.kind == EntryKind::PendingDir {
1509 if let Some(existing_entry) =
1510 self.entries_by_path.get(&PathKey(entry.path.clone()), &())
1511 {
1512 entry.kind = existing_entry.kind;
1513 }
1514 }
1515
1516 let scan_id = self.scan_id;
1517 self.entries_by_path.insert_or_replace(entry.clone(), &());
1518 self.entries_by_id.insert_or_replace(
1519 PathEntry {
1520 id: entry.id,
1521 path: entry.path.clone(),
1522 is_ignored: entry.is_ignored,
1523 scan_id,
1524 },
1525 &(),
1526 );
1527
1528 entry
1529 }
1530
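    // Marks a pending directory as fully scanned: records any `.gitignore` or git
    // repository discovered inside it and inserts its child entries.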
1531 fn populate_dir(
1532 &mut self,
1533 parent_path: Arc<Path>,
1534 entries: impl IntoIterator<Item = Entry>,
1535 ignore: Option<Arc<Gitignore>>,
1536 fs: &dyn Fs,
1537 ) {
1538 let mut parent_entry = if let Some(parent_entry) =
1539 self.entries_by_path.get(&PathKey(parent_path.clone()), &())
1540 {
1541 parent_entry.clone()
1542 } else {
1543 log::warn!(
1544 "populating a directory {:?} that has been removed",
1545 parent_path
1546 );
1547 return;
1548 };
1549
1550 if let Some(ignore) = ignore {
1551 self.ignores_by_parent_abs_path.insert(
1552 self.abs_path.join(&parent_path).into(),
1553 (ignore, self.scan_id),
1554 );
1555 }
1556 if matches!(parent_entry.kind, EntryKind::PendingDir) {
1557 parent_entry.kind = EntryKind::Dir;
1558 } else {
1559 unreachable!();
1560 }
1561
1562 if parent_path.file_name() == Some(&DOT_GIT) {
1563 let abs_path = self.abs_path.join(&parent_path);
1564 let content_path: Arc<Path> = parent_path.parent().unwrap().into();
1565 if let Err(ix) = self
1566 .git_repositories
1567 .binary_search_by_key(&&content_path, |repo| &repo.content_path)
1568 {
1569 if let Some(repo) = fs.open_repo(abs_path.as_path()) {
1570 self.git_repositories.insert(
1571 ix,
1572 GitRepositoryEntry {
1573 repo,
1574 scan_id: 0,
1575 content_path,
1576 git_dir_path: parent_path,
1577 },
1578 );
1579 }
1580 }
1581 }
1582
1583 let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
1584 let mut entries_by_id_edits = Vec::new();
1585
1586 for mut entry in entries {
1587 self.reuse_entry_id(&mut entry);
1588 entries_by_id_edits.push(Edit::Insert(PathEntry {
1589 id: entry.id,
1590 path: entry.path.clone(),
1591 is_ignored: entry.is_ignored,
1592 scan_id: self.scan_id,
1593 }));
1594 entries_by_path_edits.push(Edit::Insert(entry));
1595 }
1596
1597 self.entries_by_path.edit(entries_by_path_edits, &());
1598 self.entries_by_id.edit(entries_by_id_edits, &());
1599 }
1600
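    // Keeps entry ids stable by reusing the id of a removed entry with the same
    // inode, or of an existing entry at the same path.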
1601 fn reuse_entry_id(&mut self, entry: &mut Entry) {
1602 if let Some(removed_entry_id) = self.removed_entry_ids.remove(&entry.inode) {
1603 entry.id = removed_entry_id;
1604 } else if let Some(existing_entry) = self.entry_for_path(&entry.path) {
1605 entry.id = existing_entry.id;
1606 }
1607 }
1608
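    // Removes `path` and all of its descendants, remembering the removed entry ids
    // (keyed by inode) so they can be reused if the same files reappear in a later
    // scan. Also bumps the scan id of any affected `.gitignore` or git repository.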
1609 fn remove_path(&mut self, path: &Path) {
1610 let mut new_entries;
1611 let removed_entries;
1612 {
1613 let mut cursor = self.entries_by_path.cursor::<TraversalProgress>();
1614 new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
1615 removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
1616 new_entries.push_tree(cursor.suffix(&()), &());
1617 }
1618 self.entries_by_path = new_entries;
1619
1620 let mut entries_by_id_edits = Vec::new();
1621 for entry in removed_entries.cursor::<()>() {
1622 let removed_entry_id = self
1623 .removed_entry_ids
1624 .entry(entry.inode)
1625 .or_insert(entry.id);
1626 *removed_entry_id = cmp::max(*removed_entry_id, entry.id);
1627 entries_by_id_edits.push(Edit::Remove(entry.id));
1628 }
1629 self.entries_by_id.edit(entries_by_id_edits, &());
1630
1631 if path.file_name() == Some(&GITIGNORE) {
1632 let abs_parent_path = self.abs_path.join(path.parent().unwrap());
1633 if let Some((_, scan_id)) = self
1634 .ignores_by_parent_abs_path
1635 .get_mut(abs_parent_path.as_path())
1636 {
1637 *scan_id = self.snapshot.scan_id;
1638 }
1639 } else if path.file_name() == Some(&DOT_GIT) {
1640 let parent_path = path.parent().unwrap();
1641 if let Ok(ix) = self
1642 .git_repositories
1643 .binary_search_by_key(&parent_path, |repo| repo.git_dir_path.as_ref())
1644 {
1645 self.git_repositories[ix].scan_id = self.snapshot.scan_id;
1646 }
1647 }
1648 }
1649
1650 fn ancestor_inodes_for_path(&self, path: &Path) -> TreeSet<u64> {
1651 let mut inodes = TreeSet::default();
1652 for ancestor in path.ancestors().skip(1) {
1653 if let Some(entry) = self.entry_for_path(ancestor) {
1654 inodes.insert(entry.inode);
1655 }
1656 }
1657 inodes
1658 }
1659
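    // Builds the stack of `.gitignore` files that apply to `abs_path` by walking
    // its ancestors from the outermost inward, short-circuiting to
    // `IgnoreStack::all()` as soon as an ancestor (or the path itself) is ignored.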
1660 fn ignore_stack_for_abs_path(&self, abs_path: &Path, is_dir: bool) -> Arc<IgnoreStack> {
1661 let mut new_ignores = Vec::new();
1662 for ancestor in abs_path.ancestors().skip(1) {
1663 if let Some((ignore, _)) = self.ignores_by_parent_abs_path.get(ancestor) {
1664 new_ignores.push((ancestor, Some(ignore.clone())));
1665 } else {
1666 new_ignores.push((ancestor, None));
1667 }
1668 }
1669
1670 let mut ignore_stack = IgnoreStack::none();
1671 for (parent_abs_path, ignore) in new_ignores.into_iter().rev() {
1672 if ignore_stack.is_abs_path_ignored(parent_abs_path, true) {
1673 ignore_stack = IgnoreStack::all();
1674 break;
1675 } else if let Some(ignore) = ignore {
1676 ignore_stack = ignore_stack.append(parent_abs_path.into(), ignore);
1677 }
1678 }
1679
1680 if ignore_stack.is_abs_path_ignored(abs_path, is_dir) {
1681 ignore_stack = IgnoreStack::all();
1682 }
1683
1684 ignore_stack
1685 }
1686
1687 pub fn git_repo_entries(&self) -> &[GitRepositoryEntry] {
1688 &self.git_repositories
1689 }
1690}
1691
1692impl GitRepositoryEntry {
1693 // Note that these paths should be relative to the worktree root.
1694 pub(crate) fn manages(&self, path: &Path) -> bool {
1695 path.starts_with(self.content_path.as_ref())
1696 }
1697
    // Note that this path should be relative to the worktree root.
1699 pub(crate) fn in_dot_git(&self, path: &Path) -> bool {
1700 path.starts_with(self.git_dir_path.as_ref())
1701 }
1702}
1703
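/// Reads the `.gitignore` file at `abs_path` and parses it into a matcher rooted
/// at the file's parent directory. A hypothetical call site (assuming an
/// `fs: Arc<dyn Fs>` is in scope) might look like:
///
/// ```ignore
/// let ignore = build_gitignore(Path::new("/repo/.gitignore"), fs.as_ref()).await?;
/// ```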
1704async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
1705 let contents = fs.load(abs_path).await?;
1706 let parent = abs_path.parent().unwrap_or_else(|| Path::new("/"));
1707 let mut builder = GitignoreBuilder::new(parent);
1708 for line in contents.lines() {
1709 builder.add_line(Some(abs_path.into()), line)?;
1710 }
1711 Ok(builder.build()?)
1712}
1713
1714impl WorktreeId {
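    /// Constructs a `WorktreeId` from a model handle's id. For illustration only
    /// (an `ignore`d doctest, since ids are normally created inside this crate):
    ///
    /// ```ignore
    /// let id = WorktreeId::from_usize(1);
    /// assert_eq!(id.to_proto(), 1);
    /// ```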
1715 pub fn from_usize(handle_id: usize) -> Self {
1716 Self(handle_id)
1717 }
1718
1719 pub(crate) fn from_proto(id: u64) -> Self {
1720 Self(id as usize)
1721 }
1722
1723 pub fn to_proto(&self) -> u64 {
1724 self.0 as u64
1725 }
1726
1727 pub fn to_usize(&self) -> usize {
1728 self.0
1729 }
1730}
1731
1732impl fmt::Display for WorktreeId {
1733 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1734 self.0.fmt(f)
1735 }
1736}
1737
1738impl Deref for Worktree {
1739 type Target = Snapshot;
1740
1741 fn deref(&self) -> &Self::Target {
1742 match self {
1743 Worktree::Local(worktree) => &worktree.snapshot,
1744 Worktree::Remote(worktree) => &worktree.snapshot,
1745 }
1746 }
1747}
1748
1749impl Deref for LocalWorktree {
1750 type Target = LocalSnapshot;
1751
1752 fn deref(&self) -> &Self::Target {
1753 &self.snapshot
1754 }
1755}
1756
1757impl Deref for RemoteWorktree {
1758 type Target = Snapshot;
1759
1760 fn deref(&self) -> &Self::Target {
1761 &self.snapshot
1762 }
1763}
1764
1765impl fmt::Debug for LocalWorktree {
1766 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1767 self.snapshot.fmt(f)
1768 }
1769}
1770
1771impl fmt::Debug for Snapshot {
1772 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1773 struct EntriesById<'a>(&'a SumTree<PathEntry>);
1774 struct EntriesByPath<'a>(&'a SumTree<Entry>);
1775
1776 impl<'a> fmt::Debug for EntriesByPath<'a> {
1777 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1778 f.debug_map()
1779 .entries(self.0.iter().map(|entry| (&entry.path, entry.id)))
1780 .finish()
1781 }
1782 }
1783
1784 impl<'a> fmt::Debug for EntriesById<'a> {
1785 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1786 f.debug_list().entries(self.0.iter()).finish()
1787 }
1788 }
1789
1790 f.debug_struct("Snapshot")
1791 .field("id", &self.id)
1792 .field("root_name", &self.root_name)
1793 .field("entries_by_path", &EntriesByPath(&self.entries_by_path))
1794 .field("entries_by_id", &EntriesById(&self.entries_by_id))
1795 .finish()
1796 }
1797}
1798
1799#[derive(Clone, PartialEq)]
1800pub struct File {
1801 pub worktree: ModelHandle<Worktree>,
1802 pub path: Arc<Path>,
1803 pub mtime: SystemTime,
1804 pub(crate) entry_id: ProjectEntryId,
1805 pub(crate) is_local: bool,
1806 pub(crate) is_deleted: bool,
1807}
1808
1809impl language::File for File {
1810 fn as_local(&self) -> Option<&dyn language::LocalFile> {
1811 if self.is_local {
1812 Some(self)
1813 } else {
1814 None
1815 }
1816 }
1817
1818 fn mtime(&self) -> SystemTime {
1819 self.mtime
1820 }
1821
1822 fn path(&self) -> &Arc<Path> {
1823 &self.path
1824 }
1825
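    /// Returns a path suitable for display: the worktree's root name followed by
    /// the file's relative path for visible worktrees, or the worktree's absolute
    /// path (with the home directory abbreviated to `~`) for hidden ones.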
1826 fn full_path(&self, cx: &AppContext) -> PathBuf {
1827 let mut full_path = PathBuf::new();
1828 let worktree = self.worktree.read(cx);
1829
1830 if worktree.is_visible() {
1831 full_path.push(worktree.root_name());
1832 } else {
1833 let path = worktree.abs_path();
1834
1835 if worktree.is_local() && path.starts_with(HOME.as_path()) {
1836 full_path.push("~");
1837 full_path.push(path.strip_prefix(HOME.as_path()).unwrap());
1838 } else {
1839 full_path.push(path)
1840 }
1841 }
1842
1843 if self.path.components().next().is_some() {
1844 full_path.push(&self.path);
1845 }
1846
1847 full_path
1848 }
1849
1850 /// Returns the last component of this handle's absolute path. If this handle refers to the root
1851 /// of its worktree, then this method will return the name of the worktree itself.
1852 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr {
1853 self.path
1854 .file_name()
1855 .unwrap_or_else(|| OsStr::new(&self.worktree.read(cx).root_name))
1856 }
1857
1858 fn is_deleted(&self) -> bool {
1859 self.is_deleted
1860 }
1861
1862 fn save(
1863 &self,
1864 buffer_id: u64,
1865 text: Rope,
1866 version: clock::Global,
1867 line_ending: LineEnding,
1868 cx: &mut MutableAppContext,
1869 ) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
1870 self.worktree.update(cx, |worktree, cx| match worktree {
1871 Worktree::Local(worktree) => {
1872 let rpc = worktree.client.clone();
1873 let project_id = worktree.share.as_ref().map(|share| share.project_id);
1874 let fingerprint = text.fingerprint();
1875 let save = worktree.write_file(self.path.clone(), text, line_ending, cx);
1876 cx.background().spawn(async move {
1877 let entry = save.await?;
1878 if let Some(project_id) = project_id {
1879 rpc.send(proto::BufferSaved {
1880 project_id,
1881 buffer_id,
1882 version: serialize_version(&version),
1883 mtime: Some(entry.mtime.into()),
1884 fingerprint: serialize_fingerprint(fingerprint),
1885 })?;
1886 }
1887 Ok((version, fingerprint, entry.mtime))
1888 })
1889 }
1890 Worktree::Remote(worktree) => {
1891 let rpc = worktree.client.clone();
1892 let project_id = worktree.project_id;
1893 cx.foreground().spawn(async move {
1894 let response = rpc
1895 .request(proto::SaveBuffer {
1896 project_id,
1897 buffer_id,
1898 version: serialize_version(&version),
1899 })
1900 .await?;
1901 let version = deserialize_version(response.version);
1902 let fingerprint = deserialize_fingerprint(&response.fingerprint)?;
1903 let mtime = response
1904 .mtime
1905 .ok_or_else(|| anyhow!("missing mtime"))?
1906 .into();
1907 Ok((version, fingerprint, mtime))
1908 })
1909 }
1910 })
1911 }
1912
1913 fn as_any(&self) -> &dyn Any {
1914 self
1915 }
1916
1917 fn to_proto(&self) -> rpc::proto::File {
1918 rpc::proto::File {
1919 worktree_id: self.worktree.id() as u64,
1920 entry_id: self.entry_id.to_proto(),
1921 path: self.path.to_string_lossy().into(),
1922 mtime: Some(self.mtime.into()),
1923 is_deleted: self.is_deleted,
1924 }
1925 }
1926}
1927
1928impl language::LocalFile for File {
1929 fn abs_path(&self, cx: &AppContext) -> PathBuf {
1930 self.worktree
1931 .read(cx)
1932 .as_local()
1933 .unwrap()
1934 .abs_path
1935 .join(&self.path)
1936 }
1937
1938 fn load(&self, cx: &AppContext) -> Task<Result<String>> {
1939 let worktree = self.worktree.read(cx).as_local().unwrap();
1940 let abs_path = worktree.absolutize(&self.path);
1941 let fs = worktree.fs.clone();
1942 cx.background()
1943 .spawn(async move { fs.load(&abs_path).await })
1944 }
1945
1946 fn buffer_reloaded(
1947 &self,
1948 buffer_id: u64,
1949 version: &clock::Global,
1950 fingerprint: RopeFingerprint,
1951 line_ending: LineEnding,
1952 mtime: SystemTime,
1953 cx: &mut MutableAppContext,
1954 ) {
1955 let worktree = self.worktree.read(cx).as_local().unwrap();
1956 if let Some(project_id) = worktree.share.as_ref().map(|share| share.project_id) {
1957 worktree
1958 .client
1959 .send(proto::BufferReloaded {
1960 project_id,
1961 buffer_id,
1962 version: serialize_version(version),
1963 mtime: Some(mtime.into()),
1964 fingerprint: serialize_fingerprint(fingerprint),
1965 line_ending: serialize_line_ending(line_ending) as i32,
1966 })
1967 .log_err();
1968 }
1969 }
1970}
1971
1972impl File {
1973 pub fn from_proto(
1974 proto: rpc::proto::File,
1975 worktree: ModelHandle<Worktree>,
1976 cx: &AppContext,
1977 ) -> Result<Self> {
1978 let worktree_id = worktree
1979 .read(cx)
1980 .as_remote()
1981 .ok_or_else(|| anyhow!("not remote"))?
1982 .id();
1983
1984 if worktree_id.to_proto() != proto.worktree_id {
1985 return Err(anyhow!("worktree id does not match file"));
1986 }
1987
1988 Ok(Self {
1989 worktree,
1990 path: Path::new(&proto.path).into(),
1991 mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
1992 entry_id: ProjectEntryId::from_proto(proto.entry_id),
1993 is_local: false,
1994 is_deleted: proto.is_deleted,
1995 })
1996 }
1997
1998 pub fn from_dyn(file: Option<&Arc<dyn language::File>>) -> Option<&Self> {
1999 file.and_then(|f| f.as_any().downcast_ref())
2000 }
2001
2002 pub fn worktree_id(&self, cx: &AppContext) -> WorktreeId {
2003 self.worktree.read(cx).id()
2004 }
2005
2006 pub fn project_entry_id(&self, _: &AppContext) -> Option<ProjectEntryId> {
2007 if self.is_deleted {
2008 None
2009 } else {
2010 Some(self.entry_id)
2011 }
2012 }
2013}
2014
2015#[derive(Clone, Debug, PartialEq, Eq)]
2016pub struct Entry {
2017 pub id: ProjectEntryId,
2018 pub kind: EntryKind,
2019 pub path: Arc<Path>,
2020 pub inode: u64,
2021 pub mtime: SystemTime,
2022 pub is_symlink: bool,
2023 pub is_ignored: bool,
2024}
2025
2026#[derive(Clone, Copy, Debug, PartialEq, Eq)]
2027pub enum EntryKind {
2028 PendingDir,
2029 Dir,
2030 File(CharBag),
2031}
2032
2033impl Entry {
2034 fn new(
2035 path: Arc<Path>,
2036 metadata: &fs::Metadata,
2037 next_entry_id: &AtomicUsize,
2038 root_char_bag: CharBag,
2039 ) -> Self {
2040 Self {
2041 id: ProjectEntryId::new(next_entry_id),
2042 kind: if metadata.is_dir {
2043 EntryKind::PendingDir
2044 } else {
2045 EntryKind::File(char_bag_for_path(root_char_bag, &path))
2046 },
2047 path,
2048 inode: metadata.inode,
2049 mtime: metadata.mtime,
2050 is_symlink: metadata.is_symlink,
2051 is_ignored: false,
2052 }
2053 }
2054
2055 pub fn is_dir(&self) -> bool {
2056 matches!(self.kind, EntryKind::Dir | EntryKind::PendingDir)
2057 }
2058
2059 pub fn is_file(&self) -> bool {
2060 matches!(self.kind, EntryKind::File(_))
2061 }
2062}
2063
2064impl sum_tree::Item for Entry {
2065 type Summary = EntrySummary;
2066
2067 fn summary(&self) -> Self::Summary {
2068 let visible_count = if self.is_ignored { 0 } else { 1 };
2069 let file_count;
2070 let visible_file_count;
2071 if self.is_file() {
2072 file_count = 1;
2073 visible_file_count = visible_count;
2074 } else {
2075 file_count = 0;
2076 visible_file_count = 0;
2077 }
2078
2079 EntrySummary {
2080 max_path: self.path.clone(),
2081 count: 1,
2082 visible_count,
2083 file_count,
2084 visible_file_count,
2085 }
2086 }
2087}
2088
2089impl sum_tree::KeyedItem for Entry {
2090 type Key = PathKey;
2091
2092 fn key(&self) -> Self::Key {
2093 PathKey(self.path.clone())
2094 }
2095}
2096
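/// Aggregate statistics for a run of entries in the `entries_by_path` sum tree:
/// the rightmost path in the run, plus counts of all entries, non-ignored
/// ("visible") entries, files, and non-ignored files.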
2097#[derive(Clone, Debug)]
2098pub struct EntrySummary {
2099 max_path: Arc<Path>,
2100 count: usize,
2101 visible_count: usize,
2102 file_count: usize,
2103 visible_file_count: usize,
2104}
2105
2106impl Default for EntrySummary {
2107 fn default() -> Self {
2108 Self {
2109 max_path: Arc::from(Path::new("")),
2110 count: 0,
2111 visible_count: 0,
2112 file_count: 0,
2113 visible_file_count: 0,
2114 }
2115 }
2116}
2117
2118impl sum_tree::Summary for EntrySummary {
2119 type Context = ();
2120
2121 fn add_summary(&mut self, rhs: &Self, _: &()) {
2122 self.max_path = rhs.max_path.clone();
2123 self.count += rhs.count;
2124 self.visible_count += rhs.visible_count;
2125 self.file_count += rhs.file_count;
2126 self.visible_file_count += rhs.visible_file_count;
2127 }
2128}
2129
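/// A lightweight record stored in `entries_by_id` and keyed by `ProjectEntryId`,
/// allowing an entry's path and ignore status to be looked up by id.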
2130#[derive(Clone, Debug)]
2131struct PathEntry {
2132 id: ProjectEntryId,
2133 path: Arc<Path>,
2134 is_ignored: bool,
2135 scan_id: usize,
2136}
2137
2138impl sum_tree::Item for PathEntry {
2139 type Summary = PathEntrySummary;
2140
2141 fn summary(&self) -> Self::Summary {
2142 PathEntrySummary { max_id: self.id }
2143 }
2144}
2145
2146impl sum_tree::KeyedItem for PathEntry {
2147 type Key = ProjectEntryId;
2148
2149 fn key(&self) -> Self::Key {
2150 self.id
2151 }
2152}
2153
2154#[derive(Clone, Debug, Default)]
2155struct PathEntrySummary {
2156 max_id: ProjectEntryId,
2157}
2158
2159impl sum_tree::Summary for PathEntrySummary {
2160 type Context = ();
2161
2162 fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
2163 self.max_id = summary.max_id;
2164 }
2165}
2166
2167impl<'a> sum_tree::Dimension<'a, PathEntrySummary> for ProjectEntryId {
2168 fn add_summary(&mut self, summary: &'a PathEntrySummary, _: &()) {
2169 *self = summary.max_id;
2170 }
2171}
2172
2173#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
2174pub struct PathKey(Arc<Path>);
2175
2176impl Default for PathKey {
2177 fn default() -> Self {
2178 Self(Path::new("").into())
2179 }
2180}
2181
2182impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey {
2183 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2184 self.0 = summary.max_path.clone();
2185 }
2186}
2187
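/// Scans a local worktree on the background executor.
///
/// The scanner shares a `LocalSnapshot` (behind a mutex) with the foreground
/// thread, reports progress over the `notify` channel as `ScanState` transitions,
/// and, after the initial scan, is driven by batches of `fsevent::Event`s in `run`.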
2188struct BackgroundScanner {
2189 fs: Arc<dyn Fs>,
2190 snapshot: Arc<Mutex<LocalSnapshot>>,
2191 notify: UnboundedSender<ScanState>,
2192 executor: Arc<executor::Background>,
2193}
2194
2195impl BackgroundScanner {
2196 fn new(
2197 snapshot: Arc<Mutex<LocalSnapshot>>,
2198 notify: UnboundedSender<ScanState>,
2199 fs: Arc<dyn Fs>,
2200 executor: Arc<executor::Background>,
2201 ) -> Self {
2202 Self {
2203 fs,
2204 snapshot,
2205 notify,
2206 executor,
2207 }
2208 }
2209
2210 fn abs_path(&self) -> Arc<Path> {
2211 self.snapshot.lock().abs_path.clone()
2212 }
2213
2214 fn snapshot(&self) -> LocalSnapshot {
2215 self.snapshot.lock().clone()
2216 }
2217
2218 async fn run(mut self, events_rx: impl Stream<Item = Vec<fsevent::Event>>) {
2219 if self.notify.unbounded_send(ScanState::Initializing).is_err() {
2220 return;
2221 }
2222
2223 if let Err(err) = self.scan_dirs().await {
2224 if self
2225 .notify
2226 .unbounded_send(ScanState::Err(Arc::new(err)))
2227 .is_err()
2228 {
2229 return;
2230 }
2231 }
2232
2233 if self.notify.unbounded_send(ScanState::Idle).is_err() {
2234 return;
2235 }
2236
2237 futures::pin_mut!(events_rx);
2238
2239 while let Some(mut events) = events_rx.next().await {
2240 while let Poll::Ready(Some(additional_events)) = futures::poll!(events_rx.next()) {
2241 events.extend(additional_events);
2242 }
2243
2244 if self.notify.unbounded_send(ScanState::Updating).is_err() {
2245 break;
2246 }
2247
2248 if !self.process_events(events).await {
2249 break;
2250 }
2251
2252 if self.notify.unbounded_send(ScanState::Idle).is_err() {
2253 break;
2254 }
2255 }
2256 }
2257
2258 async fn scan_dirs(&mut self) -> Result<()> {
2259 let root_char_bag;
2260 let root_abs_path;
2261 let root_inode;
2262 let is_dir;
2263 let next_entry_id;
2264 {
2265 let mut snapshot = self.snapshot.lock();
2266 snapshot.scan_started();
2267 root_char_bag = snapshot.root_char_bag;
2268 root_abs_path = snapshot.abs_path.clone();
2269 root_inode = snapshot.root_entry().map(|e| e.inode);
2270 is_dir = snapshot.root_entry().map_or(false, |e| e.is_dir());
2271 next_entry_id = snapshot.next_entry_id.clone();
2272 };
2273
2274 // Populate ignores above the root.
2275 for ancestor in root_abs_path.ancestors().skip(1) {
2276 if let Ok(ignore) = build_gitignore(&ancestor.join(&*GITIGNORE), self.fs.as_ref()).await
2277 {
2278 self.snapshot
2279 .lock()
2280 .ignores_by_parent_abs_path
2281 .insert(ancestor.into(), (ignore.into(), 0));
2282 }
2283 }
2284
2285 let ignore_stack = {
2286 let mut snapshot = self.snapshot.lock();
2287 let ignore_stack = snapshot.ignore_stack_for_abs_path(&root_abs_path, true);
2288 if ignore_stack.is_all() {
2289 if let Some(mut root_entry) = snapshot.root_entry().cloned() {
2290 root_entry.is_ignored = true;
2291 snapshot.insert_entry(root_entry, self.fs.as_ref());
2292 }
2293 }
2294 ignore_stack
2295 };
2296
2297 if is_dir {
2298 let path: Arc<Path> = Arc::from(Path::new(""));
2299 let mut ancestor_inodes = TreeSet::default();
2300 if let Some(root_inode) = root_inode {
2301 ancestor_inodes.insert(root_inode);
2302 }
2303
2304 let (tx, rx) = channel::unbounded();
2305 self.executor
2306 .block(tx.send(ScanJob {
2307 abs_path: root_abs_path.to_path_buf(),
2308 path,
2309 ignore_stack,
2310 ancestor_inodes,
2311 scan_queue: tx.clone(),
2312 }))
2313 .unwrap();
2314 drop(tx);
2315
2316 self.executor
2317 .scoped(|scope| {
2318 for _ in 0..self.executor.num_cpus() {
2319 scope.spawn(async {
2320 while let Ok(job) = rx.recv().await {
2321 if let Err(err) = self
2322 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2323 .await
2324 {
2325 log::error!("error scanning {:?}: {}", job.abs_path, err);
2326 }
2327 }
2328 });
2329 }
2330 })
2331 .await;
2332
2333 self.snapshot.lock().scan_completed();
2334 }
2335
2336 Ok(())
2337 }
2338
2339 async fn scan_dir(
2340 &self,
2341 root_char_bag: CharBag,
2342 next_entry_id: Arc<AtomicUsize>,
2343 job: &ScanJob,
2344 ) -> Result<()> {
2345 let mut new_entries: Vec<Entry> = Vec::new();
2346 let mut new_jobs: Vec<ScanJob> = Vec::new();
2347 let mut ignore_stack = job.ignore_stack.clone();
2348 let mut new_ignore = None;
2349
2350 let mut child_paths = self.fs.read_dir(&job.abs_path).await?;
2351 while let Some(child_abs_path) = child_paths.next().await {
2352 let child_abs_path = match child_abs_path {
2353 Ok(child_abs_path) => child_abs_path,
2354 Err(error) => {
2355 log::error!("error processing entry {:?}", error);
2356 continue;
2357 }
2358 };
2359 let child_name = child_abs_path.file_name().unwrap();
2360 let child_path: Arc<Path> = job.path.join(child_name).into();
2361 let child_metadata = match self.fs.metadata(&child_abs_path).await {
2362 Ok(Some(metadata)) => metadata,
2363 Ok(None) => continue,
2364 Err(err) => {
2365 log::error!("error processing {:?}: {:?}", child_abs_path, err);
2366 continue;
2367 }
2368 };
2369
2370 // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
2371 if child_name == *GITIGNORE {
2372 match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
2373 Ok(ignore) => {
2374 let ignore = Arc::new(ignore);
2375 ignore_stack =
2376 ignore_stack.append(job.abs_path.as_path().into(), ignore.clone());
2377 new_ignore = Some(ignore);
2378 }
2379 Err(error) => {
2380 log::error!(
2381 "error loading .gitignore file {:?} - {:?}",
2382 child_name,
2383 error
2384 );
2385 }
2386 }
2387
2388 // Update the ignore status of any child entries we've already processed to reflect
2389 // the ignore file in the current directory. Because `.gitignore` starts with a `.`,
2390 // it is usually among the first entries read, so there should rarely be many of them.
2391 // Update the ignore stack associated with any new jobs as well.
2392 let mut new_jobs = new_jobs.iter_mut();
2393 for entry in &mut new_entries {
2394 let entry_abs_path = self.abs_path().join(&entry.path);
2395 entry.is_ignored =
2396 ignore_stack.is_abs_path_ignored(&entry_abs_path, entry.is_dir());
2397 if entry.is_dir() {
2398 new_jobs.next().unwrap().ignore_stack = if entry.is_ignored {
2399 IgnoreStack::all()
2400 } else {
2401 ignore_stack.clone()
2402 };
2403 }
2404 }
2405 }
2406
2407 let mut child_entry = Entry::new(
2408 child_path.clone(),
2409 &child_metadata,
2410 &next_entry_id,
2411 root_char_bag,
2412 );
2413
2414 if child_entry.is_dir() {
2415 let is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true);
2416 child_entry.is_ignored = is_ignored;
2417
2418 if !job.ancestor_inodes.contains(&child_entry.inode) {
2419 let mut ancestor_inodes = job.ancestor_inodes.clone();
2420 ancestor_inodes.insert(child_entry.inode);
2421 new_jobs.push(ScanJob {
2422 abs_path: child_abs_path,
2423 path: child_path,
2424 ignore_stack: if is_ignored {
2425 IgnoreStack::all()
2426 } else {
2427 ignore_stack.clone()
2428 },
2429 ancestor_inodes,
2430 scan_queue: job.scan_queue.clone(),
2431 });
2432 }
2433 } else {
2434 child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
2435 }
2436
2437 new_entries.push(child_entry);
2438 }
2439
2440 self.snapshot.lock().populate_dir(
2441 job.path.clone(),
2442 new_entries,
2443 new_ignore,
2444 self.fs.as_ref(),
2445 );
2446 for new_job in new_jobs {
2447 job.scan_queue.send(new_job).await.unwrap();
2448 }
2449
2450 Ok(())
2451 }
2452
2453 async fn process_events(&mut self, mut events: Vec<fsevent::Event>) -> bool {
2454 events.sort_unstable_by(|a, b| a.path.cmp(&b.path));
2455 events.dedup_by(|a, b| a.path.starts_with(&b.path));
2456
2457 let root_char_bag;
2458 let root_abs_path;
2459 let next_entry_id;
2460 {
2461 let mut snapshot = self.snapshot.lock();
2462 snapshot.scan_started();
2463 root_char_bag = snapshot.root_char_bag;
2464 root_abs_path = snapshot.abs_path.clone();
2465 next_entry_id = snapshot.next_entry_id.clone();
2466 }
2467
2468 let root_canonical_path = if let Ok(path) = self.fs.canonicalize(&root_abs_path).await {
2469 path
2470 } else {
2471 return false;
2472 };
2473 let metadata = futures::future::join_all(
2474 events
2475 .iter()
2476 .map(|event| self.fs.metadata(&event.path))
2477 .collect::<Vec<_>>(),
2478 )
2479 .await;
2480
2481 // Hold the snapshot lock while removing and re-inserting the entries affected
2482 // by each event. This way, the snapshot is not observable to the foreground
2483 // thread while this operation is in progress.
2484 let (scan_queue_tx, scan_queue_rx) = channel::unbounded();
2485 {
2486 let mut snapshot = self.snapshot.lock();
2487 for event in &events {
2488 if let Ok(path) = event.path.strip_prefix(&root_canonical_path) {
2489 snapshot.remove_path(path);
2490 }
2491 }
2492
2493 for (event, metadata) in events.into_iter().zip(metadata.into_iter()) {
2494 let path: Arc<Path> = match event.path.strip_prefix(&root_canonical_path) {
2495 Ok(path) => Arc::from(path.to_path_buf()),
2496 Err(_) => {
2497 log::error!(
2498 "unexpected event {:?} for root path {:?}",
2499 event.path,
2500 root_canonical_path
2501 );
2502 continue;
2503 }
2504 };
2505 let abs_path = root_abs_path.join(&path);
2506
2507 match metadata {
2508 Ok(Some(metadata)) => {
2509 let ignore_stack =
2510 snapshot.ignore_stack_for_abs_path(&abs_path, metadata.is_dir);
2511 let mut fs_entry = Entry::new(
2512 path.clone(),
2513 &metadata,
2514 snapshot.next_entry_id.as_ref(),
2515 snapshot.root_char_bag,
2516 );
2517 fs_entry.is_ignored = ignore_stack.is_all();
2518 snapshot.insert_entry(fs_entry, self.fs.as_ref());
2519
2520 let scan_id = snapshot.scan_id;
2521 if let Some(repo) = snapshot.in_dot_git(&path) {
2522 repo.repo.lock().reload_index();
2523 repo.scan_id = scan_id;
2524 }
2525
2526 let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path);
2527 if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) {
2528 ancestor_inodes.insert(metadata.inode);
2529 self.executor
2530 .block(scan_queue_tx.send(ScanJob {
2531 abs_path,
2532 path,
2533 ignore_stack,
2534 ancestor_inodes,
2535 scan_queue: scan_queue_tx.clone(),
2536 }))
2537 .unwrap();
2538 }
2539 }
2540 Ok(None) => {}
2541 Err(err) => {
2542 // TODO - create a special 'error' entry in the entries tree to mark this
2543 log::error!("error reading file on event {:?}", err);
2544 }
2545 }
2546 }
2547 drop(scan_queue_tx);
2548 }
2549
2550 // Scan any directories that were created as part of this event batch.
2551 self.executor
2552 .scoped(|scope| {
2553 for _ in 0..self.executor.num_cpus() {
2554 scope.spawn(async {
2555 while let Ok(job) = scan_queue_rx.recv().await {
2556 if let Err(err) = self
2557 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2558 .await
2559 {
2560 log::error!("error scanning {:?}: {}", job.abs_path, err);
2561 }
2562 }
2563 });
2564 }
2565 })
2566 .await;
2567
2568 // Attempt to detect renames only over a single batch of file-system events.
2569 self.snapshot.lock().removed_entry_ids.clear();
2570
2571 self.update_ignore_statuses().await;
2572 self.update_git_repositories();
2573 self.snapshot.lock().scan_completed();
2574 true
2575 }
2576
2577 async fn update_ignore_statuses(&self) {
2578 let mut snapshot = self.snapshot();
2579
2580 let mut ignores_to_update = Vec::new();
2581 let mut ignores_to_delete = Vec::new();
2582 for (parent_abs_path, (_, scan_id)) in &snapshot.ignores_by_parent_abs_path {
2583 if let Ok(parent_path) = parent_abs_path.strip_prefix(&snapshot.abs_path) {
2584 if *scan_id == snapshot.scan_id && snapshot.entry_for_path(parent_path).is_some() {
2585 ignores_to_update.push(parent_abs_path.clone());
2586 }
2587
2588 let ignore_path = parent_path.join(&*GITIGNORE);
2589 if snapshot.entry_for_path(ignore_path).is_none() {
2590 ignores_to_delete.push(parent_abs_path.clone());
2591 }
2592 }
2593 }
2594
2595 for parent_abs_path in ignores_to_delete {
2596 snapshot.ignores_by_parent_abs_path.remove(&parent_abs_path);
2597 self.snapshot
2598 .lock()
2599 .ignores_by_parent_abs_path
2600 .remove(&parent_abs_path);
2601 }
2602
2603 let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded();
2604 ignores_to_update.sort_unstable();
2605 let mut ignores_to_update = ignores_to_update.into_iter().peekable();
2606 while let Some(parent_abs_path) = ignores_to_update.next() {
2607 while ignores_to_update
2608 .peek()
2609 .map_or(false, |p| p.starts_with(&parent_abs_path))
2610 {
2611 ignores_to_update.next().unwrap();
2612 }
2613
2614 let ignore_stack = snapshot.ignore_stack_for_abs_path(&parent_abs_path, true);
2615 ignore_queue_tx
2616 .send(UpdateIgnoreStatusJob {
2617 abs_path: parent_abs_path,
2618 ignore_stack,
2619 ignore_queue: ignore_queue_tx.clone(),
2620 })
2621 .await
2622 .unwrap();
2623 }
2624 drop(ignore_queue_tx);
2625
2626 self.executor
2627 .scoped(|scope| {
2628 for _ in 0..self.executor.num_cpus() {
2629 scope.spawn(async {
2630 while let Ok(job) = ignore_queue_rx.recv().await {
2631 self.update_ignore_status(job, &snapshot).await;
2632 }
2633 });
2634 }
2635 })
2636 .await;
2637 }
2638
2639 fn update_git_repositories(&self) {
2640 let mut snapshot = self.snapshot.lock();
2641 let mut git_repositories = mem::take(&mut snapshot.git_repositories);
2642 git_repositories.retain(|repo| snapshot.entry_for_path(&repo.git_dir_path).is_some());
2643 snapshot.git_repositories = git_repositories;
2644 }
2645
2646 async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) {
2647 let mut ignore_stack = job.ignore_stack;
2648 if let Some((ignore, _)) = snapshot.ignores_by_parent_abs_path.get(&job.abs_path) {
2649 ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone());
2650 }
2651
2652 let mut entries_by_id_edits = Vec::new();
2653 let mut entries_by_path_edits = Vec::new();
2654 let path = job.abs_path.strip_prefix(&snapshot.abs_path).unwrap();
2655 for mut entry in snapshot.child_entries(path).cloned() {
2656 let was_ignored = entry.is_ignored;
2657 let abs_path = self.abs_path().join(&entry.path);
2658 entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir());
2659 if entry.is_dir() {
2660 let child_ignore_stack = if entry.is_ignored {
2661 IgnoreStack::all()
2662 } else {
2663 ignore_stack.clone()
2664 };
2665 job.ignore_queue
2666 .send(UpdateIgnoreStatusJob {
2667 abs_path: abs_path.into(),
2668 ignore_stack: child_ignore_stack,
2669 ignore_queue: job.ignore_queue.clone(),
2670 })
2671 .await
2672 .unwrap();
2673 }
2674
2675 if entry.is_ignored != was_ignored {
2676 let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone();
2677 path_entry.scan_id = snapshot.scan_id;
2678 path_entry.is_ignored = entry.is_ignored;
2679 entries_by_id_edits.push(Edit::Insert(path_entry));
2680 entries_by_path_edits.push(Edit::Insert(entry));
2681 }
2682 }
2683
2684 let mut snapshot = self.snapshot.lock();
2685 snapshot.entries_by_path.edit(entries_by_path_edits, &());
2686 snapshot.entries_by_id.edit(entries_by_id_edits, &());
2687 }
2688}
2689
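/// Extends the root's `CharBag` with the lowercased characters of `path`, so a
/// file's bag contains the worktree root's characters plus its own. For example,
/// a path of `src/Lib.rs` contributes the characters of `src/lib.rs` on top of
/// whatever the root name already contributed.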
2690fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
2691 let mut result = root_char_bag;
2692 result.extend(
2693 path.to_string_lossy()
2694 .chars()
2695 .map(|c| c.to_ascii_lowercase()),
2696 );
2697 result
2698}
2699
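/// A unit of work on the scan queue: one directory to read, identified by both
/// its absolute and worktree-relative paths, together with the ignore stack in
/// effect there, the inodes of its ancestors (checked in `scan_dir` to avoid
/// descending into directories already visited via circular symlinks), and a
/// clone of the queue sender so child directories can be enqueued.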
2700struct ScanJob {
2701 abs_path: PathBuf,
2702 path: Arc<Path>,
2703 ignore_stack: Arc<IgnoreStack>,
2704 scan_queue: Sender<ScanJob>,
2705 ancestor_inodes: TreeSet<u64>,
2706}
2707
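/// A unit of work for recomputing ignore statuses after `.gitignore` changes:
/// the directory to process, the ignore stack that applies to it, and a sender
/// for enqueueing its subdirectories.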
2708struct UpdateIgnoreStatusJob {
2709 abs_path: Arc<Path>,
2710 ignore_stack: Arc<IgnoreStack>,
2711 ignore_queue: Sender<UpdateIgnoreStatusJob>,
2712}
2713
2714pub trait WorktreeHandle {
2715 #[cfg(any(test, feature = "test-support"))]
2716 fn flush_fs_events<'a>(
2717 &self,
2718 cx: &'a gpui::TestAppContext,
2719 ) -> futures::future::LocalBoxFuture<'a, ()>;
2720}
2721
2722impl WorktreeHandle for ModelHandle<Worktree> {
2723 // The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
2724 // occurred before the worktree was constructed. These events can cause the worktree to perform
2725 // extra directory scans, and emit extra scan-state notifications.
2726 //
2727 // This function mutates the worktree's directory and waits for those mutations to be picked up,
2728 // to ensure that all redundant FS events have already been processed.
2729 #[cfg(any(test, feature = "test-support"))]
2730 fn flush_fs_events<'a>(
2731 &self,
2732 cx: &'a gpui::TestAppContext,
2733 ) -> futures::future::LocalBoxFuture<'a, ()> {
2734 use smol::future::FutureExt;
2735
2736 let filename = "fs-event-sentinel";
2737 let tree = self.clone();
2738 let (fs, root_path) = self.read_with(cx, |tree, _| {
2739 let tree = tree.as_local().unwrap();
2740 (tree.fs.clone(), tree.abs_path().clone())
2741 });
2742
2743 async move {
2744 fs.create_file(&root_path.join(filename), Default::default())
2745 .await
2746 .unwrap();
2747 tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_some())
2748 .await;
2749
2750 fs.remove_file(&root_path.join(filename), Default::default())
2751 .await
2752 .unwrap();
2753 tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_none())
2754 .await;
2755
2756 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2757 .await;
2758 }
2759 .boxed_local()
2760 }
2761}
2762
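/// Running totals accumulated while seeking through the entry tree. Used as a
/// `sum_tree` dimension so a `Traversal` can convert between paths and entry
/// offsets while optionally skipping directories and ignored entries.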
2763#[derive(Clone, Debug)]
2764struct TraversalProgress<'a> {
2765 max_path: &'a Path,
2766 count: usize,
2767 visible_count: usize,
2768 file_count: usize,
2769 visible_file_count: usize,
2770}
2771
2772impl<'a> TraversalProgress<'a> {
2773 fn count(&self, include_dirs: bool, include_ignored: bool) -> usize {
2774 match (include_ignored, include_dirs) {
2775 (true, true) => self.count,
2776 (true, false) => self.file_count,
2777 (false, true) => self.visible_count,
2778 (false, false) => self.visible_file_count,
2779 }
2780 }
2781}
2782
2783impl<'a> sum_tree::Dimension<'a, EntrySummary> for TraversalProgress<'a> {
2784 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2785 self.max_path = summary.max_path.as_ref();
2786 self.count += summary.count;
2787 self.visible_count += summary.visible_count;
2788 self.file_count += summary.file_count;
2789 self.visible_file_count += summary.visible_file_count;
2790 }
2791}
2792
2793impl<'a> Default for TraversalProgress<'a> {
2794 fn default() -> Self {
2795 Self {
2796 max_path: Path::new(""),
2797 count: 0,
2798 visible_count: 0,
2799 file_count: 0,
2800 visible_file_count: 0,
2801 }
2802 }
2803}
2804
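/// A filtered, cursor-based iterator over a snapshot's entries in path order.
/// Depending on how it was constructed, directories and/or ignored entries are
/// skipped. A typical use, sketched here assuming a fully scanned snapshot bound
/// to a hypothetical `snapshot` variable, is collecting the visible file paths:
///
/// ```ignore
/// let visible_paths: Vec<_> = snapshot
///     .files(false, 0)
///     .map(|entry| entry.path.clone())
///     .collect();
/// ```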
2805pub struct Traversal<'a> {
2806 cursor: sum_tree::Cursor<'a, Entry, TraversalProgress<'a>>,
2807 include_ignored: bool,
2808 include_dirs: bool,
2809}
2810
2811impl<'a> Traversal<'a> {
2812 pub fn advance(&mut self) -> bool {
2813 self.advance_to_offset(self.offset() + 1)
2814 }
2815
2816 pub fn advance_to_offset(&mut self, offset: usize) -> bool {
2817 self.cursor.seek_forward(
2818 &TraversalTarget::Count {
2819 count: offset,
2820 include_dirs: self.include_dirs,
2821 include_ignored: self.include_ignored,
2822 },
2823 Bias::Right,
2824 &(),
2825 )
2826 }
2827
2828 pub fn advance_to_sibling(&mut self) -> bool {
2829 while let Some(entry) = self.cursor.item() {
2830 self.cursor.seek_forward(
2831 &TraversalTarget::PathSuccessor(&entry.path),
2832 Bias::Left,
2833 &(),
2834 );
2835 if let Some(entry) = self.cursor.item() {
2836 if (self.include_dirs || !entry.is_dir())
2837 && (self.include_ignored || !entry.is_ignored)
2838 {
2839 return true;
2840 }
2841 }
2842 }
2843 false
2844 }
2845
2846 pub fn entry(&self) -> Option<&'a Entry> {
2847 self.cursor.item()
2848 }
2849
2850 pub fn offset(&self) -> usize {
2851 self.cursor
2852 .start()
2853 .count(self.include_dirs, self.include_ignored)
2854 }
2855}
2856
2857impl<'a> Iterator for Traversal<'a> {
2858 type Item = &'a Entry;
2859
2860 fn next(&mut self) -> Option<Self::Item> {
2861 if let Some(item) = self.entry() {
2862 self.advance();
2863 Some(item)
2864 } else {
2865 None
2866 }
2867 }
2868}
2869
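/// A seek target for a traversal's cursor: an exact path, the position just past
/// everything beneath a path (`PathSuccessor`), or an absolute entry count
/// interpreted under the traversal's directory/ignored visibility settings.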
2870#[derive(Debug)]
2871enum TraversalTarget<'a> {
2872 Path(&'a Path),
2873 PathSuccessor(&'a Path),
2874 Count {
2875 count: usize,
2876 include_ignored: bool,
2877 include_dirs: bool,
2878 },
2879}
2880
2881impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> {
2882 fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering {
2883 match self {
2884 TraversalTarget::Path(path) => path.cmp(&cursor_location.max_path),
2885 TraversalTarget::PathSuccessor(path) => {
2886 if !cursor_location.max_path.starts_with(path) {
2887 Ordering::Equal
2888 } else {
2889 Ordering::Greater
2890 }
2891 }
2892 TraversalTarget::Count {
2893 count,
2894 include_dirs,
2895 include_ignored,
2896 } => Ord::cmp(
2897 count,
2898 &cursor_location.count(*include_dirs, *include_ignored),
2899 ),
2900 }
2901 }
2902}
2903
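/// Yields the immediate children of `parent_path` by repeatedly advancing the
/// underlying traversal to the next sibling, stopping once an entry no longer
/// lies beneath the parent.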
2904struct ChildEntriesIter<'a> {
2905 parent_path: &'a Path,
2906 traversal: Traversal<'a>,
2907}
2908
2909impl<'a> Iterator for ChildEntriesIter<'a> {
2910 type Item = &'a Entry;
2911
2912 fn next(&mut self) -> Option<Self::Item> {
2913 if let Some(item) = self.traversal.entry() {
2914 if item.path.starts_with(&self.parent_path) {
2915 self.traversal.advance_to_sibling();
2916 return Some(item);
2917 }
2918 }
2919 None
2920 }
2921}
2922
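// Conversions between `Entry` and its protobuf representation. Judging from the
// error message below, the `TryFrom` direction is used when applying updates to
// remote worktree snapshots.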
2923impl<'a> From<&'a Entry> for proto::Entry {
2924 fn from(entry: &'a Entry) -> Self {
2925 Self {
2926 id: entry.id.to_proto(),
2927 is_dir: entry.is_dir(),
2928 path: entry.path.to_string_lossy().into(),
2929 inode: entry.inode,
2930 mtime: Some(entry.mtime.into()),
2931 is_symlink: entry.is_symlink,
2932 is_ignored: entry.is_ignored,
2933 }
2934 }
2935}
2936
2937impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
2938 type Error = anyhow::Error;
2939
2940 fn try_from((root_char_bag, entry): (&'a CharBag, proto::Entry)) -> Result<Self> {
2941 if let Some(mtime) = entry.mtime {
2942 let kind = if entry.is_dir {
2943 EntryKind::Dir
2944 } else {
2945 let mut char_bag = *root_char_bag;
2946 char_bag.extend(entry.path.chars().map(|c| c.to_ascii_lowercase()));
2947 EntryKind::File(char_bag)
2948 };
2949 let path: Arc<Path> = PathBuf::from(entry.path).into();
2950 Ok(Entry {
2951 id: ProjectEntryId::from_proto(entry.id),
2952 kind,
2953 path,
2954 inode: entry.inode,
2955 mtime: mtime.into(),
2956 is_symlink: entry.is_symlink,
2957 is_ignored: entry.is_ignored,
2958 })
2959 } else {
2960 Err(anyhow!(
2961 "missing mtime in remote worktree entry {:?}",
2962 entry.path
2963 ))
2964 }
2965 }
2966}
2967
2968#[cfg(test)]
2969mod tests {
2970 use super::*;
2971 use anyhow::Result;
2972 use client::test::FakeHttpClient;
2973 use fs::repository::FakeGitRepository;
2974 use fs::{FakeFs, RealFs};
2975 use gpui::{executor::Deterministic, TestAppContext};
2976 use rand::prelude::*;
2977 use serde_json::json;
2978 use std::{
2979 env,
2980 fmt::Write,
2981 time::{SystemTime, UNIX_EPOCH},
2982 };
2983
2984 use util::test::temp_tree;
2985
2986 #[gpui::test]
2987 async fn test_traversal(cx: &mut TestAppContext) {
2988 let fs = FakeFs::new(cx.background());
2989 fs.insert_tree(
2990 "/root",
2991 json!({
2992 ".gitignore": "a/b\n",
2993 "a": {
2994 "b": "",
2995 "c": "",
2996 }
2997 }),
2998 )
2999 .await;
3000
3001 let http_client = FakeHttpClient::with_404_response();
3002 let client = cx.read(|cx| Client::new(http_client, cx));
3003
3004 let tree = Worktree::local(
3005 client,
3006 Arc::from(Path::new("/root")),
3007 true,
3008 fs,
3009 Default::default(),
3010 &mut cx.to_async(),
3011 )
3012 .await
3013 .unwrap();
3014 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3015 .await;
3016
3017 tree.read_with(cx, |tree, _| {
3018 assert_eq!(
3019 tree.entries(false)
3020 .map(|entry| entry.path.as_ref())
3021 .collect::<Vec<_>>(),
3022 vec![
3023 Path::new(""),
3024 Path::new(".gitignore"),
3025 Path::new("a"),
3026 Path::new("a/c"),
3027 ]
3028 );
3029 assert_eq!(
3030 tree.entries(true)
3031 .map(|entry| entry.path.as_ref())
3032 .collect::<Vec<_>>(),
3033 vec![
3034 Path::new(""),
3035 Path::new(".gitignore"),
3036 Path::new("a"),
3037 Path::new("a/b"),
3038 Path::new("a/c"),
3039 ]
3040 );
3041 })
3042 }
3043
3044 #[gpui::test(iterations = 10)]
3045 async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
3046 let fs = FakeFs::new(cx.background());
3047 fs.insert_tree(
3048 "/root",
3049 json!({
3050 "lib": {
3051 "a": {
3052 "a.txt": ""
3053 },
3054 "b": {
3055 "b.txt": ""
3056 }
3057 }
3058 }),
3059 )
3060 .await;
3061 fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
3062 fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
3063
3064 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3065 let tree = Worktree::local(
3066 client,
3067 Arc::from(Path::new("/root")),
3068 true,
3069 fs.clone(),
3070 Default::default(),
3071 &mut cx.to_async(),
3072 )
3073 .await
3074 .unwrap();
3075
3076 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3077 .await;
3078
3079 tree.read_with(cx, |tree, _| {
3080 assert_eq!(
3081 tree.entries(false)
3082 .map(|entry| entry.path.as_ref())
3083 .collect::<Vec<_>>(),
3084 vec![
3085 Path::new(""),
3086 Path::new("lib"),
3087 Path::new("lib/a"),
3088 Path::new("lib/a/a.txt"),
3089 Path::new("lib/a/lib"),
3090 Path::new("lib/b"),
3091 Path::new("lib/b/b.txt"),
3092 Path::new("lib/b/lib"),
3093 ]
3094 );
3095 });
3096
3097 fs.rename(
3098 Path::new("/root/lib/a/lib"),
3099 Path::new("/root/lib/a/lib-2"),
3100 Default::default(),
3101 )
3102 .await
3103 .unwrap();
3104 executor.run_until_parked();
3105 tree.read_with(cx, |tree, _| {
3106 assert_eq!(
3107 tree.entries(false)
3108 .map(|entry| entry.path.as_ref())
3109 .collect::<Vec<_>>(),
3110 vec![
3111 Path::new(""),
3112 Path::new("lib"),
3113 Path::new("lib/a"),
3114 Path::new("lib/a/a.txt"),
3115 Path::new("lib/a/lib-2"),
3116 Path::new("lib/b"),
3117 Path::new("lib/b/b.txt"),
3118 Path::new("lib/b/lib"),
3119 ]
3120 );
3121 });
3122 }
3123
3124 #[gpui::test]
3125 async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
3126 let parent_dir = temp_tree(json!({
3127 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
3128 "tree": {
3129 ".git": {},
3130 ".gitignore": "ignored-dir\n",
3131 "tracked-dir": {
3132 "tracked-file1": "",
3133 "ancestor-ignored-file1": "",
3134 },
3135 "ignored-dir": {
3136 "ignored-file1": ""
3137 }
3138 }
3139 }));
3140 let dir = parent_dir.path().join("tree");
3141
3142 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3143
3144 let tree = Worktree::local(
3145 client,
3146 dir.as_path(),
3147 true,
3148 Arc::new(RealFs),
3149 Default::default(),
3150 &mut cx.to_async(),
3151 )
3152 .await
3153 .unwrap();
3154 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3155 .await;
3156 tree.flush_fs_events(cx).await;
3157 cx.read(|cx| {
3158 let tree = tree.read(cx);
3159 assert!(
3160 !tree
3161 .entry_for_path("tracked-dir/tracked-file1")
3162 .unwrap()
3163 .is_ignored
3164 );
3165 assert!(
3166 tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
3167 .unwrap()
3168 .is_ignored
3169 );
3170 assert!(
3171 tree.entry_for_path("ignored-dir/ignored-file1")
3172 .unwrap()
3173 .is_ignored
3174 );
3175 });
3176
3177 std::fs::write(dir.join("tracked-dir/tracked-file2"), "").unwrap();
3178 std::fs::write(dir.join("tracked-dir/ancestor-ignored-file2"), "").unwrap();
3179 std::fs::write(dir.join("ignored-dir/ignored-file2"), "").unwrap();
3180 tree.flush_fs_events(cx).await;
3181 cx.read(|cx| {
3182 let tree = tree.read(cx);
3183 assert!(
3184 !tree
3185 .entry_for_path("tracked-dir/tracked-file2")
3186 .unwrap()
3187 .is_ignored
3188 );
3189 assert!(
3190 tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
3191 .unwrap()
3192 .is_ignored
3193 );
3194 assert!(
3195 tree.entry_for_path("ignored-dir/ignored-file2")
3196 .unwrap()
3197 .is_ignored
3198 );
3199 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
3200 });
3201 }
3202
3203 #[gpui::test]
3204 async fn test_git_repository_for_path(cx: &mut TestAppContext) {
3205 let root = temp_tree(json!({
3206 "dir1": {
3207 ".git": {},
3208 "deps": {
3209 "dep1": {
3210 ".git": {},
3211 "src": {
3212 "a.txt": ""
3213 }
3214 }
3215 },
3216 "src": {
3217 "b.txt": ""
3218 }
3219 },
3220 "c.txt": "",
3221 }));
3222
3223 let http_client = FakeHttpClient::with_404_response();
3224 let client = cx.read(|cx| Client::new(http_client, cx));
3225 let tree = Worktree::local(
3226 client,
3227 root.path(),
3228 true,
3229 Arc::new(RealFs),
3230 Default::default(),
3231 &mut cx.to_async(),
3232 )
3233 .await
3234 .unwrap();
3235
3236 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3237 .await;
3238 tree.flush_fs_events(cx).await;
3239
3240 tree.read_with(cx, |tree, _cx| {
3241 let tree = tree.as_local().unwrap();
3242
3243 assert!(tree.repo_for("c.txt".as_ref()).is_none());
3244
3245 let repo = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap();
3246 assert_eq!(repo.content_path.as_ref(), Path::new("dir1"));
3247 assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/.git"));
3248
3249 let repo = tree.repo_for("dir1/deps/dep1/src/a.txt".as_ref()).unwrap();
3250 assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1"));
3251 assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/deps/dep1/.git"),);
3252 });
3253
3254 let original_scan_id = tree.read_with(cx, |tree, _cx| {
3255 let tree = tree.as_local().unwrap();
3256 tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id
3257 });
3258
3259 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
3260 tree.flush_fs_events(cx).await;
3261
3262 tree.read_with(cx, |tree, _cx| {
3263 let tree = tree.as_local().unwrap();
3264 let new_scan_id = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id;
3265 assert_ne!(
3266 original_scan_id, new_scan_id,
3267 "original {original_scan_id}, new {new_scan_id}"
3268 );
3269 });
3270
3271 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
3272 tree.flush_fs_events(cx).await;
3273
3274 tree.read_with(cx, |tree, _cx| {
3275 let tree = tree.as_local().unwrap();
3276
3277 assert!(tree.repo_for("dir1/src/b.txt".as_ref()).is_none());
3278 });
3279 }
3280
3281 #[test]
3282 fn test_changed_repos() {
3283 fn fake_entry(git_dir_path: impl AsRef<Path>, scan_id: usize) -> GitRepositoryEntry {
3284 GitRepositoryEntry {
3285 repo: Arc::new(Mutex::new(FakeGitRepository::default())),
3286 scan_id,
3287 content_path: git_dir_path.as_ref().parent().unwrap().into(),
3288 git_dir_path: git_dir_path.as_ref().into(),
3289 }
3290 }
3291
3292 let prev_repos: Vec<GitRepositoryEntry> = vec![
3293 fake_entry("/.git", 0),
3294 fake_entry("/a/.git", 0),
3295 fake_entry("/a/b/.git", 0),
3296 ];
3297
3298 let new_repos: Vec<GitRepositoryEntry> = vec![
3299 fake_entry("/a/.git", 1),
3300 fake_entry("/a/b/.git", 0),
3301 fake_entry("/a/c/.git", 0),
3302 ];
3303
3304 let res = LocalWorktree::changed_repos(&prev_repos, &new_repos);
3305
3306 // Deletion retained
3307 assert!(res
3308 .iter()
3309 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/.git") && repo.scan_id == 0)
3310 .is_some());
3311
3312 // Update retained
3313 assert!(res
3314 .iter()
3315 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/.git") && repo.scan_id == 1)
3316 .is_some());
3317
3318 // Addition retained
3319 assert!(res
3320 .iter()
3321 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/c/.git") && repo.scan_id == 0)
3322 .is_some());
3323
3324 // No change, not retained
3325 assert!(res
3326 .iter()
3327 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/b/.git") && repo.scan_id == 0)
3328 .is_none());
3329 }
3330
3331 #[gpui::test]
3332 async fn test_write_file(cx: &mut TestAppContext) {
3333 let dir = temp_tree(json!({
3334 ".git": {},
3335 ".gitignore": "ignored-dir\n",
3336 "tracked-dir": {},
3337 "ignored-dir": {}
3338 }));
3339
3340 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3341
3342 let tree = Worktree::local(
3343 client,
3344 dir.path(),
3345 true,
3346 Arc::new(RealFs),
3347 Default::default(),
3348 &mut cx.to_async(),
3349 )
3350 .await
3351 .unwrap();
3352 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3353 .await;
3354 tree.flush_fs_events(cx).await;
3355
3356 tree.update(cx, |tree, cx| {
3357 tree.as_local().unwrap().write_file(
3358 Path::new("tracked-dir/file.txt"),
3359 "hello".into(),
3360 Default::default(),
3361 cx,
3362 )
3363 })
3364 .await
3365 .unwrap();
3366 tree.update(cx, |tree, cx| {
3367 tree.as_local().unwrap().write_file(
3368 Path::new("ignored-dir/file.txt"),
3369 "world".into(),
3370 Default::default(),
3371 cx,
3372 )
3373 })
3374 .await
3375 .unwrap();
3376
3377 tree.read_with(cx, |tree, _| {
3378 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
3379 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
3380 assert!(!tracked.is_ignored);
3381 assert!(ignored.is_ignored);
3382 });
3383 }
3384
3385 #[gpui::test(iterations = 30)]
3386 async fn test_create_directory(cx: &mut TestAppContext) {
3387 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3388
3389 let fs = FakeFs::new(cx.background());
3390 fs.insert_tree(
3391 "/a",
3392 json!({
3393 "b": {},
3394 "c": {},
3395 "d": {},
3396 }),
3397 )
3398 .await;
3399
3400 let tree = Worktree::local(
3401 client,
3402 "/a".as_ref(),
3403 true,
3404 fs,
3405 Default::default(),
3406 &mut cx.to_async(),
3407 )
3408 .await
3409 .unwrap();
3410
3411 let entry = tree
3412 .update(cx, |tree, cx| {
3413 tree.as_local_mut()
3414 .unwrap()
3415 .create_entry("a/e".as_ref(), true, cx)
3416 })
3417 .await
3418 .unwrap();
3419 assert!(entry.is_dir());
3420
3421 cx.foreground().run_until_parked();
3422 tree.read_with(cx, |tree, _| {
3423 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
3424 });
3425 }
3426
3427 #[gpui::test(iterations = 100)]
3428 fn test_random(mut rng: StdRng) {
3429 let operations = env::var("OPERATIONS")
3430 .map(|o| o.parse().unwrap())
3431 .unwrap_or(40);
3432 let initial_entries = env::var("INITIAL_ENTRIES")
3433 .map(|o| o.parse().unwrap())
3434 .unwrap_or(20);
3435
3436 let root_dir = tempdir::TempDir::new("worktree-test").unwrap();
3437 for _ in 0..initial_entries {
3438 randomly_mutate_tree(root_dir.path(), 1.0, &mut rng).unwrap();
3439 }
3440 log::info!("Generated initial tree");
3441
3442 let (notify_tx, _notify_rx) = mpsc::unbounded();
3443 let fs = Arc::new(RealFs);
3444 let next_entry_id = Arc::new(AtomicUsize::new(0));
3445 let mut initial_snapshot = LocalSnapshot {
3446 removed_entry_ids: Default::default(),
3447 ignores_by_parent_abs_path: Default::default(),
3448 git_repositories: Default::default(),
3449 next_entry_id: next_entry_id.clone(),
3450 snapshot: Snapshot {
3451 id: WorktreeId::from_usize(0),
3452 entries_by_path: Default::default(),
3453 entries_by_id: Default::default(),
3454 abs_path: root_dir.path().into(),
3455 root_name: Default::default(),
3456 root_char_bag: Default::default(),
3457 scan_id: 0,
3458 completed_scan_id: 0,
3459 },
3460 };
3461 initial_snapshot.insert_entry(
3462 Entry::new(
3463 Path::new("").into(),
3464 &smol::block_on(fs.metadata(root_dir.path()))
3465 .unwrap()
3466 .unwrap(),
3467 &next_entry_id,
3468 Default::default(),
3469 ),
3470 fs.as_ref(),
3471 );
3472 let mut scanner = BackgroundScanner::new(
3473 Arc::new(Mutex::new(initial_snapshot.clone())),
3474 notify_tx,
3475 fs.clone(),
3476 Arc::new(gpui::executor::Background::new()),
3477 );
3478 smol::block_on(scanner.scan_dirs()).unwrap();
3479 scanner.snapshot().check_invariants();
3480
3481 let mut events = Vec::new();
3482 let mut snapshots = Vec::new();
3483 let mut mutations_len = operations;
3484 while mutations_len > 1 {
3485 if !events.is_empty() && rng.gen_bool(0.4) {
3486 let len = rng.gen_range(0..=events.len());
3487 let to_deliver = events.drain(0..len).collect::<Vec<_>>();
3488 log::info!("Delivering events: {:#?}", to_deliver);
3489 smol::block_on(scanner.process_events(to_deliver));
3490 scanner.snapshot().check_invariants();
3491 } else {
3492 events.extend(randomly_mutate_tree(root_dir.path(), 0.6, &mut rng).unwrap());
3493 mutations_len -= 1;
3494 }
3495
3496 if rng.gen_bool(0.2) {
3497 snapshots.push(scanner.snapshot());
3498 }
3499 }
3500 log::info!("Quiescing: {:#?}", events);
3501 smol::block_on(scanner.process_events(events));
3502 scanner.snapshot().check_invariants();
3503
3504 let (notify_tx, _notify_rx) = mpsc::unbounded();
3505 let mut new_scanner = BackgroundScanner::new(
3506 Arc::new(Mutex::new(initial_snapshot)),
3507 notify_tx,
3508 scanner.fs.clone(),
3509 scanner.executor.clone(),
3510 );
3511 smol::block_on(new_scanner.scan_dirs()).unwrap();
3512 assert_eq!(
3513 scanner.snapshot().to_vec(true),
3514 new_scanner.snapshot().to_vec(true)
3515 );
3516
3517 for mut prev_snapshot in snapshots {
3518 let include_ignored = rng.gen::<bool>();
3519 if !include_ignored {
3520 let mut entries_by_path_edits = Vec::new();
3521 let mut entries_by_id_edits = Vec::new();
3522 for entry in prev_snapshot
3523 .entries_by_id
3524 .cursor::<()>()
3525 .filter(|e| e.is_ignored)
3526 {
3527 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
3528 entries_by_id_edits.push(Edit::Remove(entry.id));
3529 }
3530
3531 prev_snapshot
3532 .entries_by_path
3533 .edit(entries_by_path_edits, &());
3534 prev_snapshot.entries_by_id.edit(entries_by_id_edits, &());
3535 }
3536
3537 let update = scanner
3538 .snapshot()
3539 .build_update(&prev_snapshot, 0, 0, include_ignored);
3540 prev_snapshot.apply_remote_update(update).unwrap();
3541 assert_eq!(
3542 prev_snapshot.to_vec(true),
3543 scanner.snapshot().to_vec(include_ignored)
3544 );
3545 }
3546 }
3547
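/// Randomly mutates the tree rooted at `root_path` (inserting files and
/// directories, writing `.gitignore` files, renaming, or deleting paths) and
/// returns the `fsevent::Event`s a file-system watcher would report for those
/// mutations.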
3548 fn randomly_mutate_tree(
3549 root_path: &Path,
3550 insertion_probability: f64,
3551 rng: &mut impl Rng,
3552 ) -> Result<Vec<fsevent::Event>> {
3553 let root_path = root_path.canonicalize().unwrap();
3554 let (dirs, files) = read_dir_recursive(root_path.clone());
3555
3556 let mut events = Vec::new();
3557 let mut record_event = |path: PathBuf| {
3558 events.push(fsevent::Event {
3559 event_id: SystemTime::now()
3560 .duration_since(UNIX_EPOCH)
3561 .unwrap()
3562 .as_secs(),
3563 flags: fsevent::StreamFlags::empty(),
3564 path,
3565 });
3566 };
3567
3568 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
3569 let path = dirs.choose(rng).unwrap();
3570 let new_path = path.join(gen_name(rng));
3571
3572 if rng.gen() {
3573 log::info!("Creating dir {:?}", new_path.strip_prefix(root_path)?);
3574 std::fs::create_dir(&new_path)?;
3575 } else {
3576 log::info!("Creating file {:?}", new_path.strip_prefix(root_path)?);
3577 std::fs::write(&new_path, "")?;
3578 }
3579 record_event(new_path);
3580 } else if rng.gen_bool(0.05) {
3581 let ignore_dir_path = dirs.choose(rng).unwrap();
3582 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
3583
3584 let (subdirs, subfiles) = read_dir_recursive(ignore_dir_path.clone());
3585 let files_to_ignore = {
3586 let len = rng.gen_range(0..=subfiles.len());
3587 subfiles.choose_multiple(rng, len)
3588 };
3589 let dirs_to_ignore = {
3590 let len = rng.gen_range(0..subdirs.len());
3591 subdirs.choose_multiple(rng, len)
3592 };
3593
3594 let mut ignore_contents = String::new();
3595 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
3596 writeln!(
3597 ignore_contents,
3598 "{}",
3599 path_to_ignore
3600 .strip_prefix(&ignore_dir_path)?
3601 .to_str()
3602 .unwrap()
3603 )
3604 .unwrap();
3605 }
3606 log::info!(
3607 "Creating {:?} with contents:\n{}",
3608 ignore_path.strip_prefix(&root_path)?,
3609 ignore_contents
3610 );
3611 std::fs::write(&ignore_path, ignore_contents).unwrap();
3612 record_event(ignore_path);
3613 } else {
3614 let old_path = {
3615 let file_path = files.choose(rng);
3616 let dir_path = dirs[1..].choose(rng);
3617 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
3618 };
3619
3620 let is_rename = rng.gen();
3621 if is_rename {
3622 let new_path_parent = dirs
3623 .iter()
3624 .filter(|d| !d.starts_with(old_path))
3625 .choose(rng)
3626 .unwrap();
3627
3628 let overwrite_existing_dir =
3629 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
3630 let new_path = if overwrite_existing_dir {
3631 std::fs::remove_dir_all(&new_path_parent).ok();
3632 new_path_parent.to_path_buf()
3633 } else {
3634 new_path_parent.join(gen_name(rng))
3635 };
3636
3637 log::info!(
3638 "Renaming {:?} to {}{:?}",
3639 old_path.strip_prefix(&root_path)?,
3640 if overwrite_existing_dir {
3641 "overwrite "
3642 } else {
3643 ""
3644 },
3645 new_path.strip_prefix(&root_path)?
3646 );
3647 std::fs::rename(&old_path, &new_path)?;
3648 record_event(old_path.clone());
3649 record_event(new_path);
3650 } else if old_path.is_dir() {
3651 let (dirs, files) = read_dir_recursive(old_path.clone());
3652
3653 log::info!("Deleting dir {:?}", old_path.strip_prefix(&root_path)?);
3654 std::fs::remove_dir_all(&old_path).unwrap();
3655 for file in files {
3656 record_event(file);
3657 }
3658 for dir in dirs {
3659 record_event(dir);
3660 }
3661 } else {
3662 log::info!("Deleting file {:?}", old_path.strip_prefix(&root_path)?);
3663 std::fs::remove_file(old_path).unwrap();
3664 record_event(old_path.clone());
3665 }
3666 }
3667
3668 Ok(events)
3669 }
3670
3671 fn read_dir_recursive(path: PathBuf) -> (Vec<PathBuf>, Vec<PathBuf>) {
3672 let child_entries = std::fs::read_dir(&path).unwrap();
3673 let mut dirs = vec![path];
3674 let mut files = Vec::new();
3675 for child_entry in child_entries {
3676 let child_path = child_entry.unwrap().path();
3677 if child_path.is_dir() {
3678 let (child_dirs, child_files) = read_dir_recursive(child_path);
3679 dirs.extend(child_dirs);
3680 files.extend(child_files);
3681 } else {
3682 files.push(child_path);
3683 }
3684 }
3685 (dirs, files)
3686 }
3687
3688 fn gen_name(rng: &mut impl Rng) -> String {
3689 (0..6)
3690 .map(|_| rng.sample(rand::distributions::Alphanumeric))
3691 .map(char::from)
3692 .collect()
3693 }
3694
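// Test-only helpers: `check_invariants` asserts that the file iterators,
// child-entry iteration, and full traversals all agree with the raw
// `entries_by_path` tree, and that every tracked `.gitignore` still has a
// corresponding entry; `to_vec` flattens the snapshot into sorted
// `(path, inode, is_ignored)` tuples for comparison.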
3695 impl LocalSnapshot {
3696 fn check_invariants(&self) {
3697 let mut files = self.files(true, 0);
3698 let mut visible_files = self.files(false, 0);
3699 for entry in self.entries_by_path.cursor::<()>() {
3700 if entry.is_file() {
3701 assert_eq!(files.next().unwrap().inode, entry.inode);
3702 if !entry.is_ignored {
3703 assert_eq!(visible_files.next().unwrap().inode, entry.inode);
3704 }
3705 }
3706 }
3707 assert!(files.next().is_none());
3708 assert!(visible_files.next().is_none());
3709
3710 let mut bfs_paths = Vec::new();
3711 let mut stack = vec![Path::new("")];
3712 while let Some(path) = stack.pop() {
3713 bfs_paths.push(path);
3714 let ix = stack.len();
3715 for child_entry in self.child_entries(path) {
3716 stack.insert(ix, &child_entry.path);
3717 }
3718 }
3719
3720 let dfs_paths_via_iter = self
3721 .entries_by_path
3722 .cursor::<()>()
3723 .map(|e| e.path.as_ref())
3724 .collect::<Vec<_>>();
3725 assert_eq!(bfs_paths, dfs_paths_via_iter);
3726
3727 let dfs_paths_via_traversal = self
3728 .entries(true)
3729 .map(|e| e.path.as_ref())
3730 .collect::<Vec<_>>();
3731 assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);
3732
3733 for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
3734 let ignore_parent_path =
3735 ignore_parent_abs_path.strip_prefix(&self.abs_path).unwrap();
3736 assert!(self.entry_for_path(&ignore_parent_path).is_some());
3737 assert!(self
3738 .entry_for_path(ignore_parent_path.join(&*GITIGNORE))
3739 .is_some());
3740 }
3741 }
3742
3743 fn to_vec(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> {
3744 let mut paths = Vec::new();
3745 for entry in self.entries_by_path.cursor::<()>() {
3746 if include_ignored || !entry.is_ignored {
3747 paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
3748 }
3749 }
3750 paths.sort_by(|a, b| a.0.cmp(b.0));
3751 paths
3752 }
3753 }
3754}