1use super::{ignore::IgnoreStack, DiagnosticSummary};
2use crate::{copy_recursive, ProjectEntryId, RemoveOptions};
3use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
4use anyhow::{anyhow, Context, Result};
5use client::{proto, Client};
6use clock::ReplicaId;
7use collections::{HashMap, VecDeque};
8use fs::LineEnding;
9use fs::{repository::GitRepository, Fs};
10use futures::{
11 channel::{
12 mpsc::{self, UnboundedReceiver, UnboundedSender},
13 oneshot,
14 },
15 select_biased, Stream, StreamExt,
16};
17use fuzzy::CharBag;
18use git::{DOT_GIT, GITIGNORE};
19use gpui::{
20 executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
21 Task,
22};
23use language::File as _;
24use language::{
25 proto::{
26 deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
27 serialize_version,
28 },
29 Buffer, DiagnosticEntry, PointUtf16, Rope, RopeFingerprint, Unclipped,
30};
31use parking_lot::Mutex;
32use postage::barrier;
33use postage::{
34 prelude::{Sink as _, Stream as _},
35 watch,
36};
37use smol::channel::{self, Sender};
38use std::{
39 any::Any,
40 cmp::{self, Ordering},
41 convert::TryFrom,
42 ffi::OsStr,
43 fmt,
44 future::Future,
45 mem,
46 ops::{Deref, DerefMut},
47 path::{Path, PathBuf},
48 sync::{atomic::AtomicUsize, Arc},
49 task::Poll,
50 time::{Duration, SystemTime},
51};
52use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
53use util::paths::HOME;
54use util::{ResultExt, TryFutureExt};
55
56#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
57pub struct WorktreeId(usize);
58
59pub enum Worktree {
60 Local(LocalWorktree),
61 Remote(RemoteWorktree),
62}
63
64pub struct LocalWorktree {
65 snapshot: LocalSnapshot,
66 path_changes_tx: mpsc::UnboundedSender<(Vec<PathBuf>, barrier::Sender)>,
67 is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
68 _background_scanner_task: Task<()>,
69 share: Option<ShareState>,
70 diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<Unclipped<PointUtf16>>>>,
71 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
72 client: Arc<Client>,
73 fs: Arc<dyn Fs>,
74 visible: bool,
75}
76
77pub struct RemoteWorktree {
78 snapshot: Snapshot,
79 background_snapshot: Arc<Mutex<Snapshot>>,
80 project_id: u64,
81 client: Arc<Client>,
82 updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
83 snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
84 replica_id: ReplicaId,
85 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
86 visible: bool,
87 disconnected: bool,
88}
89
90#[derive(Clone)]
91pub struct Snapshot {
92 id: WorktreeId,
93 abs_path: Arc<Path>,
94 root_name: String,
95 root_char_bag: CharBag,
96 entries_by_path: SumTree<Entry>,
97 entries_by_id: SumTree<PathEntry>,
98 scan_id: usize,
99 completed_scan_id: usize,
100}
101
102#[derive(Clone)]
103pub struct GitRepositoryEntry {
104 pub(crate) repo: Arc<Mutex<dyn GitRepository>>,
105
106 pub(crate) scan_id: usize,
107 // Path to folder containing the .git file or directory
108 pub(crate) content_path: Arc<Path>,
109 // Path to the actual .git folder.
110 // Note: if .git is a file, this points to the folder indicated by the .git file
111 pub(crate) git_dir_path: Arc<Path>,
112}
113
114impl std::fmt::Debug for GitRepositoryEntry {
115 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
116 f.debug_struct("GitRepositoryEntry")
117 .field("content_path", &self.content_path)
118 .field("git_dir_path", &self.git_dir_path)
119 .finish()
120 }
121}
122
123#[derive(Debug)]
124pub struct LocalSnapshot {
125 ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
126 git_repositories: Vec<GitRepositoryEntry>,
127 removed_entry_ids: HashMap<u64, ProjectEntryId>,
128 next_entry_id: Arc<AtomicUsize>,
129 snapshot: Snapshot,
130}
131
132impl Clone for LocalSnapshot {
133 fn clone(&self) -> Self {
134 Self {
135 ignores_by_parent_abs_path: self.ignores_by_parent_abs_path.clone(),
136 git_repositories: self.git_repositories.iter().cloned().collect(),
137 removed_entry_ids: self.removed_entry_ids.clone(),
138 next_entry_id: self.next_entry_id.clone(),
139 snapshot: self.snapshot.clone(),
140 }
141 }
142}
143
144impl Deref for LocalSnapshot {
145 type Target = Snapshot;
146
147 fn deref(&self) -> &Self::Target {
148 &self.snapshot
149 }
150}
151
152impl DerefMut for LocalSnapshot {
153 fn deref_mut(&mut self) -> &mut Self::Target {
154 &mut self.snapshot
155 }
156}
157
158enum ScanState {
159 /// The worktree is performing its initial scan of the filesystem.
160 Initializing {
161 snapshot: LocalSnapshot,
162 barrier: Option<barrier::Sender>,
163 },
164 Initialized {
165 snapshot: LocalSnapshot,
166 },
167 /// The worktree is updating in response to filesystem events.
168 Updating,
169 Updated {
170 snapshot: LocalSnapshot,
171 changes: HashMap<Arc<Path>, PathChange>,
172 barrier: Option<barrier::Sender>,
173 },
174}
175
176struct ShareState {
177 project_id: u64,
178 snapshots_tx: watch::Sender<LocalSnapshot>,
179 resume_updates: watch::Sender<()>,
180 _maintain_remote_snapshot: Task<Option<()>>,
181}
182
183pub enum Event {
184 UpdatedEntries(HashMap<Arc<Path>, PathChange>),
185 UpdatedGitRepositories(Vec<GitRepositoryEntry>),
186}
187
188impl Entity for Worktree {
189 type Event = Event;
190}
191
192impl Worktree {
193 pub async fn local(
194 client: Arc<Client>,
195 path: impl Into<Arc<Path>>,
196 visible: bool,
197 fs: Arc<dyn Fs>,
198 next_entry_id: Arc<AtomicUsize>,
199 cx: &mut AsyncAppContext,
200 ) -> Result<ModelHandle<Self>> {
201 // After determining whether the root entry is a file or a directory, populate the
202 // snapshot's "root name", which will be used for the purpose of fuzzy matching.
203 let abs_path = path.into();
204 let metadata = fs
205 .metadata(&abs_path)
206 .await
207 .context("failed to stat worktree path")?;
208
209 Ok(cx.add_model(move |cx: &mut ModelContext<Worktree>| {
210 let root_name = abs_path
211 .file_name()
212 .map_or(String::new(), |f| f.to_string_lossy().to_string());
213
214 let mut snapshot = LocalSnapshot {
215 ignores_by_parent_abs_path: Default::default(),
216 git_repositories: Default::default(),
217 removed_entry_ids: Default::default(),
218 next_entry_id,
219 snapshot: Snapshot {
220 id: WorktreeId::from_usize(cx.model_id()),
221 abs_path: abs_path.clone(),
222 root_name: root_name.clone(),
223 root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
224 entries_by_path: Default::default(),
225 entries_by_id: Default::default(),
226 scan_id: 0,
227 completed_scan_id: 0,
228 },
229 };
230
231 if let Some(metadata) = metadata {
232 snapshot.insert_entry(
233 Entry::new(
234 Arc::from(Path::new("")),
235 &metadata,
236 &snapshot.next_entry_id,
237 snapshot.root_char_bag,
238 ),
239 fs.as_ref(),
240 );
241 }
242
243 let (path_changes_tx, path_changes_rx) = mpsc::unbounded();
244 let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
245
246 cx.spawn_weak(|this, mut cx| async move {
247 while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
248 this.update(&mut cx, |this, cx| {
249 this.as_local_mut()
250 .unwrap()
251 .background_scanner_updated(state, cx);
252 });
253 }
254 })
255 .detach();
256
257 let background_scanner_task = cx.background().spawn({
258 let fs = fs.clone();
259 let snapshot = snapshot.clone();
260 let background = cx.background().clone();
261 async move {
262 let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
263 BackgroundScanner::new(snapshot, scan_states_tx, fs, background)
264 .run(events, path_changes_rx)
265 .await;
266 }
267 });
268
269 Worktree::Local(LocalWorktree {
270 snapshot,
271 is_scanning: watch::channel_with(true),
272 share: None,
273 path_changes_tx,
274 _background_scanner_task: background_scanner_task,
275 diagnostics: Default::default(),
276 diagnostic_summaries: Default::default(),
277 client,
278 fs,
279 visible,
280 })
281 }))
282 }
283
284 pub fn remote(
285 project_remote_id: u64,
286 replica_id: ReplicaId,
287 worktree: proto::WorktreeMetadata,
288 client: Arc<Client>,
289 cx: &mut MutableAppContext,
290 ) -> ModelHandle<Self> {
291 cx.add_model(|cx: &mut ModelContext<Self>| {
292 let snapshot = Snapshot {
293 id: WorktreeId(worktree.id as usize),
294 abs_path: Arc::from(PathBuf::from(worktree.abs_path)),
295 root_name: worktree.root_name.clone(),
296 root_char_bag: worktree
297 .root_name
298 .chars()
299 .map(|c| c.to_ascii_lowercase())
300 .collect(),
301 entries_by_path: Default::default(),
302 entries_by_id: Default::default(),
303 scan_id: 0,
304 completed_scan_id: 0,
305 };
306
307 let (updates_tx, mut updates_rx) = mpsc::unbounded();
308 let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
309 let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
310
311 cx.background()
312 .spawn({
313 let background_snapshot = background_snapshot.clone();
314 async move {
315 while let Some(update) = updates_rx.next().await {
316 if let Err(error) =
317 background_snapshot.lock().apply_remote_update(update)
318 {
319 log::error!("error applying worktree update: {}", error);
320 }
321 snapshot_updated_tx.send(()).await.ok();
322 }
323 }
324 })
325 .detach();
326
327 cx.spawn_weak(|this, mut cx| async move {
328 while (snapshot_updated_rx.recv().await).is_some() {
329 if let Some(this) = this.upgrade(&cx) {
330 this.update(&mut cx, |this, cx| {
331 let this = this.as_remote_mut().unwrap();
332 this.snapshot = this.background_snapshot.lock().clone();
333 cx.emit(Event::UpdatedEntries(Default::default()));
334 cx.notify();
335 while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
336 if this.observed_snapshot(*scan_id) {
337 let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
338 let _ = tx.send(());
339 } else {
340 break;
341 }
342 }
343 });
344 } else {
345 break;
346 }
347 }
348 })
349 .detach();
350
351 Worktree::Remote(RemoteWorktree {
352 project_id: project_remote_id,
353 replica_id,
354 snapshot: snapshot.clone(),
355 background_snapshot,
356 updates_tx: Some(updates_tx),
357 snapshot_subscriptions: Default::default(),
358 client: client.clone(),
359 diagnostic_summaries: Default::default(),
360 visible: worktree.visible,
361 disconnected: false,
362 })
363 })
364 }
365
366 pub fn as_local(&self) -> Option<&LocalWorktree> {
367 if let Worktree::Local(worktree) = self {
368 Some(worktree)
369 } else {
370 None
371 }
372 }
373
374 pub fn as_remote(&self) -> Option<&RemoteWorktree> {
375 if let Worktree::Remote(worktree) = self {
376 Some(worktree)
377 } else {
378 None
379 }
380 }
381
382 pub fn as_local_mut(&mut self) -> Option<&mut LocalWorktree> {
383 if let Worktree::Local(worktree) = self {
384 Some(worktree)
385 } else {
386 None
387 }
388 }
389
390 pub fn as_remote_mut(&mut self) -> Option<&mut RemoteWorktree> {
391 if let Worktree::Remote(worktree) = self {
392 Some(worktree)
393 } else {
394 None
395 }
396 }
397
398 pub fn is_local(&self) -> bool {
399 matches!(self, Worktree::Local(_))
400 }
401
402 pub fn is_remote(&self) -> bool {
403 !self.is_local()
404 }
405
406 pub fn snapshot(&self) -> Snapshot {
407 match self {
408 Worktree::Local(worktree) => worktree.snapshot().snapshot,
409 Worktree::Remote(worktree) => worktree.snapshot(),
410 }
411 }
412
413 pub fn scan_id(&self) -> usize {
414 match self {
415 Worktree::Local(worktree) => worktree.snapshot.scan_id,
416 Worktree::Remote(worktree) => worktree.snapshot.scan_id,
417 }
418 }
419
420 pub fn completed_scan_id(&self) -> usize {
421 match self {
422 Worktree::Local(worktree) => worktree.snapshot.completed_scan_id,
423 Worktree::Remote(worktree) => worktree.snapshot.completed_scan_id,
424 }
425 }
426
427 pub fn is_visible(&self) -> bool {
428 match self {
429 Worktree::Local(worktree) => worktree.visible,
430 Worktree::Remote(worktree) => worktree.visible,
431 }
432 }
433
434 pub fn replica_id(&self) -> ReplicaId {
435 match self {
436 Worktree::Local(_) => 0,
437 Worktree::Remote(worktree) => worktree.replica_id,
438 }
439 }
440
441 pub fn diagnostic_summaries(
442 &self,
443 ) -> impl Iterator<Item = (Arc<Path>, DiagnosticSummary)> + '_ {
444 match self {
445 Worktree::Local(worktree) => &worktree.diagnostic_summaries,
446 Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
447 }
448 .iter()
449 .map(|(path, summary)| (path.0.clone(), *summary))
450 }
451
452 pub fn abs_path(&self) -> Arc<Path> {
453 match self {
454 Worktree::Local(worktree) => worktree.abs_path.clone(),
455 Worktree::Remote(worktree) => worktree.abs_path.clone(),
456 }
457 }
458}
459
460impl LocalWorktree {
461 pub fn contains_abs_path(&self, path: &Path) -> bool {
462 path.starts_with(&self.abs_path)
463 }
464
465 fn absolutize(&self, path: &Path) -> PathBuf {
466 if path.file_name().is_some() {
467 self.abs_path.join(path)
468 } else {
469 self.abs_path.to_path_buf()
470 }
471 }
472
473 pub(crate) fn load_buffer(
474 &mut self,
475 path: &Path,
476 cx: &mut ModelContext<Worktree>,
477 ) -> Task<Result<ModelHandle<Buffer>>> {
478 let path = Arc::from(path);
479 cx.spawn(move |this, mut cx| async move {
480 let (file, contents, diff_base) = this
481 .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
482 .await?;
483 Ok(cx.add_model(|cx| {
484 let mut buffer = Buffer::from_file(0, contents, diff_base, Arc::new(file), cx);
485 buffer.git_diff_recalc(cx);
486 buffer
487 }))
488 })
489 }
490
491 pub fn diagnostics_for_path(
492 &self,
493 path: &Path,
494 ) -> Option<Vec<DiagnosticEntry<Unclipped<PointUtf16>>>> {
495 self.diagnostics.get(path).cloned()
496 }
497
498 pub fn update_diagnostics(
499 &mut self,
500 language_server_id: usize,
501 worktree_path: Arc<Path>,
502 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
503 _: &mut ModelContext<Worktree>,
504 ) -> Result<bool> {
505 self.diagnostics.remove(&worktree_path);
506 let old_summary = self
507 .diagnostic_summaries
508 .remove(&PathKey(worktree_path.clone()))
509 .unwrap_or_default();
510 let new_summary = DiagnosticSummary::new(language_server_id, &diagnostics);
511 if !new_summary.is_empty() {
512 self.diagnostic_summaries
513 .insert(PathKey(worktree_path.clone()), new_summary);
514 self.diagnostics.insert(worktree_path.clone(), diagnostics);
515 }
516
517 let updated = !old_summary.is_empty() || !new_summary.is_empty();
518 if updated {
519 if let Some(share) = self.share.as_ref() {
520 self.client
521 .send(proto::UpdateDiagnosticSummary {
522 project_id: share.project_id,
523 worktree_id: self.id().to_proto(),
524 summary: Some(proto::DiagnosticSummary {
525 path: worktree_path.to_string_lossy().to_string(),
526 language_server_id: language_server_id as u64,
527 error_count: new_summary.error_count as u32,
528 warning_count: new_summary.warning_count as u32,
529 }),
530 })
531 .log_err();
532 }
533 }
534
535 Ok(updated)
536 }
537
538 fn background_scanner_updated(
539 &mut self,
540 scan_state: ScanState,
541 cx: &mut ModelContext<Worktree>,
542 ) {
543 match scan_state {
544 ScanState::Initializing { snapshot, barrier } => {
545 *self.is_scanning.0.borrow_mut() = true;
546 self.set_snapshot(snapshot, cx);
547 drop(barrier);
548 }
549 ScanState::Initialized { snapshot } => {
550 *self.is_scanning.0.borrow_mut() = false;
551 self.set_snapshot(snapshot, cx);
552 }
553 ScanState::Updating => {
554 *self.is_scanning.0.borrow_mut() = true;
555 }
556 ScanState::Updated {
557 snapshot,
558 changes,
559 barrier,
560 } => {
561 *self.is_scanning.0.borrow_mut() = false;
562 cx.emit(Event::UpdatedEntries(changes));
563 self.set_snapshot(snapshot, cx);
564 drop(barrier);
565 }
566 }
567 cx.notify();
568 }
569
570 fn set_snapshot(&mut self, new_snapshot: LocalSnapshot, cx: &mut ModelContext<Worktree>) {
571 let updated_repos = Self::changed_repos(
572 &self.snapshot.git_repositories,
573 &new_snapshot.git_repositories,
574 );
575 self.snapshot = new_snapshot;
576
577 if let Some(share) = self.share.as_mut() {
578 *share.snapshots_tx.borrow_mut() = self.snapshot.clone();
579 }
580
581 if !updated_repos.is_empty() {
582 cx.emit(Event::UpdatedGitRepositories(updated_repos));
583 }
584 }
585
586 fn changed_repos(
587 old_repos: &[GitRepositoryEntry],
588 new_repos: &[GitRepositoryEntry],
589 ) -> Vec<GitRepositoryEntry> {
590 fn diff<'a>(
591 a: &'a [GitRepositoryEntry],
592 b: &'a [GitRepositoryEntry],
593 updated: &mut HashMap<&'a Path, GitRepositoryEntry>,
594 ) {
595 for a_repo in a {
596 let matched = b.iter().find(|b_repo| {
597 a_repo.git_dir_path == b_repo.git_dir_path && a_repo.scan_id == b_repo.scan_id
598 });
599
600 if matched.is_none() {
601 updated.insert(a_repo.git_dir_path.as_ref(), a_repo.clone());
602 }
603 }
604 }
605
606 let mut updated = HashMap::<&Path, GitRepositoryEntry>::default();
607
608 diff(old_repos, new_repos, &mut updated);
609 diff(new_repos, old_repos, &mut updated);
610
611 updated.into_values().collect()
612 }
613
614 pub fn scan_complete(&self) -> impl Future<Output = ()> {
615 let mut is_scanning_rx = self.is_scanning.1.clone();
616 async move {
617 let mut is_scanning = is_scanning_rx.borrow().clone();
618 while is_scanning {
619 if let Some(value) = is_scanning_rx.recv().await {
620 is_scanning = value;
621 } else {
622 break;
623 }
624 }
625 }
626 }
627
628 pub fn snapshot(&self) -> LocalSnapshot {
629 self.snapshot.clone()
630 }
631
632 pub fn metadata_proto(&self) -> proto::WorktreeMetadata {
633 proto::WorktreeMetadata {
634 id: self.id().to_proto(),
635 root_name: self.root_name().to_string(),
636 visible: self.visible,
637 abs_path: self.abs_path().as_os_str().to_string_lossy().into(),
638 }
639 }
640
641 fn load(
642 &self,
643 path: &Path,
644 cx: &mut ModelContext<Worktree>,
645 ) -> Task<Result<(File, String, Option<String>)>> {
646 let handle = cx.handle();
647 let path = Arc::from(path);
648 let abs_path = self.absolutize(&path);
649 let fs = self.fs.clone();
650 let snapshot = self.snapshot();
651
652 cx.spawn(|this, mut cx| async move {
653 let text = fs.load(&abs_path).await?;
654
655 let diff_base = if let Some(repo) = snapshot.repo_for(&path) {
656 if let Ok(repo_relative) = path.strip_prefix(repo.content_path) {
657 let repo_relative = repo_relative.to_owned();
658 cx.background()
659 .spawn(async move { repo.repo.lock().load_index_text(&repo_relative) })
660 .await
661 } else {
662 None
663 }
664 } else {
665 None
666 };
667
668 // Eagerly populate the snapshot with an updated entry for the loaded file
669 let entry = this
670 .update(&mut cx, |this, cx| {
671 this.as_local().unwrap().refresh_entry(path, None, cx)
672 })
673 .await?;
674
675 Ok((
676 File {
677 entry_id: entry.id,
678 worktree: handle,
679 path: entry.path,
680 mtime: entry.mtime,
681 is_local: true,
682 is_deleted: false,
683 },
684 text,
685 diff_base,
686 ))
687 })
688 }
689
690 pub fn save_buffer(
691 &self,
692 buffer_handle: ModelHandle<Buffer>,
693 path: Arc<Path>,
694 has_changed_file: bool,
695 cx: &mut ModelContext<Worktree>,
696 ) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
697 let handle = cx.handle();
698 let buffer = buffer_handle.read(cx);
699
700 let rpc = self.client.clone();
701 let buffer_id = buffer.remote_id();
702 let project_id = self.share.as_ref().map(|share| share.project_id);
703
704 let text = buffer.as_rope().clone();
705 let fingerprint = text.fingerprint();
706 let version = buffer.version();
707 let save = self.write_file(path, text, buffer.line_ending(), cx);
708
709 cx.as_mut().spawn(|mut cx| async move {
710 let entry = save.await?;
711
712 if has_changed_file {
713 let new_file = Arc::new(File {
714 entry_id: entry.id,
715 worktree: handle,
716 path: entry.path,
717 mtime: entry.mtime,
718 is_local: true,
719 is_deleted: false,
720 });
721
722 if let Some(project_id) = project_id {
723 rpc.send(proto::UpdateBufferFile {
724 project_id,
725 buffer_id,
726 file: Some(new_file.to_proto()),
727 })
728 .log_err();
729 }
730
731 buffer_handle.update(&mut cx, |buffer, cx| {
732 if has_changed_file {
733 buffer.file_updated(new_file, cx).detach();
734 }
735 });
736 }
737
738 if let Some(project_id) = project_id {
739 rpc.send(proto::BufferSaved {
740 project_id,
741 buffer_id,
742 version: serialize_version(&version),
743 mtime: Some(entry.mtime.into()),
744 fingerprint: serialize_fingerprint(fingerprint),
745 })?;
746 }
747
748 buffer_handle.update(&mut cx, |buffer, cx| {
749 buffer.did_save(version.clone(), fingerprint, entry.mtime, cx);
750 });
751
752 Ok((version, fingerprint, entry.mtime))
753 })
754 }
755
756 pub fn create_entry(
757 &self,
758 path: impl Into<Arc<Path>>,
759 is_dir: bool,
760 cx: &mut ModelContext<Worktree>,
761 ) -> Task<Result<Entry>> {
762 let path = path.into();
763 let abs_path = self.absolutize(&path);
764 let fs = self.fs.clone();
765 let write = cx.background().spawn(async move {
766 if is_dir {
767 fs.create_dir(&abs_path).await
768 } else {
769 fs.save(&abs_path, &Default::default(), Default::default())
770 .await
771 }
772 });
773
774 cx.spawn(|this, mut cx| async move {
775 write.await?;
776 this.update(&mut cx, |this, cx| {
777 this.as_local_mut().unwrap().refresh_entry(path, None, cx)
778 })
779 .await
780 })
781 }
782
783 pub fn write_file(
784 &self,
785 path: impl Into<Arc<Path>>,
786 text: Rope,
787 line_ending: LineEnding,
788 cx: &mut ModelContext<Worktree>,
789 ) -> Task<Result<Entry>> {
790 let path = path.into();
791 let abs_path = self.absolutize(&path);
792 let fs = self.fs.clone();
793 let write = cx
794 .background()
795 .spawn(async move { fs.save(&abs_path, &text, line_ending).await });
796
797 cx.spawn(|this, mut cx| async move {
798 write.await?;
799 this.update(&mut cx, |this, cx| {
800 this.as_local_mut().unwrap().refresh_entry(path, None, cx)
801 })
802 .await
803 })
804 }
805
806 pub fn delete_entry(
807 &self,
808 entry_id: ProjectEntryId,
809 cx: &mut ModelContext<Worktree>,
810 ) -> Option<Task<Result<()>>> {
811 let entry = self.entry_for_id(entry_id)?.clone();
812 let path = entry.path.clone();
813 let abs_path = self.absolutize(&path);
814 let (tx, mut rx) = barrier::channel();
815
816 let delete = cx.background().spawn({
817 let abs_path = abs_path.clone();
818 let fs = self.fs.clone();
819 async move {
820 if entry.is_file() {
821 fs.remove_file(&abs_path, Default::default()).await
822 } else {
823 fs.remove_dir(
824 &abs_path,
825 RemoveOptions {
826 recursive: true,
827 ignore_if_not_exists: false,
828 },
829 )
830 .await
831 }
832 }
833 });
834
835 Some(cx.spawn(|this, mut cx| async move {
836 delete.await?;
837 this.update(&mut cx, |this, _| {
838 this.as_local_mut()
839 .unwrap()
840 .path_changes_tx
841 .unbounded_send((vec![abs_path], tx))
842 .unwrap();
843 });
844 rx.recv().await;
845 Ok(())
846 }))
847 }
848
849 pub fn rename_entry(
850 &self,
851 entry_id: ProjectEntryId,
852 new_path: impl Into<Arc<Path>>,
853 cx: &mut ModelContext<Worktree>,
854 ) -> Option<Task<Result<Entry>>> {
855 let old_path = self.entry_for_id(entry_id)?.path.clone();
856 let new_path = new_path.into();
857 let abs_old_path = self.absolutize(&old_path);
858 let abs_new_path = self.absolutize(&new_path);
859 let fs = self.fs.clone();
860 let rename = cx.background().spawn(async move {
861 fs.rename(&abs_old_path, &abs_new_path, Default::default())
862 .await
863 });
864
865 Some(cx.spawn(|this, mut cx| async move {
866 rename.await?;
867 this.update(&mut cx, |this, cx| {
868 this.as_local_mut()
869 .unwrap()
870 .refresh_entry(new_path.clone(), Some(old_path), cx)
871 })
872 .await
873 }))
874 }
875
876 pub fn copy_entry(
877 &self,
878 entry_id: ProjectEntryId,
879 new_path: impl Into<Arc<Path>>,
880 cx: &mut ModelContext<Worktree>,
881 ) -> Option<Task<Result<Entry>>> {
882 let old_path = self.entry_for_id(entry_id)?.path.clone();
883 let new_path = new_path.into();
884 let abs_old_path = self.absolutize(&old_path);
885 let abs_new_path = self.absolutize(&new_path);
886 let fs = self.fs.clone();
887 let copy = cx.background().spawn(async move {
888 copy_recursive(
889 fs.as_ref(),
890 &abs_old_path,
891 &abs_new_path,
892 Default::default(),
893 )
894 .await
895 });
896
897 Some(cx.spawn(|this, mut cx| async move {
898 copy.await?;
899 this.update(&mut cx, |this, cx| {
900 this.as_local_mut()
901 .unwrap()
902 .refresh_entry(new_path.clone(), None, cx)
903 })
904 .await
905 }))
906 }
907
908 fn refresh_entry(
909 &self,
910 path: Arc<Path>,
911 old_path: Option<Arc<Path>>,
912 cx: &mut ModelContext<Worktree>,
913 ) -> Task<Result<Entry>> {
914 let fs = self.fs.clone();
915 let abs_path = self.abs_path.clone();
916 let path_changes_tx = self.path_changes_tx.clone();
917 cx.spawn_weak(move |this, mut cx| async move {
918 let abs_path = fs.canonicalize(&abs_path).await?;
919 let paths = if let Some(old_path) = old_path {
920 vec![abs_path.join(&path), abs_path.join(&old_path)]
921 } else {
922 vec![abs_path.join(&path)]
923 };
924
925 let (tx, mut rx) = barrier::channel();
926 path_changes_tx.unbounded_send((paths, tx)).unwrap();
927 rx.recv().await;
928 this.upgrade(&cx)
929 .ok_or_else(|| anyhow!("worktree was dropped"))?
930 .update(&mut cx, |this, _| {
931 this.entry_for_path(path)
932 .cloned()
933 .ok_or_else(|| anyhow!("failed to read path after update"))
934 })
935 })
936 }
937
938 pub fn share(&mut self, project_id: u64, cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
939 let (share_tx, share_rx) = oneshot::channel();
940
941 if let Some(share) = self.share.as_mut() {
942 let _ = share_tx.send(());
943 *share.resume_updates.borrow_mut() = ();
944 } else {
945 let (snapshots_tx, mut snapshots_rx) = watch::channel_with(self.snapshot());
946 let (resume_updates_tx, mut resume_updates_rx) = watch::channel();
947 let worktree_id = cx.model_id() as u64;
948
949 for (path, summary) in self.diagnostic_summaries.iter() {
950 if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
951 project_id,
952 worktree_id,
953 summary: Some(summary.to_proto(&path.0)),
954 }) {
955 return Task::ready(Err(e));
956 }
957 }
958
959 let _maintain_remote_snapshot = cx.background().spawn({
960 let client = self.client.clone();
961 async move {
962 let mut share_tx = Some(share_tx);
963 let mut prev_snapshot = LocalSnapshot {
964 ignores_by_parent_abs_path: Default::default(),
965 git_repositories: Default::default(),
966 removed_entry_ids: Default::default(),
967 next_entry_id: Default::default(),
968 snapshot: Snapshot {
969 id: WorktreeId(worktree_id as usize),
970 abs_path: Path::new("").into(),
971 root_name: Default::default(),
972 root_char_bag: Default::default(),
973 entries_by_path: Default::default(),
974 entries_by_id: Default::default(),
975 scan_id: 0,
976 completed_scan_id: 0,
977 },
978 };
979 while let Some(snapshot) = snapshots_rx.recv().await {
980 #[cfg(any(test, feature = "test-support"))]
981 const MAX_CHUNK_SIZE: usize = 2;
982 #[cfg(not(any(test, feature = "test-support")))]
983 const MAX_CHUNK_SIZE: usize = 256;
984
985 let update =
986 snapshot.build_update(&prev_snapshot, project_id, worktree_id, true);
987 for update in proto::split_worktree_update(update, MAX_CHUNK_SIZE) {
988 let _ = resume_updates_rx.try_recv();
989 while let Err(error) = client.request(update.clone()).await {
990 log::error!("failed to send worktree update: {}", error);
991 log::info!("waiting to resume updates");
992 if resume_updates_rx.next().await.is_none() {
993 return Ok(());
994 }
995 }
996 }
997
998 if let Some(share_tx) = share_tx.take() {
999 let _ = share_tx.send(());
1000 }
1001
1002 prev_snapshot = snapshot;
1003 }
1004
1005 Ok::<_, anyhow::Error>(())
1006 }
1007 .log_err()
1008 });
1009
1010 self.share = Some(ShareState {
1011 project_id,
1012 snapshots_tx,
1013 resume_updates: resume_updates_tx,
1014 _maintain_remote_snapshot,
1015 });
1016 }
1017
1018 cx.foreground()
1019 .spawn(async move { share_rx.await.map_err(|_| anyhow!("share ended")) })
1020 }
1021
1022 pub fn unshare(&mut self) {
1023 self.share.take();
1024 }
1025
1026 pub fn is_shared(&self) -> bool {
1027 self.share.is_some()
1028 }
1029}
1030
1031impl RemoteWorktree {
1032 fn snapshot(&self) -> Snapshot {
1033 self.snapshot.clone()
1034 }
1035
1036 pub fn disconnected_from_host(&mut self) {
1037 self.updates_tx.take();
1038 self.snapshot_subscriptions.clear();
1039 self.disconnected = true;
1040 }
1041
1042 pub fn save_buffer(
1043 &self,
1044 buffer_handle: ModelHandle<Buffer>,
1045 cx: &mut ModelContext<Worktree>,
1046 ) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
1047 let buffer = buffer_handle.read(cx);
1048 let buffer_id = buffer.remote_id();
1049 let version = buffer.version();
1050 let rpc = self.client.clone();
1051 let project_id = self.project_id;
1052 cx.as_mut().spawn(|mut cx| async move {
1053 let response = rpc
1054 .request(proto::SaveBuffer {
1055 project_id,
1056 buffer_id,
1057 version: serialize_version(&version),
1058 })
1059 .await?;
1060 let version = deserialize_version(response.version);
1061 let fingerprint = deserialize_fingerprint(&response.fingerprint)?;
1062 let mtime = response
1063 .mtime
1064 .ok_or_else(|| anyhow!("missing mtime"))?
1065 .into();
1066
1067 buffer_handle.update(&mut cx, |buffer, cx| {
1068 buffer.did_save(version.clone(), fingerprint, mtime, cx);
1069 });
1070
1071 Ok((version, fingerprint, mtime))
1072 })
1073 }
1074
1075 pub fn update_from_remote(&mut self, update: proto::UpdateWorktree) {
1076 if let Some(updates_tx) = &self.updates_tx {
1077 updates_tx
1078 .unbounded_send(update)
1079 .expect("consumer runs to completion");
1080 }
1081 }
1082
1083 fn observed_snapshot(&self, scan_id: usize) -> bool {
1084 self.completed_scan_id >= scan_id
1085 }
1086
1087 fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = Result<()>> {
1088 let (tx, rx) = oneshot::channel();
1089 if self.observed_snapshot(scan_id) {
1090 let _ = tx.send(());
1091 } else if self.disconnected {
1092 drop(tx);
1093 } else {
1094 match self
1095 .snapshot_subscriptions
1096 .binary_search_by_key(&scan_id, |probe| probe.0)
1097 {
1098 Ok(ix) | Err(ix) => self.snapshot_subscriptions.insert(ix, (scan_id, tx)),
1099 }
1100 }
1101
1102 async move {
1103 rx.await?;
1104 Ok(())
1105 }
1106 }
1107
1108 pub fn update_diagnostic_summary(
1109 &mut self,
1110 path: Arc<Path>,
1111 summary: &proto::DiagnosticSummary,
1112 ) {
1113 let summary = DiagnosticSummary {
1114 language_server_id: summary.language_server_id as usize,
1115 error_count: summary.error_count as usize,
1116 warning_count: summary.warning_count as usize,
1117 };
1118 if summary.is_empty() {
1119 self.diagnostic_summaries.remove(&PathKey(path));
1120 } else {
1121 self.diagnostic_summaries.insert(PathKey(path), summary);
1122 }
1123 }
1124
1125 pub fn insert_entry(
1126 &mut self,
1127 entry: proto::Entry,
1128 scan_id: usize,
1129 cx: &mut ModelContext<Worktree>,
1130 ) -> Task<Result<Entry>> {
1131 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1132 cx.spawn(|this, mut cx| async move {
1133 wait_for_snapshot.await?;
1134 this.update(&mut cx, |worktree, _| {
1135 let worktree = worktree.as_remote_mut().unwrap();
1136 let mut snapshot = worktree.background_snapshot.lock();
1137 let entry = snapshot.insert_entry(entry);
1138 worktree.snapshot = snapshot.clone();
1139 entry
1140 })
1141 })
1142 }
1143
1144 pub(crate) fn delete_entry(
1145 &mut self,
1146 id: ProjectEntryId,
1147 scan_id: usize,
1148 cx: &mut ModelContext<Worktree>,
1149 ) -> Task<Result<()>> {
1150 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1151 cx.spawn(|this, mut cx| async move {
1152 wait_for_snapshot.await?;
1153 this.update(&mut cx, |worktree, _| {
1154 let worktree = worktree.as_remote_mut().unwrap();
1155 let mut snapshot = worktree.background_snapshot.lock();
1156 snapshot.delete_entry(id);
1157 worktree.snapshot = snapshot.clone();
1158 });
1159 Ok(())
1160 })
1161 }
1162}
1163
1164impl Snapshot {
1165 pub fn id(&self) -> WorktreeId {
1166 self.id
1167 }
1168
1169 pub fn abs_path(&self) -> &Arc<Path> {
1170 &self.abs_path
1171 }
1172
1173 pub fn contains_entry(&self, entry_id: ProjectEntryId) -> bool {
1174 self.entries_by_id.get(&entry_id, &()).is_some()
1175 }
1176
1177 pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
1178 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1179 let old_entry = self.entries_by_id.insert_or_replace(
1180 PathEntry {
1181 id: entry.id,
1182 path: entry.path.clone(),
1183 is_ignored: entry.is_ignored,
1184 scan_id: 0,
1185 },
1186 &(),
1187 );
1188 if let Some(old_entry) = old_entry {
1189 self.entries_by_path.remove(&PathKey(old_entry.path), &());
1190 }
1191 self.entries_by_path.insert_or_replace(entry.clone(), &());
1192 Ok(entry)
1193 }
1194
1195 fn delete_entry(&mut self, entry_id: ProjectEntryId) -> Option<Arc<Path>> {
1196 let removed_entry = self.entries_by_id.remove(&entry_id, &())?;
1197 self.entries_by_path = {
1198 let mut cursor = self.entries_by_path.cursor();
1199 let mut new_entries_by_path =
1200 cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &());
1201 while let Some(entry) = cursor.item() {
1202 if entry.path.starts_with(&removed_entry.path) {
1203 self.entries_by_id.remove(&entry.id, &());
1204 cursor.next(&());
1205 } else {
1206 break;
1207 }
1208 }
1209 new_entries_by_path.push_tree(cursor.suffix(&()), &());
1210 new_entries_by_path
1211 };
1212
1213 Some(removed_entry.path)
1214 }
1215
1216 pub(crate) fn apply_remote_update(&mut self, update: proto::UpdateWorktree) -> Result<()> {
1217 let mut entries_by_path_edits = Vec::new();
1218 let mut entries_by_id_edits = Vec::new();
1219 for entry_id in update.removed_entries {
1220 let entry = self
1221 .entry_for_id(ProjectEntryId::from_proto(entry_id))
1222 .ok_or_else(|| anyhow!("unknown entry {}", entry_id))?;
1223 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
1224 entries_by_id_edits.push(Edit::Remove(entry.id));
1225 }
1226
1227 for entry in update.updated_entries {
1228 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1229 if let Some(PathEntry { path, .. }) = self.entries_by_id.get(&entry.id, &()) {
1230 entries_by_path_edits.push(Edit::Remove(PathKey(path.clone())));
1231 }
1232 entries_by_id_edits.push(Edit::Insert(PathEntry {
1233 id: entry.id,
1234 path: entry.path.clone(),
1235 is_ignored: entry.is_ignored,
1236 scan_id: 0,
1237 }));
1238 entries_by_path_edits.push(Edit::Insert(entry));
1239 }
1240
1241 self.entries_by_path.edit(entries_by_path_edits, &());
1242 self.entries_by_id.edit(entries_by_id_edits, &());
1243 self.scan_id = update.scan_id as usize;
1244 if update.is_last_update {
1245 self.completed_scan_id = update.scan_id as usize;
1246 }
1247
1248 Ok(())
1249 }
1250
1251 pub fn file_count(&self) -> usize {
1252 self.entries_by_path.summary().file_count
1253 }
1254
1255 pub fn visible_file_count(&self) -> usize {
1256 self.entries_by_path.summary().visible_file_count
1257 }
1258
1259 fn traverse_from_offset(
1260 &self,
1261 include_dirs: bool,
1262 include_ignored: bool,
1263 start_offset: usize,
1264 ) -> Traversal {
1265 let mut cursor = self.entries_by_path.cursor();
1266 cursor.seek(
1267 &TraversalTarget::Count {
1268 count: start_offset,
1269 include_dirs,
1270 include_ignored,
1271 },
1272 Bias::Right,
1273 &(),
1274 );
1275 Traversal {
1276 cursor,
1277 include_dirs,
1278 include_ignored,
1279 }
1280 }
1281
1282 fn traverse_from_path(
1283 &self,
1284 include_dirs: bool,
1285 include_ignored: bool,
1286 path: &Path,
1287 ) -> Traversal {
1288 let mut cursor = self.entries_by_path.cursor();
1289 cursor.seek(&TraversalTarget::Path(path), Bias::Left, &());
1290 Traversal {
1291 cursor,
1292 include_dirs,
1293 include_ignored,
1294 }
1295 }
1296
1297 pub fn files(&self, include_ignored: bool, start: usize) -> Traversal {
1298 self.traverse_from_offset(false, include_ignored, start)
1299 }
1300
1301 pub fn entries(&self, include_ignored: bool) -> Traversal {
1302 self.traverse_from_offset(true, include_ignored, 0)
1303 }
1304
1305 pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
1306 let empty_path = Path::new("");
1307 self.entries_by_path
1308 .cursor::<()>()
1309 .filter(move |entry| entry.path.as_ref() != empty_path)
1310 .map(|entry| &entry.path)
1311 }
1312
1313 fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> {
1314 let mut cursor = self.entries_by_path.cursor();
1315 cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &());
1316 let traversal = Traversal {
1317 cursor,
1318 include_dirs: true,
1319 include_ignored: true,
1320 };
1321 ChildEntriesIter {
1322 traversal,
1323 parent_path,
1324 }
1325 }
1326
1327 pub fn root_entry(&self) -> Option<&Entry> {
1328 self.entry_for_path("")
1329 }
1330
1331 pub fn root_name(&self) -> &str {
1332 &self.root_name
1333 }
1334
1335 pub fn scan_started(&mut self) {
1336 self.scan_id += 1;
1337 }
1338
1339 pub fn scan_completed(&mut self) {
1340 self.completed_scan_id = self.scan_id;
1341 }
1342
1343 pub fn scan_id(&self) -> usize {
1344 self.scan_id
1345 }
1346
1347 pub fn entry_for_path(&self, path: impl AsRef<Path>) -> Option<&Entry> {
1348 let path = path.as_ref();
1349 self.traverse_from_path(true, true, path)
1350 .entry()
1351 .and_then(|entry| {
1352 if entry.path.as_ref() == path {
1353 Some(entry)
1354 } else {
1355 None
1356 }
1357 })
1358 }
1359
1360 pub fn entry_for_id(&self, id: ProjectEntryId) -> Option<&Entry> {
1361 let entry = self.entries_by_id.get(&id, &())?;
1362 self.entry_for_path(&entry.path)
1363 }
1364
1365 pub fn inode_for_path(&self, path: impl AsRef<Path>) -> Option<u64> {
1366 self.entry_for_path(path.as_ref()).map(|e| e.inode)
1367 }
1368}
1369
1370impl LocalSnapshot {
1371 // Gives the most specific git repository for a given path
1372 pub(crate) fn repo_for(&self, path: &Path) -> Option<GitRepositoryEntry> {
1373 self.git_repositories
1374 .iter()
1375 .rev() //git_repository is ordered lexicographically
1376 .find(|repo| repo.manages(path))
1377 .cloned()
1378 }
1379
1380 pub(crate) fn repo_with_dot_git_containing(
1381 &mut self,
1382 path: &Path,
1383 ) -> Option<&mut GitRepositoryEntry> {
1384 // Git repositories cannot be nested, so we don't need to reverse the order
1385 self.git_repositories
1386 .iter_mut()
1387 .find(|repo| repo.in_dot_git(path))
1388 }
1389
1390 #[cfg(test)]
1391 pub(crate) fn build_initial_update(&self, project_id: u64) -> proto::UpdateWorktree {
1392 let root_name = self.root_name.clone();
1393 proto::UpdateWorktree {
1394 project_id,
1395 worktree_id: self.id().to_proto(),
1396 abs_path: self.abs_path().to_string_lossy().into(),
1397 root_name,
1398 updated_entries: self.entries_by_path.iter().map(Into::into).collect(),
1399 removed_entries: Default::default(),
1400 scan_id: self.scan_id as u64,
1401 is_last_update: true,
1402 }
1403 }
1404
1405 pub(crate) fn build_update(
1406 &self,
1407 other: &Self,
1408 project_id: u64,
1409 worktree_id: u64,
1410 include_ignored: bool,
1411 ) -> proto::UpdateWorktree {
1412 let mut updated_entries = Vec::new();
1413 let mut removed_entries = Vec::new();
1414 let mut self_entries = self
1415 .entries_by_id
1416 .cursor::<()>()
1417 .filter(|e| include_ignored || !e.is_ignored)
1418 .peekable();
1419 let mut other_entries = other
1420 .entries_by_id
1421 .cursor::<()>()
1422 .filter(|e| include_ignored || !e.is_ignored)
1423 .peekable();
1424 loop {
1425 match (self_entries.peek(), other_entries.peek()) {
1426 (Some(self_entry), Some(other_entry)) => {
1427 match Ord::cmp(&self_entry.id, &other_entry.id) {
1428 Ordering::Less => {
1429 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1430 updated_entries.push(entry);
1431 self_entries.next();
1432 }
1433 Ordering::Equal => {
1434 if self_entry.scan_id != other_entry.scan_id {
1435 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1436 updated_entries.push(entry);
1437 }
1438
1439 self_entries.next();
1440 other_entries.next();
1441 }
1442 Ordering::Greater => {
1443 removed_entries.push(other_entry.id.to_proto());
1444 other_entries.next();
1445 }
1446 }
1447 }
1448 (Some(self_entry), None) => {
1449 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1450 updated_entries.push(entry);
1451 self_entries.next();
1452 }
1453 (None, Some(other_entry)) => {
1454 removed_entries.push(other_entry.id.to_proto());
1455 other_entries.next();
1456 }
1457 (None, None) => break,
1458 }
1459 }
1460
1461 proto::UpdateWorktree {
1462 project_id,
1463 worktree_id,
1464 abs_path: self.abs_path().to_string_lossy().into(),
1465 root_name: self.root_name().to_string(),
1466 updated_entries,
1467 removed_entries,
1468 scan_id: self.scan_id as u64,
1469 is_last_update: self.completed_scan_id == self.scan_id,
1470 }
1471 }
1472
1473 fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
1474 if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) {
1475 let abs_path = self.abs_path.join(&entry.path);
1476 match smol::block_on(build_gitignore(&abs_path, fs)) {
1477 Ok(ignore) => {
1478 self.ignores_by_parent_abs_path.insert(
1479 abs_path.parent().unwrap().into(),
1480 (Arc::new(ignore), self.scan_id),
1481 );
1482 }
1483 Err(error) => {
1484 log::error!(
1485 "error loading .gitignore file {:?} - {:?}",
1486 &entry.path,
1487 error
1488 );
1489 }
1490 }
1491 }
1492
1493 self.reuse_entry_id(&mut entry);
1494
1495 if entry.kind == EntryKind::PendingDir {
1496 if let Some(existing_entry) =
1497 self.entries_by_path.get(&PathKey(entry.path.clone()), &())
1498 {
1499 entry.kind = existing_entry.kind;
1500 }
1501 }
1502
1503 let scan_id = self.scan_id;
1504 self.entries_by_path.insert_or_replace(entry.clone(), &());
1505 self.entries_by_id.insert_or_replace(
1506 PathEntry {
1507 id: entry.id,
1508 path: entry.path.clone(),
1509 is_ignored: entry.is_ignored,
1510 scan_id,
1511 },
1512 &(),
1513 );
1514
1515 entry
1516 }
1517
1518 fn populate_dir(
1519 &mut self,
1520 parent_path: Arc<Path>,
1521 entries: impl IntoIterator<Item = Entry>,
1522 ignore: Option<Arc<Gitignore>>,
1523 fs: &dyn Fs,
1524 ) {
1525 let mut parent_entry = if let Some(parent_entry) =
1526 self.entries_by_path.get(&PathKey(parent_path.clone()), &())
1527 {
1528 parent_entry.clone()
1529 } else {
1530 log::warn!(
1531 "populating a directory {:?} that has been removed",
1532 parent_path
1533 );
1534 return;
1535 };
1536
1537 if let Some(ignore) = ignore {
1538 self.ignores_by_parent_abs_path.insert(
1539 self.abs_path.join(&parent_path).into(),
1540 (ignore, self.scan_id),
1541 );
1542 }
1543 if matches!(parent_entry.kind, EntryKind::PendingDir) {
1544 parent_entry.kind = EntryKind::Dir;
1545 } else {
1546 unreachable!();
1547 }
1548
1549 if parent_path.file_name() == Some(&DOT_GIT) {
1550 let abs_path = self.abs_path.join(&parent_path);
1551 let content_path: Arc<Path> = parent_path.parent().unwrap().into();
1552 if let Err(ix) = self
1553 .git_repositories
1554 .binary_search_by_key(&&content_path, |repo| &repo.content_path)
1555 {
1556 if let Some(repo) = fs.open_repo(abs_path.as_path()) {
1557 self.git_repositories.insert(
1558 ix,
1559 GitRepositoryEntry {
1560 repo,
1561 scan_id: 0,
1562 content_path,
1563 git_dir_path: parent_path,
1564 },
1565 );
1566 }
1567 }
1568 }
1569
1570 let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
1571 let mut entries_by_id_edits = Vec::new();
1572
1573 for mut entry in entries {
1574 self.reuse_entry_id(&mut entry);
1575 entries_by_id_edits.push(Edit::Insert(PathEntry {
1576 id: entry.id,
1577 path: entry.path.clone(),
1578 is_ignored: entry.is_ignored,
1579 scan_id: self.scan_id,
1580 }));
1581 entries_by_path_edits.push(Edit::Insert(entry));
1582 }
1583
1584 self.entries_by_path.edit(entries_by_path_edits, &());
1585 self.entries_by_id.edit(entries_by_id_edits, &());
1586 }
1587
1588 fn reuse_entry_id(&mut self, entry: &mut Entry) {
1589 if let Some(removed_entry_id) = self.removed_entry_ids.remove(&entry.inode) {
1590 entry.id = removed_entry_id;
1591 } else if let Some(existing_entry) = self.entry_for_path(&entry.path) {
1592 entry.id = existing_entry.id;
1593 }
1594 }
1595
1596 fn remove_path(&mut self, path: &Path) {
1597 let mut new_entries;
1598 let removed_entries;
1599 {
1600 let mut cursor = self.entries_by_path.cursor::<TraversalProgress>();
1601 new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
1602 removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
1603 new_entries.push_tree(cursor.suffix(&()), &());
1604 }
1605 self.entries_by_path = new_entries;
1606
1607 let mut entries_by_id_edits = Vec::new();
1608 for entry in removed_entries.cursor::<()>() {
1609 let removed_entry_id = self
1610 .removed_entry_ids
1611 .entry(entry.inode)
1612 .or_insert(entry.id);
1613 *removed_entry_id = cmp::max(*removed_entry_id, entry.id);
1614 entries_by_id_edits.push(Edit::Remove(entry.id));
1615 }
1616 self.entries_by_id.edit(entries_by_id_edits, &());
1617
1618 if path.file_name() == Some(&GITIGNORE) {
1619 let abs_parent_path = self.abs_path.join(path.parent().unwrap());
1620 if let Some((_, scan_id)) = self
1621 .ignores_by_parent_abs_path
1622 .get_mut(abs_parent_path.as_path())
1623 {
1624 *scan_id = self.snapshot.scan_id;
1625 }
1626 } else if path.file_name() == Some(&DOT_GIT) {
1627 let parent_path = path.parent().unwrap();
1628 if let Ok(ix) = self
1629 .git_repositories
1630 .binary_search_by_key(&parent_path, |repo| repo.git_dir_path.as_ref())
1631 {
1632 self.git_repositories[ix].scan_id = self.snapshot.scan_id;
1633 }
1634 }
1635 }
1636
1637 fn ancestor_inodes_for_path(&self, path: &Path) -> TreeSet<u64> {
1638 let mut inodes = TreeSet::default();
1639 for ancestor in path.ancestors().skip(1) {
1640 if let Some(entry) = self.entry_for_path(ancestor) {
1641 inodes.insert(entry.inode);
1642 }
1643 }
1644 inodes
1645 }
1646
1647 fn ignore_stack_for_abs_path(&self, abs_path: &Path, is_dir: bool) -> Arc<IgnoreStack> {
1648 let mut new_ignores = Vec::new();
1649 for ancestor in abs_path.ancestors().skip(1) {
1650 if let Some((ignore, _)) = self.ignores_by_parent_abs_path.get(ancestor) {
1651 new_ignores.push((ancestor, Some(ignore.clone())));
1652 } else {
1653 new_ignores.push((ancestor, None));
1654 }
1655 }
1656
1657 let mut ignore_stack = IgnoreStack::none();
1658 for (parent_abs_path, ignore) in new_ignores.into_iter().rev() {
1659 if ignore_stack.is_abs_path_ignored(parent_abs_path, true) {
1660 ignore_stack = IgnoreStack::all();
1661 break;
1662 } else if let Some(ignore) = ignore {
1663 ignore_stack = ignore_stack.append(parent_abs_path.into(), ignore);
1664 }
1665 }
1666
1667 if ignore_stack.is_abs_path_ignored(abs_path, is_dir) {
1668 ignore_stack = IgnoreStack::all();
1669 }
1670
1671 ignore_stack
1672 }
1673
1674 pub fn git_repo_entries(&self) -> &[GitRepositoryEntry] {
1675 &self.git_repositories
1676 }
1677}
1678
1679impl GitRepositoryEntry {
1680 // Note that these paths should be relative to the worktree root.
1681 pub(crate) fn manages(&self, path: &Path) -> bool {
1682 path.starts_with(self.content_path.as_ref())
1683 }
1684
1685 // Note that this path should be relative to the worktree root.
1686 pub(crate) fn in_dot_git(&self, path: &Path) -> bool {
1687 path.starts_with(self.git_dir_path.as_ref())
1688 }
1689}
1690
1691async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
1692 let contents = fs.load(abs_path).await?;
1693 let parent = abs_path.parent().unwrap_or_else(|| Path::new("/"));
1694 let mut builder = GitignoreBuilder::new(parent);
1695 for line in contents.lines() {
1696 builder.add_line(Some(abs_path.into()), line)?;
1697 }
1698 Ok(builder.build()?)
1699}
1700
1701impl WorktreeId {
1702 pub fn from_usize(handle_id: usize) -> Self {
1703 Self(handle_id)
1704 }
1705
1706 pub(crate) fn from_proto(id: u64) -> Self {
1707 Self(id as usize)
1708 }
1709
1710 pub fn to_proto(&self) -> u64 {
1711 self.0 as u64
1712 }
1713
1714 pub fn to_usize(&self) -> usize {
1715 self.0
1716 }
1717}
1718
1719impl fmt::Display for WorktreeId {
1720 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1721 self.0.fmt(f)
1722 }
1723}
1724
1725impl Deref for Worktree {
1726 type Target = Snapshot;
1727
1728 fn deref(&self) -> &Self::Target {
1729 match self {
1730 Worktree::Local(worktree) => &worktree.snapshot,
1731 Worktree::Remote(worktree) => &worktree.snapshot,
1732 }
1733 }
1734}
1735
1736impl Deref for LocalWorktree {
1737 type Target = LocalSnapshot;
1738
1739 fn deref(&self) -> &Self::Target {
1740 &self.snapshot
1741 }
1742}
1743
1744impl Deref for RemoteWorktree {
1745 type Target = Snapshot;
1746
1747 fn deref(&self) -> &Self::Target {
1748 &self.snapshot
1749 }
1750}
1751
1752impl fmt::Debug for LocalWorktree {
1753 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1754 self.snapshot.fmt(f)
1755 }
1756}
1757
1758impl fmt::Debug for Snapshot {
1759 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1760 struct EntriesById<'a>(&'a SumTree<PathEntry>);
1761 struct EntriesByPath<'a>(&'a SumTree<Entry>);
1762
1763 impl<'a> fmt::Debug for EntriesByPath<'a> {
1764 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1765 f.debug_map()
1766 .entries(self.0.iter().map(|entry| (&entry.path, entry.id)))
1767 .finish()
1768 }
1769 }
1770
1771 impl<'a> fmt::Debug for EntriesById<'a> {
1772 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1773 f.debug_list().entries(self.0.iter()).finish()
1774 }
1775 }
1776
1777 f.debug_struct("Snapshot")
1778 .field("id", &self.id)
1779 .field("root_name", &self.root_name)
1780 .field("entries_by_path", &EntriesByPath(&self.entries_by_path))
1781 .field("entries_by_id", &EntriesById(&self.entries_by_id))
1782 .finish()
1783 }
1784}
1785
1786#[derive(Clone, PartialEq)]
1787pub struct File {
1788 pub worktree: ModelHandle<Worktree>,
1789 pub path: Arc<Path>,
1790 pub mtime: SystemTime,
1791 pub(crate) entry_id: ProjectEntryId,
1792 pub(crate) is_local: bool,
1793 pub(crate) is_deleted: bool,
1794}
1795
1796impl language::File for File {
1797 fn as_local(&self) -> Option<&dyn language::LocalFile> {
1798 if self.is_local {
1799 Some(self)
1800 } else {
1801 None
1802 }
1803 }
1804
1805 fn mtime(&self) -> SystemTime {
1806 self.mtime
1807 }
1808
1809 fn path(&self) -> &Arc<Path> {
1810 &self.path
1811 }
1812
1813 fn full_path(&self, cx: &AppContext) -> PathBuf {
1814 let mut full_path = PathBuf::new();
1815 let worktree = self.worktree.read(cx);
1816
1817 if worktree.is_visible() {
1818 full_path.push(worktree.root_name());
1819 } else {
1820 let path = worktree.abs_path();
1821
1822 if worktree.is_local() && path.starts_with(HOME.as_path()) {
1823 full_path.push("~");
1824 full_path.push(path.strip_prefix(HOME.as_path()).unwrap());
1825 } else {
1826 full_path.push(path)
1827 }
1828 }
1829
1830 if self.path.components().next().is_some() {
1831 full_path.push(&self.path);
1832 }
1833
1834 full_path
1835 }
1836
1837 /// Returns the last component of this handle's absolute path. If this handle refers to the root
1838 /// of its worktree, then this method will return the name of the worktree itself.
1839 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr {
1840 self.path
1841 .file_name()
1842 .unwrap_or_else(|| OsStr::new(&self.worktree.read(cx).root_name))
1843 }
1844
1845 fn is_deleted(&self) -> bool {
1846 self.is_deleted
1847 }
1848
1849 fn as_any(&self) -> &dyn Any {
1850 self
1851 }
1852
1853 fn to_proto(&self) -> rpc::proto::File {
1854 rpc::proto::File {
1855 worktree_id: self.worktree.id() as u64,
1856 entry_id: self.entry_id.to_proto(),
1857 path: self.path.to_string_lossy().into(),
1858 mtime: Some(self.mtime.into()),
1859 is_deleted: self.is_deleted,
1860 }
1861 }
1862}
1863
1864impl language::LocalFile for File {
1865 fn abs_path(&self, cx: &AppContext) -> PathBuf {
1866 self.worktree
1867 .read(cx)
1868 .as_local()
1869 .unwrap()
1870 .abs_path
1871 .join(&self.path)
1872 }
1873
1874 fn load(&self, cx: &AppContext) -> Task<Result<String>> {
1875 let worktree = self.worktree.read(cx).as_local().unwrap();
1876 let abs_path = worktree.absolutize(&self.path);
1877 let fs = worktree.fs.clone();
1878 cx.background()
1879 .spawn(async move { fs.load(&abs_path).await })
1880 }
1881
1882 fn buffer_reloaded(
1883 &self,
1884 buffer_id: u64,
1885 version: &clock::Global,
1886 fingerprint: RopeFingerprint,
1887 line_ending: LineEnding,
1888 mtime: SystemTime,
1889 cx: &mut MutableAppContext,
1890 ) {
1891 let worktree = self.worktree.read(cx).as_local().unwrap();
1892 if let Some(project_id) = worktree.share.as_ref().map(|share| share.project_id) {
1893 worktree
1894 .client
1895 .send(proto::BufferReloaded {
1896 project_id,
1897 buffer_id,
1898 version: serialize_version(version),
1899 mtime: Some(mtime.into()),
1900 fingerprint: serialize_fingerprint(fingerprint),
1901 line_ending: serialize_line_ending(line_ending) as i32,
1902 })
1903 .log_err();
1904 }
1905 }
1906}
1907
1908impl File {
1909 pub fn from_proto(
1910 proto: rpc::proto::File,
1911 worktree: ModelHandle<Worktree>,
1912 cx: &AppContext,
1913 ) -> Result<Self> {
1914 let worktree_id = worktree
1915 .read(cx)
1916 .as_remote()
1917 .ok_or_else(|| anyhow!("not remote"))?
1918 .id();
1919
1920 if worktree_id.to_proto() != proto.worktree_id {
1921 return Err(anyhow!("worktree id does not match file"));
1922 }
1923
1924 Ok(Self {
1925 worktree,
1926 path: Path::new(&proto.path).into(),
1927 mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
1928 entry_id: ProjectEntryId::from_proto(proto.entry_id),
1929 is_local: false,
1930 is_deleted: proto.is_deleted,
1931 })
1932 }
1933
1934 pub fn from_dyn(file: Option<&Arc<dyn language::File>>) -> Option<&Self> {
1935 file.and_then(|f| f.as_any().downcast_ref())
1936 }
1937
1938 pub fn worktree_id(&self, cx: &AppContext) -> WorktreeId {
1939 self.worktree.read(cx).id()
1940 }
1941
1942 pub fn project_entry_id(&self, _: &AppContext) -> Option<ProjectEntryId> {
1943 if self.is_deleted {
1944 None
1945 } else {
1946 Some(self.entry_id)
1947 }
1948 }
1949}
1950
1951#[derive(Clone, Debug, PartialEq, Eq)]
1952pub struct Entry {
1953 pub id: ProjectEntryId,
1954 pub kind: EntryKind,
1955 pub path: Arc<Path>,
1956 pub inode: u64,
1957 pub mtime: SystemTime,
1958 pub is_symlink: bool,
1959 pub is_ignored: bool,
1960}
1961
1962#[derive(Clone, Copy, Debug, PartialEq, Eq)]
1963pub enum EntryKind {
1964 PendingDir,
1965 Dir,
1966 File(CharBag),
1967}
1968
1969#[derive(Clone, Copy, Debug)]
1970pub enum PathChange {
1971 Added,
1972 Removed,
1973 Updated,
1974 AddedOrUpdated,
1975}
1976
1977impl Entry {
1978 fn new(
1979 path: Arc<Path>,
1980 metadata: &fs::Metadata,
1981 next_entry_id: &AtomicUsize,
1982 root_char_bag: CharBag,
1983 ) -> Self {
1984 Self {
1985 id: ProjectEntryId::new(next_entry_id),
1986 kind: if metadata.is_dir {
1987 EntryKind::PendingDir
1988 } else {
1989 EntryKind::File(char_bag_for_path(root_char_bag, &path))
1990 },
1991 path,
1992 inode: metadata.inode,
1993 mtime: metadata.mtime,
1994 is_symlink: metadata.is_symlink,
1995 is_ignored: false,
1996 }
1997 }
1998
1999 pub fn is_dir(&self) -> bool {
2000 matches!(self.kind, EntryKind::Dir | EntryKind::PendingDir)
2001 }
2002
2003 pub fn is_file(&self) -> bool {
2004 matches!(self.kind, EntryKind::File(_))
2005 }
2006}
2007
2008impl sum_tree::Item for Entry {
2009 type Summary = EntrySummary;
2010
2011 fn summary(&self) -> Self::Summary {
2012 let visible_count = if self.is_ignored { 0 } else { 1 };
2013 let file_count;
2014 let visible_file_count;
2015 if self.is_file() {
2016 file_count = 1;
2017 visible_file_count = visible_count;
2018 } else {
2019 file_count = 0;
2020 visible_file_count = 0;
2021 }
2022
2023 EntrySummary {
2024 max_path: self.path.clone(),
2025 count: 1,
2026 visible_count,
2027 file_count,
2028 visible_file_count,
2029 }
2030 }
2031}
2032
2033impl sum_tree::KeyedItem for Entry {
2034 type Key = PathKey;
2035
2036 fn key(&self) -> Self::Key {
2037 PathKey(self.path.clone())
2038 }
2039}
2040
2041#[derive(Clone, Debug)]
2042pub struct EntrySummary {
2043 max_path: Arc<Path>,
2044 count: usize,
2045 visible_count: usize,
2046 file_count: usize,
2047 visible_file_count: usize,
2048}
2049
2050impl Default for EntrySummary {
2051 fn default() -> Self {
2052 Self {
2053 max_path: Arc::from(Path::new("")),
2054 count: 0,
2055 visible_count: 0,
2056 file_count: 0,
2057 visible_file_count: 0,
2058 }
2059 }
2060}
2061
2062impl sum_tree::Summary for EntrySummary {
2063 type Context = ();
2064
2065 fn add_summary(&mut self, rhs: &Self, _: &()) {
2066 self.max_path = rhs.max_path.clone();
2067 self.count += rhs.count;
2068 self.visible_count += rhs.visible_count;
2069 self.file_count += rhs.file_count;
2070 self.visible_file_count += rhs.visible_file_count;
2071 }
2072}
2073
2074#[derive(Clone, Debug)]
2075struct PathEntry {
2076 id: ProjectEntryId,
2077 path: Arc<Path>,
2078 is_ignored: bool,
2079 scan_id: usize,
2080}
2081
2082impl sum_tree::Item for PathEntry {
2083 type Summary = PathEntrySummary;
2084
2085 fn summary(&self) -> Self::Summary {
2086 PathEntrySummary { max_id: self.id }
2087 }
2088}
2089
2090impl sum_tree::KeyedItem for PathEntry {
2091 type Key = ProjectEntryId;
2092
2093 fn key(&self) -> Self::Key {
2094 self.id
2095 }
2096}
2097
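/// Summary over `PathEntry` items, tracking the maximum `ProjectEntryId` seen.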
2098#[derive(Clone, Debug, Default)]
2099struct PathEntrySummary {
2100 max_id: ProjectEntryId,
2101}
2102
2103impl sum_tree::Summary for PathEntrySummary {
2104 type Context = ();
2105
2106 fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
2107 self.max_id = summary.max_id;
2108 }
2109}
2110
2111impl<'a> sum_tree::Dimension<'a, PathEntrySummary> for ProjectEntryId {
2112 fn add_summary(&mut self, summary: &'a PathEntrySummary, _: &()) {
2113 *self = summary.max_id;
2114 }
2115}
2116
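/// A path-valued key used to seek within the `entries_by_path` tree.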
2117#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
2118pub struct PathKey(Arc<Path>);
2119
2120impl Default for PathKey {
2121 fn default() -> Self {
2122 Self(Path::new("").into())
2123 }
2124}
2125
2126impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey {
2127 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2128 self.0 = summary.max_path.clone();
2129 }
2130}
2131
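/// Watches a local worktree's directory tree on the background executor,
/// keeping a shared `LocalSnapshot` up to date and reporting progress to the
/// worktree through `ScanState` notifications.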
2132struct BackgroundScanner {
2133 fs: Arc<dyn Fs>,
2134 snapshot: Arc<Mutex<LocalSnapshot>>,
2135 changes: HashMap<Arc<Path>, PathChange>,
2136 notify: UnboundedSender<ScanState>,
2137 executor: Arc<executor::Background>,
2138}
2139
2140impl BackgroundScanner {
2141 fn new(
2142 snapshot: LocalSnapshot,
2143 notify: UnboundedSender<ScanState>,
2144 fs: Arc<dyn Fs>,
2145 executor: Arc<executor::Background>,
2146 ) -> Self {
2147 Self {
2148 fs,
2149 snapshot: Arc::new(Mutex::new(snapshot)),
2150 notify,
2151 executor,
2152 changes: Default::default(),
2153 }
2154 }
2155
2156 fn abs_path(&self) -> Arc<Path> {
2157 self.snapshot.lock().abs_path.clone()
2158 }
2159
2160 async fn run(
2161 mut self,
2162 events_rx: impl Stream<Item = Vec<fsevent::Event>>,
2163 mut changed_paths: UnboundedReceiver<(Vec<PathBuf>, barrier::Sender)>,
2164 ) {
2165 use futures::FutureExt as _;
2166
2167 // Retrieve the basic properties of the root node.
2168 let root_char_bag;
2169 let root_abs_path;
2170 let root_inode;
2171 let root_is_dir;
2172 let next_entry_id;
2173 {
2174 let mut snapshot = self.snapshot.lock();
2175 snapshot.scan_started();
2176 root_char_bag = snapshot.root_char_bag;
2177 root_abs_path = snapshot.abs_path.clone();
2178 root_inode = snapshot.root_entry().map(|e| e.inode);
2179 root_is_dir = snapshot.root_entry().map_or(false, |e| e.is_dir());
2180 next_entry_id = snapshot.next_entry_id.clone();
2181 }
2182
2183 // Populate ignores above the root.
2184 let ignore_stack;
2185 for ancestor in root_abs_path.ancestors().skip(1) {
2186 if let Ok(ignore) = build_gitignore(&ancestor.join(&*GITIGNORE), self.fs.as_ref()).await
2187 {
2188 self.snapshot
2189 .lock()
2190 .ignores_by_parent_abs_path
2191 .insert(ancestor.into(), (ignore.into(), 0));
2192 }
2193 }
2194 {
2195 let mut snapshot = self.snapshot.lock();
2196 ignore_stack = snapshot.ignore_stack_for_abs_path(&root_abs_path, true);
2197 if ignore_stack.is_all() {
2198 if let Some(mut root_entry) = snapshot.root_entry().cloned() {
2199 root_entry.is_ignored = true;
2200 snapshot.insert_entry(root_entry, self.fs.as_ref());
2201 }
2202 }
2203 };
2204
2205 if root_is_dir {
2206 let mut ancestor_inodes = TreeSet::default();
2207 if let Some(root_inode) = root_inode {
2208 ancestor_inodes.insert(root_inode);
2209 }
2210
2211 let (tx, rx) = channel::unbounded();
2212 self.executor
2213 .block(tx.send(ScanJob {
2214 abs_path: root_abs_path.to_path_buf(),
2215 path: Arc::from(Path::new("")),
2216 ignore_stack,
2217 ancestor_inodes,
2218 scan_queue: tx.clone(),
2219 }))
2220 .unwrap();
2221 drop(tx);
2222
2223 // Spawn a worker thread per logical CPU.
2224 self.executor
2225 .scoped(|scope| {
                    // On the first worker thread, listen for change requests from the worktree.
                    // For each change request, after refreshing the given paths, report
                    // a progress update for the snapshot.
2229 scope.spawn(async {
2230 let reporting_timer = self.delay().fuse();
2231 futures::pin_mut!(reporting_timer);
2232
2233 loop {
2234 select_biased! {
2235 job = changed_paths.next().fuse() => {
2236 let Some((abs_paths, barrier)) = job else { break };
2237 self.update_entries_for_paths(abs_paths, None).await;
2238 if self.notify.unbounded_send(ScanState::Initializing {
2239 snapshot: self.snapshot.lock().clone(),
2240 barrier: Some(barrier),
2241 }).is_err() {
2242 break;
2243 }
2244 }
2245
2246 _ = reporting_timer => {
2247 reporting_timer.set(self.delay().fuse());
2248 if self.notify.unbounded_send(ScanState::Initializing {
2249 snapshot: self.snapshot.lock().clone(),
2250 barrier: None,
2251 }).is_err() {
2252 break;
2253 }
2254 }
2255
2256 job = rx.recv().fuse() => {
2257 let Ok(job) = job else { break };
2258 if let Err(err) = self
2259 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2260 .await
2261 {
2262 log::error!("error scanning {:?}: {}", job.abs_path, err);
2263 }
2264 }
2265 }
2266 }
2267 });
2268
2269 // On all of the remaining worker threads, just scan directories.
2270 for _ in 1..self.executor.num_cpus() {
2271 scope.spawn(async {
2272 while let Ok(job) = rx.recv().await {
2273 if let Err(err) = self
2274 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2275 .await
2276 {
2277 log::error!("error scanning {:?}: {}", job.abs_path, err);
2278 }
2279 }
2280 });
2281 }
2282 })
2283 .await;
2284 }
2285
2286 self.snapshot.lock().scan_completed();
2287
2288 if self
2289 .notify
2290 .unbounded_send(ScanState::Initialized {
2291 snapshot: self.snapshot.lock().clone(),
2292 })
2293 .is_err()
2294 {
2295 return;
2296 }
2297
2298 // Process any events that occurred while performing the initial scan. These
2299 // events can't be reported as precisely, because there is no snapshot of the
2300 // worktree before they occurred.
2301 futures::pin_mut!(events_rx);
2302 if let Poll::Ready(Some(mut events)) = futures::poll!(events_rx.next()) {
2303 while let Poll::Ready(Some(additional_events)) = futures::poll!(events_rx.next()) {
2304 events.extend(additional_events);
2305 }
2306 if self.notify.unbounded_send(ScanState::Updating).is_err() {
2307 return;
2308 }
2309 if !self
2310 .process_events(events.into_iter().map(|e| e.path).collect(), true)
2311 .await
2312 {
2313 return;
2314 }
2315 if self
2316 .notify
2317 .unbounded_send(ScanState::Updated {
2318 snapshot: self.snapshot.lock().clone(),
2319 changes: mem::take(&mut self.changes),
2320 barrier: None,
2321 })
2322 .is_err()
2323 {
2324 return;
2325 }
2326 }
2327
2328 // Continue processing events until the worktree is dropped.
2329 loop {
2330 let abs_paths;
2331 let barrier;
2332 select_biased! {
2333 request = changed_paths.next().fuse() => {
2334 let Some((paths, b)) = request else { break };
2335 abs_paths = paths;
2336 barrier = Some(b);
2337 }
2338 events = events_rx.next().fuse() => {
2339 let Some(events) = events else { break };
2340 abs_paths = events.into_iter().map(|e| e.path).collect();
2341 barrier = None;
2342 }
2343 }
2344
2345 if self.notify.unbounded_send(ScanState::Updating).is_err() {
2346 return;
2347 }
2348 if !self.process_events(abs_paths, false).await {
2349 return;
2350 }
2351 if self
2352 .notify
2353 .unbounded_send(ScanState::Updated {
2354 snapshot: self.snapshot.lock().clone(),
2355 changes: mem::take(&mut self.changes),
2356 barrier,
2357 })
2358 .is_err()
2359 {
2360 return;
2361 }
2362 }
2363 }
2364
2365 async fn delay(&self) {
2366 #[cfg(any(test, feature = "test-support"))]
2367 if self.fs.is_fake() {
2368 return self.executor.simulate_random_delay().await;
2369 }
2370 smol::Timer::after(Duration::from_millis(100)).await;
2371 }
2372
2373 async fn scan_dir(
2374 &self,
2375 root_char_bag: CharBag,
2376 next_entry_id: Arc<AtomicUsize>,
2377 job: &ScanJob,
2378 ) -> Result<()> {
2379 let mut new_entries: Vec<Entry> = Vec::new();
2380 let mut new_jobs: Vec<Option<ScanJob>> = Vec::new();
2381 let mut ignore_stack = job.ignore_stack.clone();
2382 let mut new_ignore = None;
2383
2384 let mut child_paths = self.fs.read_dir(&job.abs_path).await?;
2385 while let Some(child_abs_path) = child_paths.next().await {
2386 let child_abs_path = match child_abs_path {
2387 Ok(child_abs_path) => child_abs_path,
2388 Err(error) => {
2389 log::error!("error processing entry {:?}", error);
2390 continue;
2391 }
2392 };
2393
2394 let child_name = child_abs_path.file_name().unwrap();
2395 let child_path: Arc<Path> = job.path.join(child_name).into();
2396 let child_metadata = match self.fs.metadata(&child_abs_path).await {
2397 Ok(Some(metadata)) => metadata,
2398 Ok(None) => continue,
2399 Err(err) => {
2400 log::error!("error processing {:?}: {:?}", child_abs_path, err);
2401 continue;
2402 }
2403 };
2404
2405 // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
2406 if child_name == *GITIGNORE {
2407 match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
2408 Ok(ignore) => {
2409 let ignore = Arc::new(ignore);
2410 ignore_stack =
2411 ignore_stack.append(job.abs_path.as_path().into(), ignore.clone());
2412 new_ignore = Some(ignore);
2413 }
2414 Err(error) => {
2415 log::error!(
2416 "error loading .gitignore file {:?} - {:?}",
2417 child_name,
2418 error
2419 );
2420 }
2421 }
2422
                // Update the ignore status of any child entries we've already processed, to
                // reflect the ignore file in the current directory. Because `.gitignore` starts
                // with a `.`, it is usually encountered near the start of the directory listing,
                // so there should rarely be many entries to update. Update the ignore stack
                // associated with any new jobs as well.
2427 let mut new_jobs = new_jobs.iter_mut();
2428 for entry in &mut new_entries {
2429 let entry_abs_path = self.abs_path().join(&entry.path);
2430 entry.is_ignored =
2431 ignore_stack.is_abs_path_ignored(&entry_abs_path, entry.is_dir());
2432
2433 if entry.is_dir() {
2434 if let Some(job) = new_jobs.next().expect("Missing scan job for entry") {
2435 job.ignore_stack = if entry.is_ignored {
2436 IgnoreStack::all()
2437 } else {
2438 ignore_stack.clone()
2439 };
2440 }
2441 }
2442 }
2443 }
2444
2445 let mut child_entry = Entry::new(
2446 child_path.clone(),
2447 &child_metadata,
2448 &next_entry_id,
2449 root_char_bag,
2450 );
2451
2452 if child_entry.is_dir() {
2453 let is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true);
2454 child_entry.is_ignored = is_ignored;
2455
                // Avoid recursing indefinitely (and eventually crashing) in the case of a recursive symlink.
2457 if !job.ancestor_inodes.contains(&child_entry.inode) {
2458 let mut ancestor_inodes = job.ancestor_inodes.clone();
2459 ancestor_inodes.insert(child_entry.inode);
2460
2461 new_jobs.push(Some(ScanJob {
2462 abs_path: child_abs_path,
2463 path: child_path,
2464 ignore_stack: if is_ignored {
2465 IgnoreStack::all()
2466 } else {
2467 ignore_stack.clone()
2468 },
2469 ancestor_inodes,
2470 scan_queue: job.scan_queue.clone(),
2471 }));
2472 } else {
2473 new_jobs.push(None);
2474 }
2475 } else {
2476 child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
2477 }
2478
2479 new_entries.push(child_entry);
2480 }
2481
2482 self.snapshot.lock().populate_dir(
2483 job.path.clone(),
2484 new_entries,
2485 new_ignore,
2486 self.fs.as_ref(),
2487 );
2488
2489 for new_job in new_jobs {
2490 if let Some(new_job) = new_job {
2491 job.scan_queue.send(new_job).await.unwrap();
2492 }
2493 }
2494
2495 Ok(())
2496 }
2497
2498 async fn process_events(
2499 &mut self,
2500 abs_paths: Vec<PathBuf>,
2501 received_before_initialized: bool,
2502 ) -> bool {
2503 let (scan_queue_tx, scan_queue_rx) = channel::unbounded();
2504
2505 let prev_snapshot = {
2506 let mut snapshot = self.snapshot.lock();
2507 snapshot.scan_started();
2508 snapshot.clone()
2509 };
2510
2511 let event_paths = if let Some(event_paths) = self
2512 .update_entries_for_paths(abs_paths, Some(scan_queue_tx))
2513 .await
2514 {
2515 event_paths
2516 } else {
2517 return false;
2518 };
2519
2520 // Scan any directories that were created as part of this event batch.
2521 self.executor
2522 .scoped(|scope| {
2523 for _ in 0..self.executor.num_cpus() {
2524 scope.spawn(async {
2525 while let Ok(job) = scan_queue_rx.recv().await {
2526 if let Err(err) = self
2527 .scan_dir(
2528 prev_snapshot.root_char_bag,
2529 prev_snapshot.next_entry_id.clone(),
2530 &job,
2531 )
2532 .await
2533 {
2534 log::error!("error scanning {:?}: {}", job.abs_path, err);
2535 }
2536 }
2537 });
2538 }
2539 })
2540 .await;
2541
2542 // Attempt to detect renames only over a single batch of file-system events.
2543 self.snapshot.lock().removed_entry_ids.clear();
2544
2545 self.update_ignore_statuses().await;
2546 self.update_git_repositories();
2547 self.build_change_set(
2548 prev_snapshot.snapshot,
2549 event_paths,
2550 received_before_initialized,
2551 );
2552 self.snapshot.lock().scan_completed();
2553 true
2554 }
2555
2556 async fn update_entries_for_paths(
2557 &self,
2558 mut abs_paths: Vec<PathBuf>,
2559 scan_queue_tx: Option<Sender<ScanJob>>,
2560 ) -> Option<Vec<Arc<Path>>> {
2561 abs_paths.sort_unstable();
2562 abs_paths.dedup_by(|a, b| a.starts_with(&b));
2563
2564 let root_abs_path = self.snapshot.lock().abs_path.clone();
2565 let root_canonical_path = self.fs.canonicalize(&root_abs_path).await.ok()?;
2566 let metadata = futures::future::join_all(
2567 abs_paths
2568 .iter()
2569 .map(|abs_path| self.fs.metadata(&abs_path))
2570 .collect::<Vec<_>>(),
2571 )
2572 .await;
2573
2574 let mut snapshot = self.snapshot.lock();
2575 if scan_queue_tx.is_some() {
2576 for abs_path in &abs_paths {
2577 if let Ok(path) = abs_path.strip_prefix(&root_canonical_path) {
2578 snapshot.remove_path(path);
2579 }
2580 }
2581 }
2582
2583 let mut event_paths = Vec::with_capacity(abs_paths.len());
2584 for (abs_path, metadata) in abs_paths.into_iter().zip(metadata.into_iter()) {
2585 let path: Arc<Path> = match abs_path.strip_prefix(&root_canonical_path) {
2586 Ok(path) => Arc::from(path.to_path_buf()),
2587 Err(_) => {
2588 log::error!(
2589 "unexpected event {:?} for root path {:?}",
2590 abs_path,
2591 root_canonical_path
2592 );
2593 continue;
2594 }
2595 };
2596 event_paths.push(path.clone());
2597 let abs_path = root_abs_path.join(&path);
2598
2599 match metadata {
2600 Ok(Some(metadata)) => {
2601 let ignore_stack =
2602 snapshot.ignore_stack_for_abs_path(&abs_path, metadata.is_dir);
2603 let mut fs_entry = Entry::new(
2604 path.clone(),
2605 &metadata,
2606 snapshot.next_entry_id.as_ref(),
2607 snapshot.root_char_bag,
2608 );
2609 fs_entry.is_ignored = ignore_stack.is_all();
2610 snapshot.insert_entry(fs_entry, self.fs.as_ref());
2611
2612 let scan_id = snapshot.scan_id;
2613 if let Some(repo) = snapshot.repo_with_dot_git_containing(&path) {
2614 repo.repo.lock().reload_index();
2615 repo.scan_id = scan_id;
2616 }
2617
2618 if let Some(scan_queue_tx) = &scan_queue_tx {
2619 let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path);
2620 if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) {
2621 ancestor_inodes.insert(metadata.inode);
2622 self.executor
2623 .block(scan_queue_tx.send(ScanJob {
2624 abs_path,
2625 path,
2626 ignore_stack,
2627 ancestor_inodes,
2628 scan_queue: scan_queue_tx.clone(),
2629 }))
2630 .unwrap();
2631 }
2632 }
2633 }
2634 Ok(None) => {}
2635 Err(err) => {
2636 // TODO - create a special 'error' entry in the entries tree to mark this
2637 log::error!("error reading file on event {:?}", err);
2638 }
2639 }
2640 }
2641
2642 Some(event_paths)
2643 }
2644
2645 async fn update_ignore_statuses(&self) {
2646 let mut snapshot = self.snapshot.lock().clone();
2647 let mut ignores_to_update = Vec::new();
2648 let mut ignores_to_delete = Vec::new();
2649 for (parent_abs_path, (_, scan_id)) in &snapshot.ignores_by_parent_abs_path {
2650 if let Ok(parent_path) = parent_abs_path.strip_prefix(&snapshot.abs_path) {
2651 if *scan_id == snapshot.scan_id && snapshot.entry_for_path(parent_path).is_some() {
2652 ignores_to_update.push(parent_abs_path.clone());
2653 }
2654
2655 let ignore_path = parent_path.join(&*GITIGNORE);
2656 if snapshot.entry_for_path(ignore_path).is_none() {
2657 ignores_to_delete.push(parent_abs_path.clone());
2658 }
2659 }
2660 }
2661
2662 for parent_abs_path in ignores_to_delete {
2663 snapshot.ignores_by_parent_abs_path.remove(&parent_abs_path);
2664 self.snapshot
2665 .lock()
2666 .ignores_by_parent_abs_path
2667 .remove(&parent_abs_path);
2668 }
2669
2670 let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded();
2671 ignores_to_update.sort_unstable();
2672 let mut ignores_to_update = ignores_to_update.into_iter().peekable();
2673 while let Some(parent_abs_path) = ignores_to_update.next() {
2674 while ignores_to_update
2675 .peek()
2676 .map_or(false, |p| p.starts_with(&parent_abs_path))
2677 {
2678 ignores_to_update.next().unwrap();
2679 }
2680
2681 let ignore_stack = snapshot.ignore_stack_for_abs_path(&parent_abs_path, true);
2682 ignore_queue_tx
2683 .send(UpdateIgnoreStatusJob {
2684 abs_path: parent_abs_path,
2685 ignore_stack,
2686 ignore_queue: ignore_queue_tx.clone(),
2687 })
2688 .await
2689 .unwrap();
2690 }
2691 drop(ignore_queue_tx);
2692
2693 self.executor
2694 .scoped(|scope| {
2695 for _ in 0..self.executor.num_cpus() {
2696 scope.spawn(async {
2697 while let Ok(job) = ignore_queue_rx.recv().await {
2698 self.update_ignore_status(job, &snapshot).await;
2699 }
2700 });
2701 }
2702 })
2703 .await;
2704 }
2705
2706 fn update_git_repositories(&self) {
2707 let mut snapshot = self.snapshot.lock();
2708 let mut git_repositories = mem::take(&mut snapshot.git_repositories);
2709 git_repositories.retain(|repo| snapshot.entry_for_path(&repo.git_dir_path).is_some());
2710 snapshot.git_repositories = git_repositories;
2711 }
2712
2713 async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) {
2714 let mut ignore_stack = job.ignore_stack;
2715 if let Some((ignore, _)) = snapshot.ignores_by_parent_abs_path.get(&job.abs_path) {
2716 ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone());
2717 }
2718
2719 let mut entries_by_id_edits = Vec::new();
2720 let mut entries_by_path_edits = Vec::new();
2721 let path = job.abs_path.strip_prefix(&snapshot.abs_path).unwrap();
2722 for mut entry in snapshot.child_entries(path).cloned() {
2723 let was_ignored = entry.is_ignored;
2724 let abs_path = self.abs_path().join(&entry.path);
2725 entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir());
2726 if entry.is_dir() {
2727 let child_ignore_stack = if entry.is_ignored {
2728 IgnoreStack::all()
2729 } else {
2730 ignore_stack.clone()
2731 };
2732 job.ignore_queue
2733 .send(UpdateIgnoreStatusJob {
2734 abs_path: abs_path.into(),
2735 ignore_stack: child_ignore_stack,
2736 ignore_queue: job.ignore_queue.clone(),
2737 })
2738 .await
2739 .unwrap();
2740 }
2741
2742 if entry.is_ignored != was_ignored {
2743 let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone();
2744 path_entry.scan_id = snapshot.scan_id;
2745 path_entry.is_ignored = entry.is_ignored;
2746 entries_by_id_edits.push(Edit::Insert(path_entry));
2747 entries_by_path_edits.push(Edit::Insert(entry));
2748 }
2749 }
2750
2751 let mut snapshot = self.snapshot.lock();
2752 snapshot.entries_by_path.edit(entries_by_path_edits, &());
2753 snapshot.entries_by_id.edit(entries_by_id_edits, &());
2754 }
2755
2756 fn build_change_set(
2757 &mut self,
2758 old_snapshot: Snapshot,
2759 event_paths: Vec<Arc<Path>>,
2760 received_before_initialized: bool,
2761 ) {
2762 let new_snapshot = self.snapshot.lock();
2763 let mut old_paths = old_snapshot.entries_by_path.cursor::<PathKey>();
2764 let mut new_paths = new_snapshot.entries_by_path.cursor::<PathKey>();
2765
2766 use PathChange::{Added, AddedOrUpdated, Removed, Updated};
2767
2768 for path in event_paths {
2769 let path = PathKey(path);
2770 old_paths.seek(&path, Bias::Left, &());
2771 new_paths.seek(&path, Bias::Left, &());
2772
2773 loop {
2774 match (old_paths.item(), new_paths.item()) {
2775 (Some(old_entry), Some(new_entry)) => {
2776 if old_entry.path > path.0
2777 && new_entry.path > path.0
2778 && !old_entry.path.starts_with(&path.0)
2779 && !new_entry.path.starts_with(&path.0)
2780 {
2781 break;
2782 }
2783
2784 match Ord::cmp(&old_entry.path, &new_entry.path) {
2785 Ordering::Less => {
2786 self.changes.insert(old_entry.path.clone(), Removed);
2787 old_paths.next(&());
2788 }
2789 Ordering::Equal => {
2790 if received_before_initialized {
2791 // If the worktree was not fully initialized when this event was generated,
2792 // we can't know whether this entry was added during the scan or whether
2793 // it was merely updated.
2794 self.changes.insert(old_entry.path.clone(), AddedOrUpdated);
2795 } else if old_entry.mtime != new_entry.mtime {
2796 self.changes.insert(old_entry.path.clone(), Updated);
2797 }
2798 old_paths.next(&());
2799 new_paths.next(&());
2800 }
2801 Ordering::Greater => {
2802 self.changes.insert(new_entry.path.clone(), Added);
2803 new_paths.next(&());
2804 }
2805 }
2806 }
2807 (Some(old_entry), None) => {
2808 self.changes.insert(old_entry.path.clone(), Removed);
2809 old_paths.next(&());
2810 }
2811 (None, Some(new_entry)) => {
2812 self.changes.insert(new_entry.path.clone(), Added);
2813 new_paths.next(&());
2814 }
2815 (None, None) => break,
2816 }
2817 }
2818 }
2819 }
2820}
2821
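/// Extends the worktree root's `CharBag` with the lowercased characters of
/// `path`, producing the bag used for fuzzy-matching this entry.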
2822fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
2823 let mut result = root_char_bag;
2824 result.extend(
2825 path.to_string_lossy()
2826 .chars()
2827 .map(|c| c.to_ascii_lowercase()),
2828 );
2829 result
2830}
2831
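/// A queued unit of work for the background scanner: a directory to scan,
/// along with the ignore stack and ancestor inodes in effect at that path.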
2832struct ScanJob {
2833 abs_path: PathBuf,
2834 path: Arc<Path>,
2835 ignore_stack: Arc<IgnoreStack>,
2836 scan_queue: Sender<ScanJob>,
2837 ancestor_inodes: TreeSet<u64>,
2838}
2839
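/// A queued unit of work for recursively re-evaluating ignore statuses beneath
/// a directory whose applicable `.gitignore` files changed.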
2840struct UpdateIgnoreStatusJob {
2841 abs_path: Arc<Path>,
2842 ignore_stack: Arc<IgnoreStack>,
2843 ignore_queue: Sender<UpdateIgnoreStatusJob>,
2844}
2845
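/// Test-support extensions for worktree model handles.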
2846pub trait WorktreeHandle {
2847 #[cfg(any(test, feature = "test-support"))]
2848 fn flush_fs_events<'a>(
2849 &self,
2850 cx: &'a gpui::TestAppContext,
2851 ) -> futures::future::LocalBoxFuture<'a, ()>;
2852}
2853
2854impl WorktreeHandle for ModelHandle<Worktree> {
    // The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
    // occurred before the worktree was constructed. These events can cause the worktree to perform
    // extra directory scans, and emit extra scan-state notifications.
2858 //
2859 // This function mutates the worktree's directory and waits for those mutations to be picked up,
2860 // to ensure that all redundant FS events have already been processed.
2861 #[cfg(any(test, feature = "test-support"))]
2862 fn flush_fs_events<'a>(
2863 &self,
2864 cx: &'a gpui::TestAppContext,
2865 ) -> futures::future::LocalBoxFuture<'a, ()> {
2866 use smol::future::FutureExt;
2867
2868 let filename = "fs-event-sentinel";
2869 let tree = self.clone();
2870 let (fs, root_path) = self.read_with(cx, |tree, _| {
2871 let tree = tree.as_local().unwrap();
2872 (tree.fs.clone(), tree.abs_path().clone())
2873 });
2874
2875 async move {
2876 fs.create_file(&root_path.join(filename), Default::default())
2877 .await
2878 .unwrap();
2879 tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_some())
2880 .await;
2881
2882 fs.remove_file(&root_path.join(filename), Default::default())
2883 .await
2884 .unwrap();
2885 tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_none())
2886 .await;
2887
2888 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2889 .await;
2890 }
2891 .boxed_local()
2892 }
2893}
2894
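/// Cursor dimension recording how many entries, visible entries, files, and
/// visible files precede the current position in the entry tree.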
2895#[derive(Clone, Debug)]
2896struct TraversalProgress<'a> {
2897 max_path: &'a Path,
2898 count: usize,
2899 visible_count: usize,
2900 file_count: usize,
2901 visible_file_count: usize,
2902}
2903
2904impl<'a> TraversalProgress<'a> {
2905 fn count(&self, include_dirs: bool, include_ignored: bool) -> usize {
2906 match (include_ignored, include_dirs) {
2907 (true, true) => self.count,
2908 (true, false) => self.file_count,
2909 (false, true) => self.visible_count,
2910 (false, false) => self.visible_file_count,
2911 }
2912 }
2913}
2914
2915impl<'a> sum_tree::Dimension<'a, EntrySummary> for TraversalProgress<'a> {
2916 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2917 self.max_path = summary.max_path.as_ref();
2918 self.count += summary.count;
2919 self.visible_count += summary.visible_count;
2920 self.file_count += summary.file_count;
2921 self.visible_file_count += summary.visible_file_count;
2922 }
2923}
2924
2925impl<'a> Default for TraversalProgress<'a> {
2926 fn default() -> Self {
2927 Self {
2928 max_path: Path::new(""),
2929 count: 0,
2930 visible_count: 0,
2931 file_count: 0,
2932 visible_file_count: 0,
2933 }
2934 }
2935}
2936
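/// An ordered iterator over a snapshot's entries that can optionally skip
/// directories and/or ignored entries.
///
/// A minimal usage sketch, mirroring how the tests below traverse a snapshot
/// via its `entries` method:
///
/// ```ignore
/// // Collect the paths of all non-ignored entries, in traversal order.
/// let paths: Vec<_> = snapshot.entries(false).map(|e| e.path.clone()).collect();
/// ```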
2937pub struct Traversal<'a> {
2938 cursor: sum_tree::Cursor<'a, Entry, TraversalProgress<'a>>,
2939 include_ignored: bool,
2940 include_dirs: bool,
2941}
2942
2943impl<'a> Traversal<'a> {
2944 pub fn advance(&mut self) -> bool {
2945 self.advance_to_offset(self.offset() + 1)
2946 }
2947
2948 pub fn advance_to_offset(&mut self, offset: usize) -> bool {
2949 self.cursor.seek_forward(
2950 &TraversalTarget::Count {
2951 count: offset,
2952 include_dirs: self.include_dirs,
2953 include_ignored: self.include_ignored,
2954 },
2955 Bias::Right,
2956 &(),
2957 )
2958 }
2959
2960 pub fn advance_to_sibling(&mut self) -> bool {
2961 while let Some(entry) = self.cursor.item() {
2962 self.cursor.seek_forward(
2963 &TraversalTarget::PathSuccessor(&entry.path),
2964 Bias::Left,
2965 &(),
2966 );
2967 if let Some(entry) = self.cursor.item() {
2968 if (self.include_dirs || !entry.is_dir())
2969 && (self.include_ignored || !entry.is_ignored)
2970 {
2971 return true;
2972 }
2973 }
2974 }
2975 false
2976 }
2977
2978 pub fn entry(&self) -> Option<&'a Entry> {
2979 self.cursor.item()
2980 }
2981
2982 pub fn offset(&self) -> usize {
2983 self.cursor
2984 .start()
2985 .count(self.include_dirs, self.include_ignored)
2986 }
2987}
2988
2989impl<'a> Iterator for Traversal<'a> {
2990 type Item = &'a Entry;
2991
2992 fn next(&mut self) -> Option<Self::Item> {
2993 if let Some(item) = self.entry() {
2994 self.advance();
2995 Some(item)
2996 } else {
2997 None
2998 }
2999 }
3000}
3001
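/// A seek target for a `Traversal` cursor: an exact path, the first entry
/// outside a path's subtree, or an entry count.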
3002#[derive(Debug)]
3003enum TraversalTarget<'a> {
3004 Path(&'a Path),
3005 PathSuccessor(&'a Path),
3006 Count {
3007 count: usize,
3008 include_ignored: bool,
3009 include_dirs: bool,
3010 },
3011}
3012
3013impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> {
3014 fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering {
3015 match self {
3016 TraversalTarget::Path(path) => path.cmp(&cursor_location.max_path),
3017 TraversalTarget::PathSuccessor(path) => {
3018 if !cursor_location.max_path.starts_with(path) {
3019 Ordering::Equal
3020 } else {
3021 Ordering::Greater
3022 }
3023 }
3024 TraversalTarget::Count {
3025 count,
3026 include_dirs,
3027 include_ignored,
3028 } => Ord::cmp(
3029 count,
3030 &cursor_location.count(*include_dirs, *include_ignored),
3031 ),
3032 }
3033 }
3034}
3035
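/// Iterates over the direct children of a directory by advancing a `Traversal`
/// to each successive sibling.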
3036struct ChildEntriesIter<'a> {
3037 parent_path: &'a Path,
3038 traversal: Traversal<'a>,
3039}
3040
3041impl<'a> Iterator for ChildEntriesIter<'a> {
3042 type Item = &'a Entry;
3043
3044 fn next(&mut self) -> Option<Self::Item> {
3045 if let Some(item) = self.traversal.entry() {
3046 if item.path.starts_with(&self.parent_path) {
3047 self.traversal.advance_to_sibling();
3048 return Some(item);
3049 }
3050 }
3051 None
3052 }
3053}
3054
3055impl<'a> From<&'a Entry> for proto::Entry {
3056 fn from(entry: &'a Entry) -> Self {
3057 Self {
3058 id: entry.id.to_proto(),
3059 is_dir: entry.is_dir(),
3060 path: entry.path.to_string_lossy().into(),
3061 inode: entry.inode,
3062 mtime: Some(entry.mtime.into()),
3063 is_symlink: entry.is_symlink,
3064 is_ignored: entry.is_ignored,
3065 }
3066 }
3067}
3068
3069impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
3070 type Error = anyhow::Error;
3071
3072 fn try_from((root_char_bag, entry): (&'a CharBag, proto::Entry)) -> Result<Self> {
3073 if let Some(mtime) = entry.mtime {
3074 let kind = if entry.is_dir {
3075 EntryKind::Dir
3076 } else {
3077 let mut char_bag = *root_char_bag;
3078 char_bag.extend(entry.path.chars().map(|c| c.to_ascii_lowercase()));
3079 EntryKind::File(char_bag)
3080 };
3081 let path: Arc<Path> = PathBuf::from(entry.path).into();
3082 Ok(Entry {
3083 id: ProjectEntryId::from_proto(entry.id),
3084 kind,
3085 path,
3086 inode: entry.inode,
3087 mtime: mtime.into(),
3088 is_symlink: entry.is_symlink,
3089 is_ignored: entry.is_ignored,
3090 })
3091 } else {
3092 Err(anyhow!(
3093 "missing mtime in remote worktree entry {:?}",
3094 entry.path
3095 ))
3096 }
3097 }
3098}
3099
3100#[cfg(test)]
3101mod tests {
3102 use super::*;
3103 use client::test::FakeHttpClient;
3104 use fs::repository::FakeGitRepository;
3105 use fs::{FakeFs, RealFs};
3106 use gpui::{executor::Deterministic, TestAppContext};
3107 use rand::prelude::*;
3108 use serde_json::json;
3109 use std::{env, fmt::Write};
3110 use util::test::temp_tree;
3111
3112 #[gpui::test]
3113 async fn test_traversal(cx: &mut TestAppContext) {
3114 let fs = FakeFs::new(cx.background());
3115 fs.insert_tree(
3116 "/root",
3117 json!({
3118 ".gitignore": "a/b\n",
3119 "a": {
3120 "b": "",
3121 "c": "",
3122 }
3123 }),
3124 )
3125 .await;
3126
3127 let http_client = FakeHttpClient::with_404_response();
3128 let client = cx.read(|cx| Client::new(http_client, cx));
3129
3130 let tree = Worktree::local(
3131 client,
3132 Path::new("/root"),
3133 true,
3134 fs,
3135 Default::default(),
3136 &mut cx.to_async(),
3137 )
3138 .await
3139 .unwrap();
3140 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3141 .await;
3142
3143 tree.read_with(cx, |tree, _| {
3144 assert_eq!(
3145 tree.entries(false)
3146 .map(|entry| entry.path.as_ref())
3147 .collect::<Vec<_>>(),
3148 vec![
3149 Path::new(""),
3150 Path::new(".gitignore"),
3151 Path::new("a"),
3152 Path::new("a/c"),
3153 ]
3154 );
3155 assert_eq!(
3156 tree.entries(true)
3157 .map(|entry| entry.path.as_ref())
3158 .collect::<Vec<_>>(),
3159 vec![
3160 Path::new(""),
3161 Path::new(".gitignore"),
3162 Path::new("a"),
3163 Path::new("a/b"),
3164 Path::new("a/c"),
3165 ]
3166 );
3167 })
3168 }
3169
3170 #[gpui::test(iterations = 10)]
3171 async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
3172 let fs = FakeFs::new(cx.background());
3173 fs.insert_tree(
3174 "/root",
3175 json!({
3176 "lib": {
3177 "a": {
3178 "a.txt": ""
3179 },
3180 "b": {
3181 "b.txt": ""
3182 }
3183 }
3184 }),
3185 )
3186 .await;
3187 fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
3188 fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
3189
3190 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3191 let tree = Worktree::local(
3192 client,
3193 Path::new("/root"),
3194 true,
3195 fs.clone(),
3196 Default::default(),
3197 &mut cx.to_async(),
3198 )
3199 .await
3200 .unwrap();
3201
3202 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3203 .await;
3204
3205 tree.read_with(cx, |tree, _| {
3206 assert_eq!(
3207 tree.entries(false)
3208 .map(|entry| entry.path.as_ref())
3209 .collect::<Vec<_>>(),
3210 vec![
3211 Path::new(""),
3212 Path::new("lib"),
3213 Path::new("lib/a"),
3214 Path::new("lib/a/a.txt"),
3215 Path::new("lib/a/lib"),
3216 Path::new("lib/b"),
3217 Path::new("lib/b/b.txt"),
3218 Path::new("lib/b/lib"),
3219 ]
3220 );
3221 });
3222
3223 fs.rename(
3224 Path::new("/root/lib/a/lib"),
3225 Path::new("/root/lib/a/lib-2"),
3226 Default::default(),
3227 )
3228 .await
3229 .unwrap();
3230 executor.run_until_parked();
3231 tree.read_with(cx, |tree, _| {
3232 assert_eq!(
3233 tree.entries(false)
3234 .map(|entry| entry.path.as_ref())
3235 .collect::<Vec<_>>(),
3236 vec![
3237 Path::new(""),
3238 Path::new("lib"),
3239 Path::new("lib/a"),
3240 Path::new("lib/a/a.txt"),
3241 Path::new("lib/a/lib-2"),
3242 Path::new("lib/b"),
3243 Path::new("lib/b/b.txt"),
3244 Path::new("lib/b/lib"),
3245 ]
3246 );
3247 });
3248 }
3249
3250 #[gpui::test]
3251 async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
3252 let parent_dir = temp_tree(json!({
3253 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
3254 "tree": {
3255 ".git": {},
3256 ".gitignore": "ignored-dir\n",
3257 "tracked-dir": {
3258 "tracked-file1": "",
3259 "ancestor-ignored-file1": "",
3260 },
3261 "ignored-dir": {
3262 "ignored-file1": ""
3263 }
3264 }
3265 }));
3266 let dir = parent_dir.path().join("tree");
3267
3268 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3269
3270 let tree = Worktree::local(
3271 client,
3272 dir.as_path(),
3273 true,
3274 Arc::new(RealFs),
3275 Default::default(),
3276 &mut cx.to_async(),
3277 )
3278 .await
3279 .unwrap();
3280 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3281 .await;
3282 tree.flush_fs_events(cx).await;
3283 cx.read(|cx| {
3284 let tree = tree.read(cx);
3285 assert!(
3286 !tree
3287 .entry_for_path("tracked-dir/tracked-file1")
3288 .unwrap()
3289 .is_ignored
3290 );
3291 assert!(
3292 tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
3293 .unwrap()
3294 .is_ignored
3295 );
3296 assert!(
3297 tree.entry_for_path("ignored-dir/ignored-file1")
3298 .unwrap()
3299 .is_ignored
3300 );
3301 });
3302
3303 std::fs::write(dir.join("tracked-dir/tracked-file2"), "").unwrap();
3304 std::fs::write(dir.join("tracked-dir/ancestor-ignored-file2"), "").unwrap();
3305 std::fs::write(dir.join("ignored-dir/ignored-file2"), "").unwrap();
3306 tree.flush_fs_events(cx).await;
3307 cx.read(|cx| {
3308 let tree = tree.read(cx);
3309 assert!(
3310 !tree
3311 .entry_for_path("tracked-dir/tracked-file2")
3312 .unwrap()
3313 .is_ignored
3314 );
3315 assert!(
3316 tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
3317 .unwrap()
3318 .is_ignored
3319 );
3320 assert!(
3321 tree.entry_for_path("ignored-dir/ignored-file2")
3322 .unwrap()
3323 .is_ignored
3324 );
3325 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
3326 });
3327 }
3328
3329 #[gpui::test]
3330 async fn test_git_repository_for_path(cx: &mut TestAppContext) {
3331 let root = temp_tree(json!({
3332 "dir1": {
3333 ".git": {},
3334 "deps": {
3335 "dep1": {
3336 ".git": {},
3337 "src": {
3338 "a.txt": ""
3339 }
3340 }
3341 },
3342 "src": {
3343 "b.txt": ""
3344 }
3345 },
3346 "c.txt": "",
3347 }));
3348
3349 let http_client = FakeHttpClient::with_404_response();
3350 let client = cx.read(|cx| Client::new(http_client, cx));
3351 let tree = Worktree::local(
3352 client,
3353 root.path(),
3354 true,
3355 Arc::new(RealFs),
3356 Default::default(),
3357 &mut cx.to_async(),
3358 )
3359 .await
3360 .unwrap();
3361
3362 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3363 .await;
3364 tree.flush_fs_events(cx).await;
3365
3366 tree.read_with(cx, |tree, _cx| {
3367 let tree = tree.as_local().unwrap();
3368
3369 assert!(tree.repo_for("c.txt".as_ref()).is_none());
3370
3371 let repo = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap();
3372 assert_eq!(repo.content_path.as_ref(), Path::new("dir1"));
3373 assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/.git"));
3374
3375 let repo = tree.repo_for("dir1/deps/dep1/src/a.txt".as_ref()).unwrap();
3376 assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1"));
3377 assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/deps/dep1/.git"),);
3378 });
3379
3380 let original_scan_id = tree.read_with(cx, |tree, _cx| {
3381 let tree = tree.as_local().unwrap();
3382 tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id
3383 });
3384
3385 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
3386 tree.flush_fs_events(cx).await;
3387
3388 tree.read_with(cx, |tree, _cx| {
3389 let tree = tree.as_local().unwrap();
3390 let new_scan_id = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id;
3391 assert_ne!(
3392 original_scan_id, new_scan_id,
3393 "original {original_scan_id}, new {new_scan_id}"
3394 );
3395 });
3396
3397 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
3398 tree.flush_fs_events(cx).await;
3399
3400 tree.read_with(cx, |tree, _cx| {
3401 let tree = tree.as_local().unwrap();
3402
3403 assert!(tree.repo_for("dir1/src/b.txt".as_ref()).is_none());
3404 });
3405 }
3406
3407 #[test]
3408 fn test_changed_repos() {
3409 fn fake_entry(git_dir_path: impl AsRef<Path>, scan_id: usize) -> GitRepositoryEntry {
3410 GitRepositoryEntry {
3411 repo: Arc::new(Mutex::new(FakeGitRepository::default())),
3412 scan_id,
3413 content_path: git_dir_path.as_ref().parent().unwrap().into(),
3414 git_dir_path: git_dir_path.as_ref().into(),
3415 }
3416 }
3417
3418 let prev_repos: Vec<GitRepositoryEntry> = vec![
3419 fake_entry("/.git", 0),
3420 fake_entry("/a/.git", 0),
3421 fake_entry("/a/b/.git", 0),
3422 ];
3423
3424 let new_repos: Vec<GitRepositoryEntry> = vec![
3425 fake_entry("/a/.git", 1),
3426 fake_entry("/a/b/.git", 0),
3427 fake_entry("/a/c/.git", 0),
3428 ];
3429
3430 let res = LocalWorktree::changed_repos(&prev_repos, &new_repos);
3431
3432 // Deletion retained
3433 assert!(res
3434 .iter()
3435 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/.git") && repo.scan_id == 0)
3436 .is_some());
3437
3438 // Update retained
3439 assert!(res
3440 .iter()
3441 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/.git") && repo.scan_id == 1)
3442 .is_some());
3443
3444 // Addition retained
3445 assert!(res
3446 .iter()
3447 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/c/.git") && repo.scan_id == 0)
3448 .is_some());
3449
        // No change, not retained
3451 assert!(res
3452 .iter()
3453 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/b/.git") && repo.scan_id == 0)
3454 .is_none());
3455 }
3456
3457 #[gpui::test]
3458 async fn test_write_file(cx: &mut TestAppContext) {
3459 let dir = temp_tree(json!({
3460 ".git": {},
3461 ".gitignore": "ignored-dir\n",
3462 "tracked-dir": {},
3463 "ignored-dir": {}
3464 }));
3465
3466 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3467
3468 let tree = Worktree::local(
3469 client,
3470 dir.path(),
3471 true,
3472 Arc::new(RealFs),
3473 Default::default(),
3474 &mut cx.to_async(),
3475 )
3476 .await
3477 .unwrap();
3478 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3479 .await;
3480 tree.flush_fs_events(cx).await;
3481
3482 tree.update(cx, |tree, cx| {
3483 tree.as_local().unwrap().write_file(
3484 Path::new("tracked-dir/file.txt"),
3485 "hello".into(),
3486 Default::default(),
3487 cx,
3488 )
3489 })
3490 .await
3491 .unwrap();
3492 tree.update(cx, |tree, cx| {
3493 tree.as_local().unwrap().write_file(
3494 Path::new("ignored-dir/file.txt"),
3495 "world".into(),
3496 Default::default(),
3497 cx,
3498 )
3499 })
3500 .await
3501 .unwrap();
3502
3503 tree.read_with(cx, |tree, _| {
3504 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
3505 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
3506 assert!(!tracked.is_ignored);
3507 assert!(ignored.is_ignored);
3508 });
3509 }
3510
3511 #[gpui::test(iterations = 30)]
3512 async fn test_create_directory(cx: &mut TestAppContext) {
3513 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3514
3515 let fs = FakeFs::new(cx.background());
3516 fs.insert_tree(
3517 "/a",
3518 json!({
3519 "b": {},
3520 "c": {},
3521 "d": {},
3522 }),
3523 )
3524 .await;
3525
3526 let tree = Worktree::local(
3527 client,
3528 "/a".as_ref(),
3529 true,
3530 fs,
3531 Default::default(),
3532 &mut cx.to_async(),
3533 )
3534 .await
3535 .unwrap();
3536
3537 let entry = tree
3538 .update(cx, |tree, cx| {
3539 tree.as_local_mut()
3540 .unwrap()
3541 .create_entry("a/e".as_ref(), true, cx)
3542 })
3543 .await
3544 .unwrap();
3545 assert!(entry.is_dir());
3546
3547 cx.foreground().run_until_parked();
3548 tree.read_with(cx, |tree, _| {
3549 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
3550 });
3551 }
3552
3553 #[gpui::test(iterations = 100)]
3554 async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
3555 let operations = env::var("OPERATIONS")
3556 .map(|o| o.parse().unwrap())
3557 .unwrap_or(40);
3558 let initial_entries = env::var("INITIAL_ENTRIES")
3559 .map(|o| o.parse().unwrap())
3560 .unwrap_or(20);
3561
3562 let root_dir = Path::new("/test");
3563 let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
3564 fs.as_fake().insert_tree(root_dir, json!({})).await;
3565 for _ in 0..initial_entries {
3566 randomly_mutate_tree(&fs, root_dir, 1.0, &mut rng).await;
3567 }
3568 log::info!("generated initial tree");
3569
3570 let next_entry_id = Arc::new(AtomicUsize::default());
3571 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3572 let worktree = Worktree::local(
3573 client.clone(),
3574 root_dir,
3575 true,
3576 fs.clone(),
3577 next_entry_id.clone(),
3578 &mut cx.to_async(),
3579 )
3580 .await
3581 .unwrap();
3582
3583 worktree
3584 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
3585 .await;
3586
3587 // After the initial scan is complete, the `UpdatedEntries` event can
3588 // be used to follow along with all changes to the worktree's snapshot.
3589 worktree.update(cx, |tree, cx| {
3590 let mut paths = tree
3591 .as_local()
3592 .unwrap()
3593 .paths()
3594 .cloned()
3595 .collect::<Vec<_>>();
3596
3597 cx.subscribe(&worktree, move |tree, _, event, _| {
3598 if let Event::UpdatedEntries(changes) = event {
3599 for (path, change_type) in changes.iter() {
3600 let path = path.clone();
3601 let ix = match paths.binary_search(&path) {
3602 Ok(ix) | Err(ix) => ix,
3603 };
3604 match change_type {
3605 PathChange::Added => {
3606 assert_ne!(paths.get(ix), Some(&path));
3607 paths.insert(ix, path);
3608 }
3609 PathChange::Removed => {
3610 assert_eq!(paths.get(ix), Some(&path));
3611 paths.remove(ix);
3612 }
3613 PathChange::Updated => {
3614 assert_eq!(paths.get(ix), Some(&path));
3615 }
3616 PathChange::AddedOrUpdated => {
3617 if paths[ix] != path {
3618 paths.insert(ix, path);
3619 }
3620 }
3621 }
3622 }
3623 let new_paths = tree.paths().cloned().collect::<Vec<_>>();
3624 assert_eq!(paths, new_paths, "incorrect changes: {:?}", changes);
3625 }
3626 })
3627 .detach();
3628 });
3629
3630 let mut snapshots = Vec::new();
3631 let mut mutations_len = operations;
3632 while mutations_len > 1 {
3633 randomly_mutate_tree(&fs, root_dir, 1.0, &mut rng).await;
3634 let buffered_event_count = fs.as_fake().buffered_event_count().await;
3635 if buffered_event_count > 0 && rng.gen_bool(0.3) {
3636 let len = rng.gen_range(0..=buffered_event_count);
3637 log::info!("flushing {} events", len);
3638 fs.as_fake().flush_events(len).await;
3639 } else {
3640 randomly_mutate_tree(&fs, root_dir, 0.6, &mut rng).await;
3641 mutations_len -= 1;
3642 }
3643
3644 cx.foreground().run_until_parked();
3645 if rng.gen_bool(0.2) {
3646 log::info!("storing snapshot {}", snapshots.len());
3647 let snapshot =
3648 worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
3649 snapshots.push(snapshot);
3650 }
3651 }
3652
3653 log::info!("quiescing");
3654 fs.as_fake().flush_events(usize::MAX).await;
3655 cx.foreground().run_until_parked();
3656 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
3657 snapshot.check_invariants();
3658
3659 {
3660 let new_worktree = Worktree::local(
3661 client.clone(),
3662 root_dir,
3663 true,
3664 fs.clone(),
3665 next_entry_id,
3666 &mut cx.to_async(),
3667 )
3668 .await
3669 .unwrap();
3670 new_worktree
3671 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
3672 .await;
3673 let new_snapshot =
3674 new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
3675 assert_eq!(snapshot.to_vec(true), new_snapshot.to_vec(true));
3676 }
3677
3678 for (i, mut prev_snapshot) in snapshots.into_iter().enumerate() {
3679 let include_ignored = rng.gen::<bool>();
3680 if !include_ignored {
3681 let mut entries_by_path_edits = Vec::new();
3682 let mut entries_by_id_edits = Vec::new();
3683 for entry in prev_snapshot
3684 .entries_by_id
3685 .cursor::<()>()
3686 .filter(|e| e.is_ignored)
3687 {
3688 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
3689 entries_by_id_edits.push(Edit::Remove(entry.id));
3690 }
3691
3692 prev_snapshot
3693 .entries_by_path
3694 .edit(entries_by_path_edits, &());
3695 prev_snapshot.entries_by_id.edit(entries_by_id_edits, &());
3696 }
3697
3698 let update = snapshot.build_update(&prev_snapshot, 0, 0, include_ignored);
3699 prev_snapshot.apply_remote_update(update.clone()).unwrap();
3700 assert_eq!(
3701 prev_snapshot.to_vec(include_ignored),
3702 snapshot.to_vec(include_ignored),
3703 "wrong update for snapshot {i}. update: {:?}",
3704 update
3705 );
3706 }
3707 }
3708
3709 async fn randomly_mutate_tree(
3710 fs: &Arc<dyn Fs>,
3711 root_path: &Path,
3712 insertion_probability: f64,
3713 rng: &mut impl Rng,
3714 ) {
3715 let mut files = Vec::new();
3716 let mut dirs = Vec::new();
3717 for path in fs.as_fake().paths().await {
3718 if path.starts_with(root_path) {
3719 if fs.is_file(&path).await {
3720 files.push(path);
3721 } else {
3722 dirs.push(path);
3723 }
3724 }
3725 }
3726
3727 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
3728 let path = dirs.choose(rng).unwrap();
3729 let new_path = path.join(gen_name(rng));
3730
3731 if rng.gen() {
3732 log::info!(
3733 "creating dir {:?}",
3734 new_path.strip_prefix(root_path).unwrap()
3735 );
3736 fs.create_dir(&new_path).await.unwrap();
3737 } else {
3738 log::info!(
3739 "creating file {:?}",
3740 new_path.strip_prefix(root_path).unwrap()
3741 );
3742 fs.create_file(&new_path, Default::default()).await.unwrap();
3743 }
3744 } else if rng.gen_bool(0.05) {
3745 let ignore_dir_path = dirs.choose(rng).unwrap();
3746 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
3747
3748 let subdirs = dirs
3749 .iter()
3750 .filter(|d| d.starts_with(&ignore_dir_path))
3751 .cloned()
3752 .collect::<Vec<_>>();
3753 let subfiles = files
3754 .iter()
3755 .filter(|d| d.starts_with(&ignore_dir_path))
3756 .cloned()
3757 .collect::<Vec<_>>();
3758 let files_to_ignore = {
3759 let len = rng.gen_range(0..=subfiles.len());
3760 subfiles.choose_multiple(rng, len)
3761 };
3762 let dirs_to_ignore = {
3763 let len = rng.gen_range(0..subdirs.len());
3764 subdirs.choose_multiple(rng, len)
3765 };
3766
3767 let mut ignore_contents = String::new();
3768 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
3769 writeln!(
3770 ignore_contents,
3771 "{}",
3772 path_to_ignore
3773 .strip_prefix(&ignore_dir_path)
3774 .unwrap()
3775 .to_str()
3776 .unwrap()
3777 )
3778 .unwrap();
3779 }
3780 log::info!(
3781 "creating gitignore {:?} with contents:\n{}",
3782 ignore_path.strip_prefix(&root_path).unwrap(),
3783 ignore_contents
3784 );
3785 fs.save(
3786 &ignore_path,
3787 &ignore_contents.as_str().into(),
3788 Default::default(),
3789 )
3790 .await
3791 .unwrap();
3792 } else {
3793 let old_path = {
3794 let file_path = files.choose(rng);
3795 let dir_path = dirs[1..].choose(rng);
3796 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
3797 };
3798
3799 let is_rename = rng.gen();
3800 if is_rename {
3801 let new_path_parent = dirs
3802 .iter()
3803 .filter(|d| !d.starts_with(old_path))
3804 .choose(rng)
3805 .unwrap();
3806
3807 let overwrite_existing_dir =
3808 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
3809 let new_path = if overwrite_existing_dir {
3810 fs.remove_dir(
3811 &new_path_parent,
3812 RemoveOptions {
3813 recursive: true,
3814 ignore_if_not_exists: true,
3815 },
3816 )
3817 .await
3818 .unwrap();
3819 new_path_parent.to_path_buf()
3820 } else {
3821 new_path_parent.join(gen_name(rng))
3822 };
3823
3824 log::info!(
3825 "renaming {:?} to {}{:?}",
3826 old_path.strip_prefix(&root_path).unwrap(),
3827 if overwrite_existing_dir {
3828 "overwrite "
3829 } else {
3830 ""
3831 },
3832 new_path.strip_prefix(&root_path).unwrap()
3833 );
3834 fs.rename(
3835 &old_path,
3836 &new_path,
3837 fs::RenameOptions {
3838 overwrite: true,
3839 ignore_if_exists: true,
3840 },
3841 )
3842 .await
3843 .unwrap();
3844 } else if fs.is_file(&old_path).await {
3845 log::info!(
3846 "deleting file {:?}",
3847 old_path.strip_prefix(&root_path).unwrap()
3848 );
3849 fs.remove_file(old_path, Default::default()).await.unwrap();
3850 } else {
3851 log::info!(
3852 "deleting dir {:?}",
3853 old_path.strip_prefix(&root_path).unwrap()
3854 );
3855 fs.remove_dir(
3856 &old_path,
3857 RemoveOptions {
3858 recursive: true,
3859 ignore_if_not_exists: true,
3860 },
3861 )
3862 .await
3863 .unwrap();
3864 }
3865 }
3866 }
3867
3868 fn gen_name(rng: &mut impl Rng) -> String {
3869 (0..6)
3870 .map(|_| rng.sample(rand::distributions::Alphanumeric))
3871 .map(char::from)
3872 .collect()
3873 }
3874
3875 impl LocalSnapshot {
3876 fn check_invariants(&self) {
3877 let mut files = self.files(true, 0);
3878 let mut visible_files = self.files(false, 0);
3879 for entry in self.entries_by_path.cursor::<()>() {
3880 if entry.is_file() {
3881 assert_eq!(files.next().unwrap().inode, entry.inode);
3882 if !entry.is_ignored {
3883 assert_eq!(visible_files.next().unwrap().inode, entry.inode);
3884 }
3885 }
3886 }
3887 assert!(files.next().is_none());
3888 assert!(visible_files.next().is_none());
3889
3890 let mut bfs_paths = Vec::new();
3891 let mut stack = vec![Path::new("")];
3892 while let Some(path) = stack.pop() {
3893 bfs_paths.push(path);
3894 let ix = stack.len();
3895 for child_entry in self.child_entries(path) {
3896 stack.insert(ix, &child_entry.path);
3897 }
3898 }
3899
3900 let dfs_paths_via_iter = self
3901 .entries_by_path
3902 .cursor::<()>()
3903 .map(|e| e.path.as_ref())
3904 .collect::<Vec<_>>();
3905 assert_eq!(bfs_paths, dfs_paths_via_iter);
3906
3907 let dfs_paths_via_traversal = self
3908 .entries(true)
3909 .map(|e| e.path.as_ref())
3910 .collect::<Vec<_>>();
3911 assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);
3912
3913 for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
3914 let ignore_parent_path =
3915 ignore_parent_abs_path.strip_prefix(&self.abs_path).unwrap();
3916 assert!(self.entry_for_path(&ignore_parent_path).is_some());
3917 assert!(self
3918 .entry_for_path(ignore_parent_path.join(&*GITIGNORE))
3919 .is_some());
3920 }
3921 }
3922
3923 fn to_vec(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> {
3924 let mut paths = Vec::new();
3925 for entry in self.entries_by_path.cursor::<()>() {
3926 if include_ignored || !entry.is_ignored {
3927 paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
3928 }
3929 }
3930 paths.sort_by(|a, b| a.0.cmp(b.0));
3931 paths
3932 }
3933 }
3934}