1use super::{ignore::IgnoreStack, DiagnosticSummary};
2use crate::{copy_recursive, ProjectEntryId, RemoveOptions};
3use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
4use anyhow::{anyhow, Context, Result};
5use client::{proto, Client};
6use clock::ReplicaId;
7use collections::{HashMap, VecDeque};
8use fs::LineEnding;
9use fs::{repository::GitRepository, Fs};
10use futures::{
11 channel::{
12 mpsc::{self, UnboundedSender},
13 oneshot,
14 },
15 Stream, StreamExt,
16};
17use fuzzy::CharBag;
18use git::{DOT_GIT, GITIGNORE};
19use gpui::{
20 executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
21 Task,
22};
23use language::File as _;
24use language::{
25 proto::{
26 deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
27 serialize_version,
28 },
29 Buffer, DiagnosticEntry, PointUtf16, Rope, RopeFingerprint, Unclipped,
30};
31use parking_lot::Mutex;
32use postage::{
33 prelude::{Sink as _, Stream as _},
34 watch,
35};
36use smol::channel::{self, Sender};
37use std::{
38 any::Any,
39 cmp::{self, Ordering},
40 convert::TryFrom,
41 ffi::OsStr,
42 fmt,
43 future::Future,
44 mem,
45 ops::{Deref, DerefMut},
46 path::{Path, PathBuf},
47 sync::{atomic::AtomicUsize, Arc},
48 task::Poll,
49 time::{Duration, SystemTime},
50};
51use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
52use util::paths::HOME;
53use util::{ResultExt, TryFutureExt};
54
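/// Identifies a worktree within a project. For a local worktree this is
/// derived from the id of the model handle that owns it.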
55#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
56pub struct WorktreeId(usize);
57
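/// A tree of files and directories tracked by the project, either backed by
/// the local filesystem or mirrored from a remote collaborator over RPC.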
58#[allow(clippy::large_enum_variant)]
59pub enum Worktree {
60 Local(LocalWorktree),
61 Remote(RemoteWorktree),
62}
63
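/// A worktree backed by the local filesystem. A background scanner task keeps
/// `background_snapshot` in sync with filesystem events and streams updated
/// snapshots to this model.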
64pub struct LocalWorktree {
65 snapshot: LocalSnapshot,
66 background_snapshot: Arc<Mutex<LocalSnapshot>>,
67 is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
68 _background_scanner_task: Task<()>,
69 share: Option<ShareState>,
70 diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<Unclipped<PointUtf16>>>>,
71 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
72 client: Arc<Client>,
73 fs: Arc<dyn Fs>,
74 visible: bool,
75}
76
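/// A worktree replicated from a remote host. Its snapshot is kept up to date
/// by applying `proto::UpdateWorktree` messages received from the host.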
77pub struct RemoteWorktree {
78 pub snapshot: Snapshot,
79 pub(crate) background_snapshot: Arc<Mutex<Snapshot>>,
80 project_id: u64,
81 client: Arc<Client>,
82 updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
83 snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
84 replica_id: ReplicaId,
85 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
86 visible: bool,
87 disconnected: bool,
88}
89
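/// An immutable view of a worktree's entries at a particular scan, indexed
/// both by path and by entry id.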
90#[derive(Clone)]
91pub struct Snapshot {
92 id: WorktreeId,
93 abs_path: Arc<Path>,
94 root_name: String,
95 root_char_bag: CharBag,
96 entries_by_path: SumTree<Entry>,
97 entries_by_id: SumTree<PathEntry>,
98 scan_id: usize,
99 completed_scan_id: usize,
100}
101
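/// A git repository discovered within the worktree, along with the paths it
/// governs.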
102#[derive(Clone)]
103pub struct GitRepositoryEntry {
104 pub(crate) repo: Arc<Mutex<dyn GitRepository>>,
105
106 pub(crate) scan_id: usize,
    // Path to the folder containing the .git file or directory
108 pub(crate) content_path: Arc<Path>,
109 // Path to the actual .git folder.
110 // Note: if .git is a file, this points to the folder indicated by the .git file
111 pub(crate) git_dir_path: Arc<Path>,
112}
113
114impl std::fmt::Debug for GitRepositoryEntry {
115 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
116 f.debug_struct("GitRepositoryEntry")
117 .field("content_path", &self.content_path)
118 .field("git_dir_path", &self.git_dir_path)
119 .field("libgit_repository", &"LibGitRepository")
120 .finish()
121 }
122}
123
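/// A `Snapshot` extended with local-only state: loaded `.gitignore` files,
/// discovered git repositories, and the bookkeeping used to reuse entry ids
/// across scans.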
124#[derive(Debug)]
125pub struct LocalSnapshot {
126 ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
127 git_repositories: Vec<GitRepositoryEntry>,
128 removed_entry_ids: HashMap<u64, ProjectEntryId>,
129 next_entry_id: Arc<AtomicUsize>,
130 snapshot: Snapshot,
131}
132
133impl Clone for LocalSnapshot {
134 fn clone(&self) -> Self {
135 Self {
136 ignores_by_parent_abs_path: self.ignores_by_parent_abs_path.clone(),
137 git_repositories: self.git_repositories.iter().cloned().collect(),
138 removed_entry_ids: self.removed_entry_ids.clone(),
139 next_entry_id: self.next_entry_id.clone(),
140 snapshot: self.snapshot.clone(),
141 }
142 }
143}
144
145impl Deref for LocalSnapshot {
146 type Target = Snapshot;
147
148 fn deref(&self) -> &Self::Target {
149 &self.snapshot
150 }
151}
152
153impl DerefMut for LocalSnapshot {
154 fn deref_mut(&mut self) -> &mut Self::Target {
155 &mut self.snapshot
156 }
157}
158
159#[derive(Clone, Debug)]
160enum ScanState {
161 /// The worktree is performing its initial scan of the filesystem.
162 Initializing(LocalSnapshot),
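    /// The worktree has completed its initial scan of the filesystem.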
163 Initialized(LocalSnapshot),
164 /// The worktree is updating in response to filesystem events.
165 Updating,
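    /// The worktree finished processing a batch of filesystem events,
    /// producing a new snapshot and the set of changed paths.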
166 Updated(LocalSnapshot, HashMap<Arc<Path>, PathChange>),
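    /// Scanning failed.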
167 Err(Arc<anyhow::Error>),
168}
169
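/// State kept while a worktree is shared with collaborators: the channel used
/// to stream snapshots and the task that maintains the remote copy.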
170struct ShareState {
171 project_id: u64,
172 snapshots_tx: watch::Sender<LocalSnapshot>,
173 resume_updates: watch::Sender<()>,
174 _maintain_remote_snapshot: Task<Option<()>>,
175}
176
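/// Events emitted by a `Worktree` model.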
177pub enum Event {
178 UpdatedEntries(HashMap<Arc<Path>, PathChange>),
179 UpdatedGitRepositories(Vec<GitRepositoryEntry>),
180}
181
182impl Entity for Worktree {
183 type Event = Event;
184}
185
186impl Worktree {
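    /// Creates a worktree rooted at `path` on the local filesystem and spawns
    /// a background scanner that keeps its snapshot in sync with filesystem
    /// events.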
187 pub async fn local(
188 client: Arc<Client>,
189 path: impl Into<Arc<Path>>,
190 visible: bool,
191 fs: Arc<dyn Fs>,
192 next_entry_id: Arc<AtomicUsize>,
193 cx: &mut AsyncAppContext,
194 ) -> Result<ModelHandle<Self>> {
        // After determining whether the root entry is a file or a directory, populate the
        // snapshot's "root name", which is used for fuzzy matching.
197 let abs_path = path.into();
198 let metadata = fs
199 .metadata(&abs_path)
200 .await
201 .context("failed to stat worktree path")?;
202
203 Ok(cx.add_model(move |cx: &mut ModelContext<Worktree>| {
204 let root_name = abs_path
205 .file_name()
206 .map_or(String::new(), |f| f.to_string_lossy().to_string());
207
208 let mut snapshot = LocalSnapshot {
209 ignores_by_parent_abs_path: Default::default(),
210 git_repositories: Default::default(),
211 removed_entry_ids: Default::default(),
212 next_entry_id,
213 snapshot: Snapshot {
214 id: WorktreeId::from_usize(cx.model_id()),
215 abs_path: abs_path.clone(),
216 root_name: root_name.clone(),
217 root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
218 entries_by_path: Default::default(),
219 entries_by_id: Default::default(),
220 scan_id: 0,
221 completed_scan_id: 0,
222 },
223 };
224
225 if let Some(metadata) = metadata {
226 snapshot.insert_entry(
227 Entry::new(
228 Arc::from(Path::new("")),
229 &metadata,
230 &snapshot.next_entry_id,
231 snapshot.root_char_bag,
232 ),
233 fs.as_ref(),
234 );
235 }
236
237 let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
238 let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
239
240 cx.spawn_weak(|this, mut cx| async move {
241 while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
242 this.update(&mut cx, |this, cx| {
243 this.as_local_mut()
244 .unwrap()
245 .background_scanner_updated(state, cx);
246 });
247 }
248 })
249 .detach();
250
251 let background_scanner_task = cx.background().spawn({
252 let fs = fs.clone();
253 let background_snapshot = background_snapshot.clone();
254 let background = cx.background().clone();
255 async move {
256 let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
257 BackgroundScanner::new(background_snapshot, scan_states_tx, fs, background)
258 .run(events)
259 .await;
260 }
261 });
262
263 Worktree::Local(LocalWorktree {
264 snapshot,
265 background_snapshot,
266 is_scanning: watch::channel_with(true),
267 share: None,
268 _background_scanner_task: background_scanner_task,
269 diagnostics: Default::default(),
270 diagnostic_summaries: Default::default(),
271 client,
272 fs,
273 visible,
274 })
275 }))
276 }
277
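    /// Creates a worktree that mirrors a worktree on a remote host, applying
    /// worktree updates received from the host as they arrive.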
278 pub fn remote(
279 project_remote_id: u64,
280 replica_id: ReplicaId,
281 worktree: proto::WorktreeMetadata,
282 client: Arc<Client>,
283 cx: &mut MutableAppContext,
284 ) -> ModelHandle<Self> {
285 cx.add_model(|cx: &mut ModelContext<Self>| {
286 let snapshot = Snapshot {
287 id: WorktreeId(worktree.id as usize),
288 abs_path: Arc::from(PathBuf::from(worktree.abs_path)),
289 root_name: worktree.root_name.clone(),
290 root_char_bag: worktree
291 .root_name
292 .chars()
293 .map(|c| c.to_ascii_lowercase())
294 .collect(),
295 entries_by_path: Default::default(),
296 entries_by_id: Default::default(),
297 scan_id: 0,
298 completed_scan_id: 0,
299 };
300
301 let (updates_tx, mut updates_rx) = mpsc::unbounded();
302 let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
303 let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
304
305 cx.background()
306 .spawn({
307 let background_snapshot = background_snapshot.clone();
308 async move {
309 while let Some(update) = updates_rx.next().await {
310 if let Err(error) =
311 background_snapshot.lock().apply_remote_update(update)
312 {
313 log::error!("error applying worktree update: {}", error);
314 }
315 snapshot_updated_tx.send(()).await.ok();
316 }
317 }
318 })
319 .detach();
320
321 cx.spawn_weak(|this, mut cx| async move {
322 while (snapshot_updated_rx.recv().await).is_some() {
323 if let Some(this) = this.upgrade(&cx) {
324 this.update(&mut cx, |this, cx| {
325 let this = this.as_remote_mut().unwrap();
326 this.snapshot = this.background_snapshot.lock().clone();
327 cx.emit(Event::UpdatedEntries(Default::default()));
328 cx.notify();
329 while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
330 if this.observed_snapshot(*scan_id) {
331 let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
332 let _ = tx.send(());
333 } else {
334 break;
335 }
336 }
337 });
338 } else {
339 break;
340 }
341 }
342 })
343 .detach();
344
345 Worktree::Remote(RemoteWorktree {
346 project_id: project_remote_id,
347 replica_id,
348 snapshot: snapshot.clone(),
349 background_snapshot,
350 updates_tx: Some(updates_tx),
351 snapshot_subscriptions: Default::default(),
352 client: client.clone(),
353 diagnostic_summaries: Default::default(),
354 visible: worktree.visible,
355 disconnected: false,
356 })
357 })
358 }
359
360 pub fn as_local(&self) -> Option<&LocalWorktree> {
361 if let Worktree::Local(worktree) = self {
362 Some(worktree)
363 } else {
364 None
365 }
366 }
367
368 pub fn as_remote(&self) -> Option<&RemoteWorktree> {
369 if let Worktree::Remote(worktree) = self {
370 Some(worktree)
371 } else {
372 None
373 }
374 }
375
376 pub fn as_local_mut(&mut self) -> Option<&mut LocalWorktree> {
377 if let Worktree::Local(worktree) = self {
378 Some(worktree)
379 } else {
380 None
381 }
382 }
383
384 pub fn as_remote_mut(&mut self) -> Option<&mut RemoteWorktree> {
385 if let Worktree::Remote(worktree) = self {
386 Some(worktree)
387 } else {
388 None
389 }
390 }
391
392 pub fn is_local(&self) -> bool {
393 matches!(self, Worktree::Local(_))
394 }
395
396 pub fn is_remote(&self) -> bool {
397 !self.is_local()
398 }
399
400 pub fn snapshot(&self) -> Snapshot {
401 match self {
402 Worktree::Local(worktree) => worktree.snapshot().snapshot,
403 Worktree::Remote(worktree) => worktree.snapshot(),
404 }
405 }
406
407 pub fn scan_id(&self) -> usize {
408 match self {
409 Worktree::Local(worktree) => worktree.snapshot.scan_id,
410 Worktree::Remote(worktree) => worktree.snapshot.scan_id,
411 }
412 }
413
414 pub fn completed_scan_id(&self) -> usize {
415 match self {
416 Worktree::Local(worktree) => worktree.snapshot.completed_scan_id,
417 Worktree::Remote(worktree) => worktree.snapshot.completed_scan_id,
418 }
419 }
420
421 pub fn is_visible(&self) -> bool {
422 match self {
423 Worktree::Local(worktree) => worktree.visible,
424 Worktree::Remote(worktree) => worktree.visible,
425 }
426 }
427
428 pub fn replica_id(&self) -> ReplicaId {
429 match self {
430 Worktree::Local(_) => 0,
431 Worktree::Remote(worktree) => worktree.replica_id,
432 }
433 }
434
435 pub fn diagnostic_summaries(
436 &self,
437 ) -> impl Iterator<Item = (Arc<Path>, DiagnosticSummary)> + '_ {
438 match self {
439 Worktree::Local(worktree) => &worktree.diagnostic_summaries,
440 Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
441 }
442 .iter()
443 .map(|(path, summary)| (path.0.clone(), *summary))
444 }
445
446 pub fn abs_path(&self) -> Arc<Path> {
447 match self {
448 Worktree::Local(worktree) => worktree.abs_path.clone(),
449 Worktree::Remote(worktree) => worktree.abs_path.clone(),
450 }
451 }
452}
453
454impl LocalWorktree {
455 pub fn contains_abs_path(&self, path: &Path) -> bool {
456 path.starts_with(&self.abs_path)
457 }
458
459 fn absolutize(&self, path: &Path) -> PathBuf {
460 if path.file_name().is_some() {
461 self.abs_path.join(path)
462 } else {
463 self.abs_path.to_path_buf()
464 }
465 }
466
467 pub(crate) fn load_buffer(
468 &mut self,
469 path: &Path,
470 cx: &mut ModelContext<Worktree>,
471 ) -> Task<Result<ModelHandle<Buffer>>> {
472 let path = Arc::from(path);
473 cx.spawn(move |this, mut cx| async move {
474 let (file, contents, diff_base) = this
475 .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
476 .await?;
477 Ok(cx.add_model(|cx| {
478 let mut buffer = Buffer::from_file(0, contents, diff_base, Arc::new(file), cx);
479 buffer.git_diff_recalc(cx);
480 buffer
481 }))
482 })
483 }
484
485 pub fn diagnostics_for_path(
486 &self,
487 path: &Path,
488 ) -> Option<Vec<DiagnosticEntry<Unclipped<PointUtf16>>>> {
489 self.diagnostics.get(path).cloned()
490 }
491
492 pub fn update_diagnostics(
493 &mut self,
494 language_server_id: usize,
495 worktree_path: Arc<Path>,
496 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
497 _: &mut ModelContext<Worktree>,
498 ) -> Result<bool> {
499 self.diagnostics.remove(&worktree_path);
500 let old_summary = self
501 .diagnostic_summaries
502 .remove(&PathKey(worktree_path.clone()))
503 .unwrap_or_default();
504 let new_summary = DiagnosticSummary::new(language_server_id, &diagnostics);
505 if !new_summary.is_empty() {
506 self.diagnostic_summaries
507 .insert(PathKey(worktree_path.clone()), new_summary);
508 self.diagnostics.insert(worktree_path.clone(), diagnostics);
509 }
510
511 let updated = !old_summary.is_empty() || !new_summary.is_empty();
512 if updated {
513 if let Some(share) = self.share.as_ref() {
514 self.client
515 .send(proto::UpdateDiagnosticSummary {
516 project_id: share.project_id,
517 worktree_id: self.id().to_proto(),
518 summary: Some(proto::DiagnosticSummary {
519 path: worktree_path.to_string_lossy().to_string(),
520 language_server_id: language_server_id as u64,
521 error_count: new_summary.error_count as u32,
522 warning_count: new_summary.warning_count as u32,
523 }),
524 })
525 .log_err();
526 }
527 }
528
529 Ok(updated)
530 }
531
532 fn background_scanner_updated(
533 &mut self,
534 scan_state: ScanState,
535 cx: &mut ModelContext<Worktree>,
536 ) {
537 match scan_state {
538 ScanState::Initializing(new_snapshot) => {
539 *self.is_scanning.0.borrow_mut() = true;
540 self.set_snapshot(new_snapshot, cx);
541 }
542 ScanState::Initialized(new_snapshot) => {
543 *self.is_scanning.0.borrow_mut() = false;
544 self.set_snapshot(new_snapshot, cx);
545 }
546 ScanState::Updating => {
547 *self.is_scanning.0.borrow_mut() = true;
548 }
549 ScanState::Updated(new_snapshot, changes) => {
550 *self.is_scanning.0.borrow_mut() = false;
551 cx.emit(Event::UpdatedEntries(changes));
552 self.set_snapshot(new_snapshot, cx);
553 }
554 ScanState::Err(error) => {
555 *self.is_scanning.0.borrow_mut() = false;
                log::error!("error scanning worktree: {:?}", error);
557 }
558 }
559 }
560
561 fn set_snapshot(&mut self, new_snapshot: LocalSnapshot, cx: &mut ModelContext<Worktree>) {
562 let updated_repos = Self::changed_repos(
563 &self.snapshot.git_repositories,
564 &new_snapshot.git_repositories,
565 );
566 self.snapshot = new_snapshot;
567
568 if let Some(share) = self.share.as_mut() {
569 *share.snapshots_tx.borrow_mut() = self.snapshot.clone();
570 }
571
572 if !updated_repos.is_empty() {
573 cx.emit(Event::UpdatedGitRepositories(updated_repos));
574 }
575 cx.notify();
576 }
577
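    // Computes the symmetric difference between the old and new repository
    // lists, treating entries with the same `.git` path but different scan ids
    // as changed.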
578 fn changed_repos(
579 old_repos: &[GitRepositoryEntry],
580 new_repos: &[GitRepositoryEntry],
581 ) -> Vec<GitRepositoryEntry> {
582 fn diff<'a>(
583 a: &'a [GitRepositoryEntry],
584 b: &'a [GitRepositoryEntry],
585 updated: &mut HashMap<&'a Path, GitRepositoryEntry>,
586 ) {
587 for a_repo in a {
588 let matched = b.iter().find(|b_repo| {
589 a_repo.git_dir_path == b_repo.git_dir_path && a_repo.scan_id == b_repo.scan_id
590 });
591
592 if matched.is_none() {
593 updated.insert(a_repo.git_dir_path.as_ref(), a_repo.clone());
594 }
595 }
596 }
597
598 let mut updated = HashMap::<&Path, GitRepositoryEntry>::default();
599
600 diff(old_repos, new_repos, &mut updated);
601 diff(new_repos, old_repos, &mut updated);
602
603 updated.into_values().collect()
604 }
605
606 pub fn scan_complete(&self) -> impl Future<Output = ()> {
607 let mut is_scanning_rx = self.is_scanning.1.clone();
608 async move {
609 let mut is_scanning = is_scanning_rx.borrow().clone();
610 while is_scanning {
611 if let Some(value) = is_scanning_rx.recv().await {
612 is_scanning = value;
613 } else {
614 break;
615 }
616 }
617 }
618 }
619
620 pub fn snapshot(&self) -> LocalSnapshot {
621 self.snapshot.clone()
622 }
623
624 pub fn metadata_proto(&self) -> proto::WorktreeMetadata {
625 proto::WorktreeMetadata {
626 id: self.id().to_proto(),
627 root_name: self.root_name().to_string(),
628 visible: self.visible,
629 abs_path: self.abs_path().as_os_str().to_string_lossy().into(),
630 }
631 }
632
633 fn load(
634 &self,
635 path: &Path,
636 cx: &mut ModelContext<Worktree>,
637 ) -> Task<Result<(File, String, Option<String>)>> {
638 let handle = cx.handle();
639 let path = Arc::from(path);
640 let abs_path = self.absolutize(&path);
641 let fs = self.fs.clone();
642 let snapshot = self.snapshot();
643
644 cx.spawn(|this, mut cx| async move {
645 let text = fs.load(&abs_path).await?;
646
647 let diff_base = if let Some(repo) = snapshot.repo_for(&path) {
648 if let Ok(repo_relative) = path.strip_prefix(repo.content_path) {
649 let repo_relative = repo_relative.to_owned();
650 cx.background()
651 .spawn(async move { repo.repo.lock().load_index_text(&repo_relative) })
652 .await
653 } else {
654 None
655 }
656 } else {
657 None
658 };
659
660 // Eagerly populate the snapshot with an updated entry for the loaded file
661 let entry = this
662 .update(&mut cx, |this, cx| {
663 this.as_local()
664 .unwrap()
665 .refresh_entry(path, abs_path, None, cx)
666 })
667 .await?;
668
669 Ok((
670 File {
671 entry_id: entry.id,
672 worktree: handle,
673 path: entry.path,
674 mtime: entry.mtime,
675 is_local: true,
676 is_deleted: false,
677 },
678 text,
679 diff_base,
680 ))
681 })
682 }
683
684 pub fn save_buffer(
685 &self,
686 buffer_handle: ModelHandle<Buffer>,
687 path: Arc<Path>,
688 has_changed_file: bool,
689 cx: &mut ModelContext<Worktree>,
690 ) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
691 let handle = cx.handle();
692 let buffer = buffer_handle.read(cx);
693
694 let rpc = self.client.clone();
695 let buffer_id = buffer.remote_id();
696 let project_id = self.share.as_ref().map(|share| share.project_id);
697
698 let text = buffer.as_rope().clone();
699 let fingerprint = text.fingerprint();
700 let version = buffer.version();
701 let save = self.write_file(path, text, buffer.line_ending(), cx);
702
703 cx.as_mut().spawn(|mut cx| async move {
704 let entry = save.await?;
705
706 if has_changed_file {
707 let new_file = Arc::new(File {
708 entry_id: entry.id,
709 worktree: handle,
710 path: entry.path,
711 mtime: entry.mtime,
712 is_local: true,
713 is_deleted: false,
714 });
715
716 if let Some(project_id) = project_id {
717 rpc.send(proto::UpdateBufferFile {
718 project_id,
719 buffer_id,
720 file: Some(new_file.to_proto()),
721 })
722 .log_err();
723 }
724
                buffer_handle.update(&mut cx, |buffer, cx| {
                    buffer.file_updated(new_file, cx).detach();
                });
730 }
731
732 if let Some(project_id) = project_id {
733 rpc.send(proto::BufferSaved {
734 project_id,
735 buffer_id,
736 version: serialize_version(&version),
737 mtime: Some(entry.mtime.into()),
738 fingerprint: serialize_fingerprint(fingerprint),
739 })?;
740 }
741
742 buffer_handle.update(&mut cx, |buffer, cx| {
743 buffer.did_save(version.clone(), fingerprint, entry.mtime, cx);
744 });
745
746 Ok((version, fingerprint, entry.mtime))
747 })
748 }
749
750 pub fn create_entry(
751 &self,
752 path: impl Into<Arc<Path>>,
753 is_dir: bool,
754 cx: &mut ModelContext<Worktree>,
755 ) -> Task<Result<Entry>> {
756 self.write_entry_internal(
757 path,
758 if is_dir {
759 None
760 } else {
761 Some(Default::default())
762 },
763 cx,
764 )
765 }
766
767 pub fn write_file(
768 &self,
769 path: impl Into<Arc<Path>>,
770 text: Rope,
771 line_ending: LineEnding,
772 cx: &mut ModelContext<Worktree>,
773 ) -> Task<Result<Entry>> {
774 self.write_entry_internal(path, Some((text, line_ending)), cx)
775 }
776
777 pub fn delete_entry(
778 &self,
779 entry_id: ProjectEntryId,
780 cx: &mut ModelContext<Worktree>,
781 ) -> Option<Task<Result<()>>> {
782 let entry = self.entry_for_id(entry_id)?.clone();
783 let abs_path = self.absolutize(&entry.path);
784 let delete = cx.background().spawn({
785 let fs = self.fs.clone();
786 let abs_path = abs_path;
787 async move {
788 if entry.is_file() {
789 fs.remove_file(&abs_path, Default::default()).await
790 } else {
791 fs.remove_dir(
792 &abs_path,
793 RemoveOptions {
794 recursive: true,
795 ignore_if_not_exists: false,
796 },
797 )
798 .await
799 }
800 }
801 });
802
803 Some(cx.spawn(|this, mut cx| async move {
804 delete.await?;
805 this.update(&mut cx, |this, cx| {
806 let this = this.as_local_mut().unwrap();
807
808 this.background_snapshot.lock().delete_entry(entry_id);
809
810 if let Some(path) = this.snapshot.delete_entry(entry_id) {
811 cx.emit(Event::UpdatedEntries(
812 [(path, PathChange::Removed)].into_iter().collect(),
813 ));
814 }
815 });
816 Ok(())
817 }))
818 }
819
820 pub fn rename_entry(
821 &self,
822 entry_id: ProjectEntryId,
823 new_path: impl Into<Arc<Path>>,
824 cx: &mut ModelContext<Worktree>,
825 ) -> Option<Task<Result<Entry>>> {
826 let old_path = self.entry_for_id(entry_id)?.path.clone();
827 let new_path = new_path.into();
828 let abs_old_path = self.absolutize(&old_path);
829 let abs_new_path = self.absolutize(new_path.as_ref());
830 let rename = cx.background().spawn({
831 let fs = self.fs.clone();
832 let abs_new_path = abs_new_path.clone();
833 async move {
834 fs.rename(&abs_old_path, &abs_new_path, Default::default())
835 .await
836 }
837 });
838
839 Some(cx.spawn(|this, mut cx| async move {
840 rename.await?;
841 let entry = this
842 .update(&mut cx, |this, cx| {
843 this.as_local_mut().unwrap().refresh_entry(
844 new_path.clone(),
845 abs_new_path,
846 Some(old_path),
847 cx,
848 )
849 })
850 .await?;
851 Ok(entry)
852 }))
853 }
854
855 pub fn copy_entry(
856 &self,
857 entry_id: ProjectEntryId,
858 new_path: impl Into<Arc<Path>>,
859 cx: &mut ModelContext<Worktree>,
860 ) -> Option<Task<Result<Entry>>> {
861 let old_path = self.entry_for_id(entry_id)?.path.clone();
862 let new_path = new_path.into();
863 let abs_old_path = self.absolutize(&old_path);
864 let abs_new_path = self.absolutize(&new_path);
865 let copy = cx.background().spawn({
866 let fs = self.fs.clone();
867 let abs_new_path = abs_new_path.clone();
868 async move {
869 copy_recursive(
870 fs.as_ref(),
871 &abs_old_path,
872 &abs_new_path,
873 Default::default(),
874 )
875 .await
876 }
877 });
878
879 Some(cx.spawn(|this, mut cx| async move {
880 copy.await?;
881 let entry = this
882 .update(&mut cx, |this, cx| {
883 this.as_local_mut().unwrap().refresh_entry(
884 new_path.clone(),
885 abs_new_path,
886 None,
887 cx,
888 )
889 })
890 .await?;
891 Ok(entry)
892 }))
893 }
894
895 fn write_entry_internal(
896 &self,
897 path: impl Into<Arc<Path>>,
898 text_if_file: Option<(Rope, LineEnding)>,
899 cx: &mut ModelContext<Worktree>,
900 ) -> Task<Result<Entry>> {
901 let path = path.into();
902 let abs_path = self.absolutize(&path);
903 let write = cx.background().spawn({
904 let fs = self.fs.clone();
905 let abs_path = abs_path.clone();
906 async move {
907 if let Some((text, line_ending)) = text_if_file {
908 fs.save(&abs_path, &text, line_ending).await
909 } else {
910 fs.create_dir(&abs_path).await
911 }
912 }
913 });
914
915 cx.spawn(|this, mut cx| async move {
916 write.await?;
917 let entry = this
918 .update(&mut cx, |this, cx| {
919 this.as_local_mut()
920 .unwrap()
921 .refresh_entry(path, abs_path, None, cx)
922 })
923 .await?;
924 Ok(entry)
925 })
926 }
927
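    // Re-stats the given path and updates both the foreground and background
    // snapshots with the refreshed entry, emitting an `UpdatedEntries` event.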
928 fn refresh_entry(
929 &self,
930 path: Arc<Path>,
931 abs_path: PathBuf,
932 old_path: Option<Arc<Path>>,
933 cx: &mut ModelContext<Worktree>,
934 ) -> Task<Result<Entry>> {
935 let fs = self.fs.clone();
936 let root_char_bag = self.snapshot.root_char_bag;
937 let next_entry_id = self.snapshot.next_entry_id.clone();
938 cx.spawn_weak(|this, mut cx| async move {
939 let metadata = fs
940 .metadata(&abs_path)
941 .await?
942 .ok_or_else(|| anyhow!("could not read saved file metadata"))?;
943 let this = this
944 .upgrade(&cx)
945 .ok_or_else(|| anyhow!("worktree was dropped"))?;
946 this.update(&mut cx, |this, cx| {
947 let this = this.as_local_mut().unwrap();
948 let mut entry = Entry::new(path, &metadata, &next_entry_id, root_char_bag);
949 entry.is_ignored = this
950 .snapshot
951 .ignore_stack_for_abs_path(&abs_path, entry.is_dir())
952 .is_abs_path_ignored(&abs_path, entry.is_dir());
953
954 {
955 let mut snapshot = this.background_snapshot.lock();
956 snapshot.scan_started();
957 if let Some(old_path) = &old_path {
958 snapshot.remove_path(old_path);
959 }
960 snapshot.insert_entry(entry.clone(), fs.as_ref());
961 snapshot.scan_completed();
962 }
963
964 let mut changes = HashMap::default();
965
966 this.snapshot.scan_started();
967 if let Some(old_path) = &old_path {
968 this.snapshot.remove_path(old_path);
969 changes.insert(old_path.clone(), PathChange::Removed);
970 }
971 let exists = this.snapshot.entry_for_path(&entry.path).is_some();
972 let inserted_entry = this.snapshot.insert_entry(entry, fs.as_ref());
973 changes.insert(
974 inserted_entry.path.clone(),
975 if exists {
976 PathChange::Updated
977 } else {
978 PathChange::Added
979 },
980 );
981 this.snapshot.scan_completed();
982
984 cx.emit(Event::UpdatedEntries(changes));
985 Ok(inserted_entry)
986 })
987 })
988 }
989
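    /// Starts streaming this worktree's snapshots to the given remote project.
    /// The returned task resolves once the initial snapshot has been sent, or
    /// immediately if the worktree is already shared.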
990 pub fn share(&mut self, project_id: u64, cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
991 let (share_tx, share_rx) = oneshot::channel();
992
993 if let Some(share) = self.share.as_mut() {
994 let _ = share_tx.send(());
995 *share.resume_updates.borrow_mut() = ();
996 } else {
997 let (snapshots_tx, mut snapshots_rx) = watch::channel_with(self.snapshot());
998 let (resume_updates_tx, mut resume_updates_rx) = watch::channel();
999 let worktree_id = cx.model_id() as u64;
1000
1001 for (path, summary) in self.diagnostic_summaries.iter() {
1002 if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
1003 project_id,
1004 worktree_id,
1005 summary: Some(summary.to_proto(&path.0)),
1006 }) {
1007 return Task::ready(Err(e));
1008 }
1009 }
1010
1011 let _maintain_remote_snapshot = cx.background().spawn({
1012 let client = self.client.clone();
1013 async move {
1014 let mut share_tx = Some(share_tx);
1015 let mut prev_snapshot = LocalSnapshot {
1016 ignores_by_parent_abs_path: Default::default(),
1017 git_repositories: Default::default(),
1018 removed_entry_ids: Default::default(),
1019 next_entry_id: Default::default(),
1020 snapshot: Snapshot {
1021 id: WorktreeId(worktree_id as usize),
1022 abs_path: Path::new("").into(),
1023 root_name: Default::default(),
1024 root_char_bag: Default::default(),
1025 entries_by_path: Default::default(),
1026 entries_by_id: Default::default(),
1027 scan_id: 0,
1028 completed_scan_id: 0,
1029 },
1030 };
1031 while let Some(snapshot) = snapshots_rx.recv().await {
1032 #[cfg(any(test, feature = "test-support"))]
1033 const MAX_CHUNK_SIZE: usize = 2;
1034 #[cfg(not(any(test, feature = "test-support")))]
1035 const MAX_CHUNK_SIZE: usize = 256;
1036
1037 let update =
1038 snapshot.build_update(&prev_snapshot, project_id, worktree_id, true);
1039 for update in proto::split_worktree_update(update, MAX_CHUNK_SIZE) {
1040 let _ = resume_updates_rx.try_recv();
1041 while let Err(error) = client.request(update.clone()).await {
1042 log::error!("failed to send worktree update: {}", error);
1043 log::info!("waiting to resume updates");
1044 if resume_updates_rx.next().await.is_none() {
1045 return Ok(());
1046 }
1047 }
1048 }
1049
1050 if let Some(share_tx) = share_tx.take() {
1051 let _ = share_tx.send(());
1052 }
1053
1054 prev_snapshot = snapshot;
1055 }
1056
1057 Ok::<_, anyhow::Error>(())
1058 }
1059 .log_err()
1060 });
1061
1062 self.share = Some(ShareState {
1063 project_id,
1064 snapshots_tx,
1065 resume_updates: resume_updates_tx,
1066 _maintain_remote_snapshot,
1067 });
1068 }
1069
1070 cx.foreground()
1071 .spawn(async move { share_rx.await.map_err(|_| anyhow!("share ended")) })
1072 }
1073
1074 pub fn unshare(&mut self) {
1075 self.share.take();
1076 }
1077
1078 pub fn is_shared(&self) -> bool {
1079 self.share.is_some()
1080 }
1081}
1082
1083impl RemoteWorktree {
1084 fn snapshot(&self) -> Snapshot {
1085 self.snapshot.clone()
1086 }
1087
1088 pub fn disconnected_from_host(&mut self) {
1089 self.updates_tx.take();
1090 self.snapshot_subscriptions.clear();
1091 self.disconnected = true;
1092 }
1093
1094 pub fn save_buffer(
1095 &self,
1096 buffer_handle: ModelHandle<Buffer>,
1097 cx: &mut ModelContext<Worktree>,
1098 ) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
1099 let buffer = buffer_handle.read(cx);
1100 let buffer_id = buffer.remote_id();
1101 let version = buffer.version();
1102 let rpc = self.client.clone();
1103 let project_id = self.project_id;
1104 cx.as_mut().spawn(|mut cx| async move {
1105 let response = rpc
1106 .request(proto::SaveBuffer {
1107 project_id,
1108 buffer_id,
1109 version: serialize_version(&version),
1110 })
1111 .await?;
1112 let version = deserialize_version(response.version);
1113 let fingerprint = deserialize_fingerprint(&response.fingerprint)?;
1114 let mtime = response
1115 .mtime
1116 .ok_or_else(|| anyhow!("missing mtime"))?
1117 .into();
1118
1119 buffer_handle.update(&mut cx, |buffer, cx| {
1120 buffer.did_save(version.clone(), fingerprint, mtime, cx);
1121 });
1122
1123 Ok((version, fingerprint, mtime))
1124 })
1125 }
1126
1127 pub fn update_from_remote(&mut self, update: proto::UpdateWorktree) {
1128 if let Some(updates_tx) = &self.updates_tx {
1129 updates_tx
1130 .unbounded_send(update)
1131 .expect("consumer runs to completion");
1132 }
1133 }
1134
1135 fn observed_snapshot(&self, scan_id: usize) -> bool {
1136 self.completed_scan_id >= scan_id
1137 }
1138
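    // Returns a future that resolves once a snapshot with at least the given
    // scan id has been observed, or fails if the worktree disconnects first.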
1139 fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = Result<()>> {
1140 let (tx, rx) = oneshot::channel();
1141 if self.observed_snapshot(scan_id) {
1142 let _ = tx.send(());
1143 } else if self.disconnected {
1144 drop(tx);
1145 } else {
1146 match self
1147 .snapshot_subscriptions
1148 .binary_search_by_key(&scan_id, |probe| probe.0)
1149 {
1150 Ok(ix) | Err(ix) => self.snapshot_subscriptions.insert(ix, (scan_id, tx)),
1151 }
1152 }
1153
1154 async move {
1155 rx.await?;
1156 Ok(())
1157 }
1158 }
1159
1160 pub fn update_diagnostic_summary(
1161 &mut self,
1162 path: Arc<Path>,
1163 summary: &proto::DiagnosticSummary,
1164 ) {
1165 let summary = DiagnosticSummary {
1166 language_server_id: summary.language_server_id as usize,
1167 error_count: summary.error_count as usize,
1168 warning_count: summary.warning_count as usize,
1169 };
1170 if summary.is_empty() {
1171 self.diagnostic_summaries.remove(&PathKey(path));
1172 } else {
1173 self.diagnostic_summaries.insert(PathKey(path), summary);
1174 }
1175 }
1176
1177 pub fn insert_entry(
1178 &mut self,
1179 entry: proto::Entry,
1180 scan_id: usize,
1181 cx: &mut ModelContext<Worktree>,
1182 ) -> Task<Result<Entry>> {
1183 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1184 cx.spawn(|this, mut cx| async move {
1185 wait_for_snapshot.await?;
1186 this.update(&mut cx, |worktree, _| {
1187 let worktree = worktree.as_remote_mut().unwrap();
1188 let mut snapshot = worktree.background_snapshot.lock();
1189 let entry = snapshot.insert_entry(entry);
1190 worktree.snapshot = snapshot.clone();
1191 entry
1192 })
1193 })
1194 }
1195
1196 pub(crate) fn delete_entry(
1197 &mut self,
1198 id: ProjectEntryId,
1199 scan_id: usize,
1200 cx: &mut ModelContext<Worktree>,
1201 ) -> Task<Result<()>> {
1202 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1203 cx.spawn(|this, mut cx| async move {
1204 wait_for_snapshot.await?;
1205 this.update(&mut cx, |worktree, _| {
1206 let worktree = worktree.as_remote_mut().unwrap();
1207 let mut snapshot = worktree.background_snapshot.lock();
1208 snapshot.delete_entry(id);
1209 worktree.snapshot = snapshot.clone();
1210 });
1211 Ok(())
1212 })
1213 }
1214}
1215
1216impl Snapshot {
1217 pub fn id(&self) -> WorktreeId {
1218 self.id
1219 }
1220
1221 pub fn abs_path(&self) -> &Arc<Path> {
1222 &self.abs_path
1223 }
1224
1225 pub fn contains_entry(&self, entry_id: ProjectEntryId) -> bool {
1226 self.entries_by_id.get(&entry_id, &()).is_some()
1227 }
1228
1229 pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
1230 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1231 let old_entry = self.entries_by_id.insert_or_replace(
1232 PathEntry {
1233 id: entry.id,
1234 path: entry.path.clone(),
1235 is_ignored: entry.is_ignored,
1236 scan_id: 0,
1237 },
1238 &(),
1239 );
1240 if let Some(old_entry) = old_entry {
1241 self.entries_by_path.remove(&PathKey(old_entry.path), &());
1242 }
1243 self.entries_by_path.insert_or_replace(entry.clone(), &());
1244 Ok(entry)
1245 }
1246
1247 fn delete_entry(&mut self, entry_id: ProjectEntryId) -> Option<Arc<Path>> {
1248 let removed_entry = self.entries_by_id.remove(&entry_id, &())?;
1249 self.entries_by_path = {
1250 let mut cursor = self.entries_by_path.cursor();
1251 let mut new_entries_by_path =
1252 cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &());
1253 while let Some(entry) = cursor.item() {
1254 if entry.path.starts_with(&removed_entry.path) {
1255 self.entries_by_id.remove(&entry.id, &());
1256 cursor.next(&());
1257 } else {
1258 break;
1259 }
1260 }
1261 new_entries_by_path.push_tree(cursor.suffix(&()), &());
1262 new_entries_by_path
1263 };
1264
1265 Some(removed_entry.path)
1266 }
1267
1268 pub(crate) fn apply_remote_update(&mut self, update: proto::UpdateWorktree) -> Result<()> {
1269 let mut entries_by_path_edits = Vec::new();
1270 let mut entries_by_id_edits = Vec::new();
1271 for entry_id in update.removed_entries {
1272 let entry = self
1273 .entry_for_id(ProjectEntryId::from_proto(entry_id))
1274 .ok_or_else(|| anyhow!("unknown entry {}", entry_id))?;
1275 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
1276 entries_by_id_edits.push(Edit::Remove(entry.id));
1277 }
1278
1279 for entry in update.updated_entries {
1280 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1281 if let Some(PathEntry { path, .. }) = self.entries_by_id.get(&entry.id, &()) {
1282 entries_by_path_edits.push(Edit::Remove(PathKey(path.clone())));
1283 }
1284 entries_by_id_edits.push(Edit::Insert(PathEntry {
1285 id: entry.id,
1286 path: entry.path.clone(),
1287 is_ignored: entry.is_ignored,
1288 scan_id: 0,
1289 }));
1290 entries_by_path_edits.push(Edit::Insert(entry));
1291 }
1292
1293 self.entries_by_path.edit(entries_by_path_edits, &());
1294 self.entries_by_id.edit(entries_by_id_edits, &());
1295 self.scan_id = update.scan_id as usize;
1296 if update.is_last_update {
1297 self.completed_scan_id = update.scan_id as usize;
1298 }
1299
1300 Ok(())
1301 }
1302
1303 pub fn file_count(&self) -> usize {
1304 self.entries_by_path.summary().file_count
1305 }
1306
1307 pub fn visible_file_count(&self) -> usize {
1308 self.entries_by_path.summary().visible_file_count
1309 }
1310
1311 fn traverse_from_offset(
1312 &self,
1313 include_dirs: bool,
1314 include_ignored: bool,
1315 start_offset: usize,
1316 ) -> Traversal {
1317 let mut cursor = self.entries_by_path.cursor();
1318 cursor.seek(
1319 &TraversalTarget::Count {
1320 count: start_offset,
1321 include_dirs,
1322 include_ignored,
1323 },
1324 Bias::Right,
1325 &(),
1326 );
1327 Traversal {
1328 cursor,
1329 include_dirs,
1330 include_ignored,
1331 }
1332 }
1333
1334 fn traverse_from_path(
1335 &self,
1336 include_dirs: bool,
1337 include_ignored: bool,
1338 path: &Path,
1339 ) -> Traversal {
1340 let mut cursor = self.entries_by_path.cursor();
1341 cursor.seek(&TraversalTarget::Path(path), Bias::Left, &());
1342 Traversal {
1343 cursor,
1344 include_dirs,
1345 include_ignored,
1346 }
1347 }
1348
1349 pub fn files(&self, include_ignored: bool, start: usize) -> Traversal {
1350 self.traverse_from_offset(false, include_ignored, start)
1351 }
1352
1353 pub fn entries(&self, include_ignored: bool) -> Traversal {
1354 self.traverse_from_offset(true, include_ignored, 0)
1355 }
1356
1357 pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
1358 let empty_path = Path::new("");
1359 self.entries_by_path
1360 .cursor::<()>()
1361 .filter(move |entry| entry.path.as_ref() != empty_path)
1362 .map(|entry| &entry.path)
1363 }
1364
1365 fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> {
1366 let mut cursor = self.entries_by_path.cursor();
1367 cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &());
1368 let traversal = Traversal {
1369 cursor,
1370 include_dirs: true,
1371 include_ignored: true,
1372 };
1373 ChildEntriesIter {
1374 traversal,
1375 parent_path,
1376 }
1377 }
1378
1379 pub fn root_entry(&self) -> Option<&Entry> {
1380 self.entry_for_path("")
1381 }
1382
1383 pub fn root_name(&self) -> &str {
1384 &self.root_name
1385 }
1386
1387 pub fn scan_started(&mut self) {
1388 self.scan_id += 1;
1389 }
1390
1391 pub fn scan_completed(&mut self) {
1392 self.completed_scan_id = self.scan_id;
1393 }
1394
1395 pub fn scan_id(&self) -> usize {
1396 self.scan_id
1397 }
1398
1399 pub fn entry_for_path(&self, path: impl AsRef<Path>) -> Option<&Entry> {
1400 let path = path.as_ref();
1401 self.traverse_from_path(true, true, path)
1402 .entry()
1403 .and_then(|entry| {
1404 if entry.path.as_ref() == path {
1405 Some(entry)
1406 } else {
1407 None
1408 }
1409 })
1410 }
1411
1412 pub fn entry_for_id(&self, id: ProjectEntryId) -> Option<&Entry> {
1413 let entry = self.entries_by_id.get(&id, &())?;
1414 self.entry_for_path(&entry.path)
1415 }
1416
1417 pub fn inode_for_path(&self, path: impl AsRef<Path>) -> Option<u64> {
1418 self.entry_for_path(path.as_ref()).map(|e| e.inode)
1419 }
1420}
1421
1422impl LocalSnapshot {
1423 // Gives the most specific git repository for a given path
1424 pub(crate) fn repo_for(&self, path: &Path) -> Option<GitRepositoryEntry> {
1425 self.git_repositories
1426 .iter()
            .rev() // `git_repositories` is ordered lexicographically
1428 .find(|repo| repo.manages(path))
1429 .cloned()
1430 }
1431
1432 pub(crate) fn in_dot_git(&mut self, path: &Path) -> Option<&mut GitRepositoryEntry> {
1433 // Git repositories cannot be nested, so we don't need to reverse the order
1434 self.git_repositories
1435 .iter_mut()
1436 .find(|repo| repo.in_dot_git(path))
1437 }
1438
1439 #[cfg(test)]
1440 pub(crate) fn build_initial_update(&self, project_id: u64) -> proto::UpdateWorktree {
1441 let root_name = self.root_name.clone();
1442 proto::UpdateWorktree {
1443 project_id,
1444 worktree_id: self.id().to_proto(),
1445 abs_path: self.abs_path().to_string_lossy().into(),
1446 root_name,
1447 updated_entries: self.entries_by_path.iter().map(Into::into).collect(),
1448 removed_entries: Default::default(),
1449 scan_id: self.scan_id as u64,
1450 is_last_update: true,
1451 }
1452 }
1453
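    // Builds a protobuf update describing how this snapshot differs from
    // `other`, walking both id-ordered entry lists in lockstep.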
1454 pub(crate) fn build_update(
1455 &self,
1456 other: &Self,
1457 project_id: u64,
1458 worktree_id: u64,
1459 include_ignored: bool,
1460 ) -> proto::UpdateWorktree {
1461 let mut updated_entries = Vec::new();
1462 let mut removed_entries = Vec::new();
1463 let mut self_entries = self
1464 .entries_by_id
1465 .cursor::<()>()
1466 .filter(|e| include_ignored || !e.is_ignored)
1467 .peekable();
1468 let mut other_entries = other
1469 .entries_by_id
1470 .cursor::<()>()
1471 .filter(|e| include_ignored || !e.is_ignored)
1472 .peekable();
1473 loop {
1474 match (self_entries.peek(), other_entries.peek()) {
1475 (Some(self_entry), Some(other_entry)) => {
1476 match Ord::cmp(&self_entry.id, &other_entry.id) {
1477 Ordering::Less => {
1478 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1479 updated_entries.push(entry);
1480 self_entries.next();
1481 }
1482 Ordering::Equal => {
1483 if self_entry.scan_id != other_entry.scan_id {
1484 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1485 updated_entries.push(entry);
1486 }
1487
1488 self_entries.next();
1489 other_entries.next();
1490 }
1491 Ordering::Greater => {
1492 removed_entries.push(other_entry.id.to_proto());
1493 other_entries.next();
1494 }
1495 }
1496 }
1497 (Some(self_entry), None) => {
1498 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1499 updated_entries.push(entry);
1500 self_entries.next();
1501 }
1502 (None, Some(other_entry)) => {
1503 removed_entries.push(other_entry.id.to_proto());
1504 other_entries.next();
1505 }
1506 (None, None) => break,
1507 }
1508 }
1509
1510 proto::UpdateWorktree {
1511 project_id,
1512 worktree_id,
1513 abs_path: self.abs_path().to_string_lossy().into(),
1514 root_name: self.root_name().to_string(),
1515 updated_entries,
1516 removed_entries,
1517 scan_id: self.scan_id as u64,
1518 is_last_update: self.completed_scan_id == self.scan_id,
1519 }
1520 }
1521
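    // Inserts an entry into both the path- and id-keyed trees, reusing an
    // existing entry id when possible and reloading the corresponding ignore
    // rules if the entry is a `.gitignore` file.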
1522 fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
1523 if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) {
1524 let abs_path = self.abs_path.join(&entry.path);
1525 match smol::block_on(build_gitignore(&abs_path, fs)) {
1526 Ok(ignore) => {
1527 self.ignores_by_parent_abs_path.insert(
1528 abs_path.parent().unwrap().into(),
1529 (Arc::new(ignore), self.scan_id),
1530 );
1531 }
1532 Err(error) => {
1533 log::error!(
1534 "error loading .gitignore file {:?} - {:?}",
1535 &entry.path,
1536 error
1537 );
1538 }
1539 }
1540 }
1541
1542 self.reuse_entry_id(&mut entry);
1543
1544 if entry.kind == EntryKind::PendingDir {
1545 if let Some(existing_entry) =
1546 self.entries_by_path.get(&PathKey(entry.path.clone()), &())
1547 {
1548 entry.kind = existing_entry.kind;
1549 }
1550 }
1551
1552 let scan_id = self.scan_id;
1553 self.entries_by_path.insert_or_replace(entry.clone(), &());
1554 self.entries_by_id.insert_or_replace(
1555 PathEntry {
1556 id: entry.id,
1557 path: entry.path.clone(),
1558 is_ignored: entry.is_ignored,
1559 scan_id,
1560 },
1561 &(),
1562 );
1563
1564 entry
1565 }
1566
1567 fn populate_dir(
1568 &mut self,
1569 parent_path: Arc<Path>,
1570 entries: impl IntoIterator<Item = Entry>,
1571 ignore: Option<Arc<Gitignore>>,
1572 fs: &dyn Fs,
1573 ) {
1574 let mut parent_entry = if let Some(parent_entry) =
1575 self.entries_by_path.get(&PathKey(parent_path.clone()), &())
1576 {
1577 parent_entry.clone()
1578 } else {
1579 log::warn!(
1580 "populating a directory {:?} that has been removed",
1581 parent_path
1582 );
1583 return;
1584 };
1585
1586 if let Some(ignore) = ignore {
1587 self.ignores_by_parent_abs_path.insert(
1588 self.abs_path.join(&parent_path).into(),
1589 (ignore, self.scan_id),
1590 );
1591 }
1592 if matches!(parent_entry.kind, EntryKind::PendingDir) {
1593 parent_entry.kind = EntryKind::Dir;
1594 } else {
1595 unreachable!();
1596 }
1597
1598 if parent_path.file_name() == Some(&DOT_GIT) {
1599 let abs_path = self.abs_path.join(&parent_path);
1600 let content_path: Arc<Path> = parent_path.parent().unwrap().into();
1601 if let Err(ix) = self
1602 .git_repositories
1603 .binary_search_by_key(&&content_path, |repo| &repo.content_path)
1604 {
1605 if let Some(repo) = fs.open_repo(abs_path.as_path()) {
1606 self.git_repositories.insert(
1607 ix,
1608 GitRepositoryEntry {
1609 repo,
1610 scan_id: 0,
1611 content_path,
1612 git_dir_path: parent_path,
1613 },
1614 );
1615 }
1616 }
1617 }
1618
1619 let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
1620 let mut entries_by_id_edits = Vec::new();
1621
1622 for mut entry in entries {
1623 self.reuse_entry_id(&mut entry);
1624 entries_by_id_edits.push(Edit::Insert(PathEntry {
1625 id: entry.id,
1626 path: entry.path.clone(),
1627 is_ignored: entry.is_ignored,
1628 scan_id: self.scan_id,
1629 }));
1630 entries_by_path_edits.push(Edit::Insert(entry));
1631 }
1632
1633 self.entries_by_path.edit(entries_by_path_edits, &());
1634 self.entries_by_id.edit(entries_by_id_edits, &());
1635 }
1636
1637 fn reuse_entry_id(&mut self, entry: &mut Entry) {
1638 if let Some(removed_entry_id) = self.removed_entry_ids.remove(&entry.inode) {
1639 entry.id = removed_entry_id;
1640 } else if let Some(existing_entry) = self.entry_for_path(&entry.path) {
1641 entry.id = existing_entry.id;
1642 }
1643 }
1644
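    // Removes the entry at `path` and all of its descendants, remembering
    // removed entry ids by inode so they can be reused if the same files
    // reappear.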
1645 fn remove_path(&mut self, path: &Path) {
1646 let mut new_entries;
1647 let removed_entries;
1648 {
1649 let mut cursor = self.entries_by_path.cursor::<TraversalProgress>();
1650 new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
1651 removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
1652 new_entries.push_tree(cursor.suffix(&()), &());
1653 }
1654 self.entries_by_path = new_entries;
1655
1656 let mut entries_by_id_edits = Vec::new();
1657 for entry in removed_entries.cursor::<()>() {
1658 let removed_entry_id = self
1659 .removed_entry_ids
1660 .entry(entry.inode)
1661 .or_insert(entry.id);
1662 *removed_entry_id = cmp::max(*removed_entry_id, entry.id);
1663 entries_by_id_edits.push(Edit::Remove(entry.id));
1664 }
1665 self.entries_by_id.edit(entries_by_id_edits, &());
1666
1667 if path.file_name() == Some(&GITIGNORE) {
1668 let abs_parent_path = self.abs_path.join(path.parent().unwrap());
1669 if let Some((_, scan_id)) = self
1670 .ignores_by_parent_abs_path
1671 .get_mut(abs_parent_path.as_path())
1672 {
1673 *scan_id = self.snapshot.scan_id;
1674 }
1675 } else if path.file_name() == Some(&DOT_GIT) {
1676 let parent_path = path.parent().unwrap();
1677 if let Ok(ix) = self
1678 .git_repositories
1679 .binary_search_by_key(&parent_path, |repo| repo.git_dir_path.as_ref())
1680 {
1681 self.git_repositories[ix].scan_id = self.snapshot.scan_id;
1682 }
1683 }
1684 }
1685
1686 fn ancestor_inodes_for_path(&self, path: &Path) -> TreeSet<u64> {
1687 let mut inodes = TreeSet::default();
1688 for ancestor in path.ancestors().skip(1) {
1689 if let Some(entry) = self.entry_for_path(ancestor) {
1690 inodes.insert(entry.inode);
1691 }
1692 }
1693 inodes
1694 }
1695
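    // Builds the stack of gitignores that apply to `abs_path` by walking its
    // ancestors from the outermost directory inward.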
1696 fn ignore_stack_for_abs_path(&self, abs_path: &Path, is_dir: bool) -> Arc<IgnoreStack> {
1697 let mut new_ignores = Vec::new();
1698 for ancestor in abs_path.ancestors().skip(1) {
1699 if let Some((ignore, _)) = self.ignores_by_parent_abs_path.get(ancestor) {
1700 new_ignores.push((ancestor, Some(ignore.clone())));
1701 } else {
1702 new_ignores.push((ancestor, None));
1703 }
1704 }
1705
1706 let mut ignore_stack = IgnoreStack::none();
1707 for (parent_abs_path, ignore) in new_ignores.into_iter().rev() {
1708 if ignore_stack.is_abs_path_ignored(parent_abs_path, true) {
1709 ignore_stack = IgnoreStack::all();
1710 break;
1711 } else if let Some(ignore) = ignore {
1712 ignore_stack = ignore_stack.append(parent_abs_path.into(), ignore);
1713 }
1714 }
1715
1716 if ignore_stack.is_abs_path_ignored(abs_path, is_dir) {
1717 ignore_stack = IgnoreStack::all();
1718 }
1719
1720 ignore_stack
1721 }
1722
1723 pub fn git_repo_entries(&self) -> &[GitRepositoryEntry] {
1724 &self.git_repositories
1725 }
1726}
1727
1728impl GitRepositoryEntry {
    // Note that this path should be relative to the worktree root.
1730 pub(crate) fn manages(&self, path: &Path) -> bool {
1731 path.starts_with(self.content_path.as_ref())
1732 }
1733
    // Note that this path should be relative to the worktree root.
1735 pub(crate) fn in_dot_git(&self, path: &Path) -> bool {
1736 path.starts_with(self.git_dir_path.as_ref())
1737 }
1738}
1739
1740async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
1741 let contents = fs.load(abs_path).await?;
1742 let parent = abs_path.parent().unwrap_or_else(|| Path::new("/"));
1743 let mut builder = GitignoreBuilder::new(parent);
1744 for line in contents.lines() {
1745 builder.add_line(Some(abs_path.into()), line)?;
1746 }
1747 Ok(builder.build()?)
1748}
1749
1750impl WorktreeId {
1751 pub fn from_usize(handle_id: usize) -> Self {
1752 Self(handle_id)
1753 }
1754
1755 pub(crate) fn from_proto(id: u64) -> Self {
1756 Self(id as usize)
1757 }
1758
1759 pub fn to_proto(&self) -> u64 {
1760 self.0 as u64
1761 }
1762
1763 pub fn to_usize(&self) -> usize {
1764 self.0
1765 }
1766}
1767
1768impl fmt::Display for WorktreeId {
1769 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1770 self.0.fmt(f)
1771 }
1772}
1773
1774impl Deref for Worktree {
1775 type Target = Snapshot;
1776
1777 fn deref(&self) -> &Self::Target {
1778 match self {
1779 Worktree::Local(worktree) => &worktree.snapshot,
1780 Worktree::Remote(worktree) => &worktree.snapshot,
1781 }
1782 }
1783}
1784
1785impl Deref for LocalWorktree {
1786 type Target = LocalSnapshot;
1787
1788 fn deref(&self) -> &Self::Target {
1789 &self.snapshot
1790 }
1791}
1792
1793impl Deref for RemoteWorktree {
1794 type Target = Snapshot;
1795
1796 fn deref(&self) -> &Self::Target {
1797 &self.snapshot
1798 }
1799}
1800
1801impl fmt::Debug for LocalWorktree {
1802 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1803 self.snapshot.fmt(f)
1804 }
1805}
1806
1807impl fmt::Debug for Snapshot {
1808 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1809 struct EntriesById<'a>(&'a SumTree<PathEntry>);
1810 struct EntriesByPath<'a>(&'a SumTree<Entry>);
1811
1812 impl<'a> fmt::Debug for EntriesByPath<'a> {
1813 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1814 f.debug_map()
1815 .entries(self.0.iter().map(|entry| (&entry.path, entry.id)))
1816 .finish()
1817 }
1818 }
1819
1820 impl<'a> fmt::Debug for EntriesById<'a> {
1821 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1822 f.debug_list().entries(self.0.iter()).finish()
1823 }
1824 }
1825
1826 f.debug_struct("Snapshot")
1827 .field("id", &self.id)
1828 .field("root_name", &self.root_name)
1829 .field("entries_by_path", &EntriesByPath(&self.entries_by_path))
1830 .field("entries_by_id", &EntriesById(&self.entries_by_id))
1831 .finish()
1832 }
1833}
1834
1835#[derive(Clone, PartialEq)]
1836pub struct File {
1837 pub worktree: ModelHandle<Worktree>,
1838 pub path: Arc<Path>,
1839 pub mtime: SystemTime,
1840 pub(crate) entry_id: ProjectEntryId,
1841 pub(crate) is_local: bool,
1842 pub(crate) is_deleted: bool,
1843}
1844
1845impl language::File for File {
1846 fn as_local(&self) -> Option<&dyn language::LocalFile> {
1847 if self.is_local {
1848 Some(self)
1849 } else {
1850 None
1851 }
1852 }
1853
1854 fn mtime(&self) -> SystemTime {
1855 self.mtime
1856 }
1857
1858 fn path(&self) -> &Arc<Path> {
1859 &self.path
1860 }
1861
1862 fn full_path(&self, cx: &AppContext) -> PathBuf {
1863 let mut full_path = PathBuf::new();
1864 let worktree = self.worktree.read(cx);
1865
1866 if worktree.is_visible() {
1867 full_path.push(worktree.root_name());
1868 } else {
1869 let path = worktree.abs_path();
1870
1871 if worktree.is_local() && path.starts_with(HOME.as_path()) {
1872 full_path.push("~");
1873 full_path.push(path.strip_prefix(HOME.as_path()).unwrap());
1874 } else {
1875 full_path.push(path)
1876 }
1877 }
1878
1879 if self.path.components().next().is_some() {
1880 full_path.push(&self.path);
1881 }
1882
1883 full_path
1884 }
1885
1886 /// Returns the last component of this handle's absolute path. If this handle refers to the root
1887 /// of its worktree, then this method will return the name of the worktree itself.
1888 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr {
1889 self.path
1890 .file_name()
1891 .unwrap_or_else(|| OsStr::new(&self.worktree.read(cx).root_name))
1892 }
1893
1894 fn is_deleted(&self) -> bool {
1895 self.is_deleted
1896 }
1897
1898 fn as_any(&self) -> &dyn Any {
1899 self
1900 }
1901
1902 fn to_proto(&self) -> rpc::proto::File {
1903 rpc::proto::File {
1904 worktree_id: self.worktree.id() as u64,
1905 entry_id: self.entry_id.to_proto(),
1906 path: self.path.to_string_lossy().into(),
1907 mtime: Some(self.mtime.into()),
1908 is_deleted: self.is_deleted,
1909 }
1910 }
1911}
1912
1913impl language::LocalFile for File {
1914 fn abs_path(&self, cx: &AppContext) -> PathBuf {
1915 self.worktree
1916 .read(cx)
1917 .as_local()
1918 .unwrap()
1919 .abs_path
1920 .join(&self.path)
1921 }
1922
1923 fn load(&self, cx: &AppContext) -> Task<Result<String>> {
1924 let worktree = self.worktree.read(cx).as_local().unwrap();
1925 let abs_path = worktree.absolutize(&self.path);
1926 let fs = worktree.fs.clone();
1927 cx.background()
1928 .spawn(async move { fs.load(&abs_path).await })
1929 }
1930
1931 fn buffer_reloaded(
1932 &self,
1933 buffer_id: u64,
1934 version: &clock::Global,
1935 fingerprint: RopeFingerprint,
1936 line_ending: LineEnding,
1937 mtime: SystemTime,
1938 cx: &mut MutableAppContext,
1939 ) {
1940 let worktree = self.worktree.read(cx).as_local().unwrap();
1941 if let Some(project_id) = worktree.share.as_ref().map(|share| share.project_id) {
1942 worktree
1943 .client
1944 .send(proto::BufferReloaded {
1945 project_id,
1946 buffer_id,
1947 version: serialize_version(version),
1948 mtime: Some(mtime.into()),
1949 fingerprint: serialize_fingerprint(fingerprint),
1950 line_ending: serialize_line_ending(line_ending) as i32,
1951 })
1952 .log_err();
1953 }
1954 }
1955}
1956
1957impl File {
1958 pub fn from_proto(
1959 proto: rpc::proto::File,
1960 worktree: ModelHandle<Worktree>,
1961 cx: &AppContext,
1962 ) -> Result<Self> {
1963 let worktree_id = worktree
1964 .read(cx)
1965 .as_remote()
1966 .ok_or_else(|| anyhow!("not remote"))?
1967 .id();
1968
1969 if worktree_id.to_proto() != proto.worktree_id {
1970 return Err(anyhow!("worktree id does not match file"));
1971 }
1972
1973 Ok(Self {
1974 worktree,
1975 path: Path::new(&proto.path).into(),
1976 mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
1977 entry_id: ProjectEntryId::from_proto(proto.entry_id),
1978 is_local: false,
1979 is_deleted: proto.is_deleted,
1980 })
1981 }
1982
1983 pub fn from_dyn(file: Option<&Arc<dyn language::File>>) -> Option<&Self> {
1984 file.and_then(|f| f.as_any().downcast_ref())
1985 }
1986
1987 pub fn worktree_id(&self, cx: &AppContext) -> WorktreeId {
1988 self.worktree.read(cx).id()
1989 }
1990
1991 pub fn project_entry_id(&self, _: &AppContext) -> Option<ProjectEntryId> {
1992 if self.is_deleted {
1993 None
1994 } else {
1995 Some(self.entry_id)
1996 }
1997 }
1998}
1999
2000#[derive(Clone, Debug, PartialEq, Eq)]
2001pub struct Entry {
2002 pub id: ProjectEntryId,
2003 pub kind: EntryKind,
2004 pub path: Arc<Path>,
2005 pub inode: u64,
2006 pub mtime: SystemTime,
2007 pub is_symlink: bool,
2008 pub is_ignored: bool,
2009}
2010
2011#[derive(Clone, Copy, Debug, PartialEq, Eq)]
2012pub enum EntryKind {
2013 PendingDir,
2014 Dir,
2015 File(CharBag),
2016}
2017
2018#[derive(Clone, Copy, Debug)]
2019pub enum PathChange {
2020 Added,
2021 Removed,
2022 Updated,
2023 AddedOrUpdated,
2024}
2025
2026impl Entry {
2027 fn new(
2028 path: Arc<Path>,
2029 metadata: &fs::Metadata,
2030 next_entry_id: &AtomicUsize,
2031 root_char_bag: CharBag,
2032 ) -> Self {
2033 Self {
2034 id: ProjectEntryId::new(next_entry_id),
2035 kind: if metadata.is_dir {
2036 EntryKind::PendingDir
2037 } else {
2038 EntryKind::File(char_bag_for_path(root_char_bag, &path))
2039 },
2040 path,
2041 inode: metadata.inode,
2042 mtime: metadata.mtime,
2043 is_symlink: metadata.is_symlink,
2044 is_ignored: false,
2045 }
2046 }
2047
2048 pub fn is_dir(&self) -> bool {
2049 matches!(self.kind, EntryKind::Dir | EntryKind::PendingDir)
2050 }
2051
2052 pub fn is_file(&self) -> bool {
2053 matches!(self.kind, EntryKind::File(_))
2054 }
2055}
2056
2057impl sum_tree::Item for Entry {
2058 type Summary = EntrySummary;
2059
2060 fn summary(&self) -> Self::Summary {
2061 let visible_count = if self.is_ignored { 0 } else { 1 };
2062 let file_count;
2063 let visible_file_count;
2064 if self.is_file() {
2065 file_count = 1;
2066 visible_file_count = visible_count;
2067 } else {
2068 file_count = 0;
2069 visible_file_count = 0;
2070 }
2071
2072 EntrySummary {
2073 max_path: self.path.clone(),
2074 count: 1,
2075 visible_count,
2076 file_count,
2077 visible_file_count,
2078 }
2079 }
2080}
2081
2082impl sum_tree::KeyedItem for Entry {
2083 type Key = PathKey;
2084
2085 fn key(&self) -> Self::Key {
2086 PathKey(self.path.clone())
2087 }
2088}
2089
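/// Aggregate data maintained for each node of the `entries_by_path` tree: the
/// right-most path in the subtree, plus counts of all entries, non-ignored
/// ("visible") entries, files, and non-ignored files.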
2090#[derive(Clone, Debug)]
2091pub struct EntrySummary {
2092 max_path: Arc<Path>,
2093 count: usize,
2094 visible_count: usize,
2095 file_count: usize,
2096 visible_file_count: usize,
2097}
2098
2099impl Default for EntrySummary {
2100 fn default() -> Self {
2101 Self {
2102 max_path: Arc::from(Path::new("")),
2103 count: 0,
2104 visible_count: 0,
2105 file_count: 0,
2106 visible_file_count: 0,
2107 }
2108 }
2109}
2110
2111impl sum_tree::Summary for EntrySummary {
2112 type Context = ();
2113
2114 fn add_summary(&mut self, rhs: &Self, _: &()) {
2115 self.max_path = rhs.max_path.clone();
2116 self.count += rhs.count;
2117 self.visible_count += rhs.visible_count;
2118 self.file_count += rhs.file_count;
2119 self.visible_file_count += rhs.visible_file_count;
2120 }
2121}
2122
2123#[derive(Clone, Debug)]
2124struct PathEntry {
2125 id: ProjectEntryId,
2126 path: Arc<Path>,
2127 is_ignored: bool,
2128 scan_id: usize,
2129}
2130
2131impl sum_tree::Item for PathEntry {
2132 type Summary = PathEntrySummary;
2133
2134 fn summary(&self) -> Self::Summary {
2135 PathEntrySummary { max_id: self.id }
2136 }
2137}
2138
2139impl sum_tree::KeyedItem for PathEntry {
2140 type Key = ProjectEntryId;
2141
2142 fn key(&self) -> Self::Key {
2143 self.id
2144 }
2145}
2146
2147#[derive(Clone, Debug, Default)]
2148struct PathEntrySummary {
2149 max_id: ProjectEntryId,
2150}
2151
2152impl sum_tree::Summary for PathEntrySummary {
2153 type Context = ();
2154
2155 fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
2156 self.max_id = summary.max_id;
2157 }
2158}
2159
2160impl<'a> sum_tree::Dimension<'a, PathEntrySummary> for ProjectEntryId {
2161 fn add_summary(&mut self, summary: &'a PathEntrySummary, _: &()) {
2162 *self = summary.max_id;
2163 }
2164}
2165
2166#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
2167pub struct PathKey(Arc<Path>);
2168
2169impl Default for PathKey {
2170 fn default() -> Self {
2171 Self(Path::new("").into())
2172 }
2173}
2174
2175impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey {
2176 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2177 self.0 = summary.max_path.clone();
2178 }
2179}
2180
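/// Performs the initial scan of a local worktree and applies subsequent
/// file-system events to the shared `snapshot`, reporting progress to the
/// worktree through the `notify` channel as `ScanState` values and recording
/// per-path changes in `changes`.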
2181struct BackgroundScanner {
2182 fs: Arc<dyn Fs>,
2183 snapshot: Arc<Mutex<LocalSnapshot>>,
2184 changes: HashMap<Arc<Path>, PathChange>,
2185 notify: UnboundedSender<ScanState>,
2186 executor: Arc<executor::Background>,
2187}
2188
2189impl BackgroundScanner {
2190 fn new(
2191 snapshot: Arc<Mutex<LocalSnapshot>>,
2192 notify: UnboundedSender<ScanState>,
2193 fs: Arc<dyn Fs>,
2194 executor: Arc<executor::Background>,
2195 ) -> Self {
2196 Self {
2197 fs,
2198 snapshot,
2199 notify,
2200 executor,
2201 changes: Default::default(),
2202 }
2203 }
2204
2205 fn abs_path(&self) -> Arc<Path> {
2206 self.snapshot.lock().abs_path.clone()
2207 }
2208
2209 async fn run(mut self, events_rx: impl Stream<Item = Vec<fsevent::Event>>) {
2210 // While performing the initial scan, send a new snapshot to the main
2211 // thread on a recurring interval.
2212 let initializing_task = self.executor.spawn({
2213 let executor = self.executor.clone();
2214 let snapshot = self.snapshot.clone();
2215 let notify = self.notify.clone();
2216 let is_fake_fs = self.fs.is_fake();
2217 async move {
2218 loop {
2219 if is_fake_fs {
2220 #[cfg(any(test, feature = "test-support"))]
2221 executor.simulate_random_delay().await;
2222 } else {
2223 smol::Timer::after(Duration::from_millis(100)).await;
2224 }
2225
2227 if notify
2228 .unbounded_send(ScanState::Initializing(snapshot.lock().clone()))
2229 .is_err()
2230 {
2231 break;
2232 }
2233 }
2234 }
2235 });
2236
2237 // Scan the entire directory.
2238 if let Err(err) = self.scan_dirs().await {
2239 if self
2240 .notify
2241 .unbounded_send(ScanState::Err(Arc::new(err)))
2242 .is_err()
2243 {
2244 return;
2245 }
2246 }
2247
2248 drop(initializing_task);
2249
2250 if self
2251 .notify
2252 .unbounded_send(ScanState::Initialized(self.snapshot.lock().clone()))
2253 .is_err()
2254 {
2255 return;
2256 }
2257
2258 futures::pin_mut!(events_rx);
2259
2260 // Process any events that occurred while performing the initial scan. These
2261 // events can't be reported as precisely, because there is no snapshot of the
2262 // worktree before they occurred.
2263 if let Some(mut events) = events_rx.next().await {
2264 while let Poll::Ready(Some(additional_events)) = futures::poll!(events_rx.next()) {
2265 events.extend(additional_events);
2266 }
2267 if self.notify.unbounded_send(ScanState::Updating).is_err() {
2268 return;
2269 }
2270 if !self.process_events(events, true).await {
2271 return;
2272 }
2273 if self
2274 .notify
2275 .unbounded_send(ScanState::Updated(
2276 self.snapshot.lock().clone(),
2277 mem::take(&mut self.changes),
2278 ))
2279 .is_err()
2280 {
2281 return;
2282 }
2283 }
2284
2285 // Continue processing events until the worktree is dropped.
2286 while let Some(mut events) = events_rx.next().await {
2287 while let Poll::Ready(Some(additional_events)) = futures::poll!(events_rx.next()) {
2288 events.extend(additional_events);
2289 }
2290 if self.notify.unbounded_send(ScanState::Updating).is_err() {
2291 return;
2292 }
2293 if !self.process_events(events, false).await {
2294 return;
2295 }
2296 if self
2297 .notify
2298 .unbounded_send(ScanState::Updated(
2299 self.snapshot.lock().clone(),
2300 mem::take(&mut self.changes),
2301 ))
2302 .is_err()
2303 {
2304 return;
2305 }
2306 }
2307 }
2308
2309 async fn scan_dirs(&mut self) -> Result<()> {
2310 let root_char_bag;
2311 let root_abs_path;
2312 let root_inode;
2313 let is_dir;
2314 let next_entry_id;
2315 {
2316 let mut snapshot = self.snapshot.lock();
2317 snapshot.scan_started();
2318 root_char_bag = snapshot.root_char_bag;
2319 root_abs_path = snapshot.abs_path.clone();
2320 root_inode = snapshot.root_entry().map(|e| e.inode);
2321 is_dir = snapshot.root_entry().map_or(false, |e| e.is_dir());
2322 next_entry_id = snapshot.next_entry_id.clone();
2323 };
2324
2325 // Populate ignores above the root.
2326 for ancestor in root_abs_path.ancestors().skip(1) {
2327 if let Ok(ignore) = build_gitignore(&ancestor.join(&*GITIGNORE), self.fs.as_ref()).await
2328 {
2329 self.snapshot
2330 .lock()
2331 .ignores_by_parent_abs_path
2332 .insert(ancestor.into(), (ignore.into(), 0));
2333 }
2334 }
2335
2336 let ignore_stack = {
2337 let mut snapshot = self.snapshot.lock();
2338 let ignore_stack = snapshot.ignore_stack_for_abs_path(&root_abs_path, true);
2339 if ignore_stack.is_all() {
2340 if let Some(mut root_entry) = snapshot.root_entry().cloned() {
2341 root_entry.is_ignored = true;
2342 snapshot.insert_entry(root_entry, self.fs.as_ref());
2343 }
2344 }
2345 ignore_stack
2346 };
2347
2348 if is_dir {
2349 let path: Arc<Path> = Arc::from(Path::new(""));
2350 let mut ancestor_inodes = TreeSet::default();
2351 if let Some(root_inode) = root_inode {
2352 ancestor_inodes.insert(root_inode);
2353 }
2354
2355 let (tx, rx) = channel::unbounded();
2356 self.executor
2357 .block(tx.send(ScanJob {
2358 abs_path: root_abs_path.to_path_buf(),
2359 path,
2360 ignore_stack,
2361 ancestor_inodes,
2362 scan_queue: tx.clone(),
2363 }))
2364 .unwrap();
2365 drop(tx);
2366
2367 self.executor
2368 .scoped(|scope| {
2369 for _ in 0..self.executor.num_cpus() {
2370 scope.spawn(async {
2371 while let Ok(job) = rx.recv().await {
2372 if let Err(err) = self
2373 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2374 .await
2375 {
2376 log::error!("error scanning {:?}: {}", job.abs_path, err);
2377 }
2378 }
2379 });
2380 }
2381 })
2382 .await;
2383
2384 self.snapshot.lock().scan_completed();
2385 }
2386
2387 Ok(())
2388 }
2389
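    // Scans a single directory: builds an entry for each child, loads any
    // `.gitignore` found there, and enqueues scan jobs for child directories,
    // skipping directories whose inode already appears among their ancestors
    // (i.e. recursive symlinks).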
2390 async fn scan_dir(
2391 &self,
2392 root_char_bag: CharBag,
2393 next_entry_id: Arc<AtomicUsize>,
2394 job: &ScanJob,
2395 ) -> Result<()> {
2396 let mut new_entries: Vec<Entry> = Vec::new();
2397 let mut new_jobs: Vec<Option<ScanJob>> = Vec::new();
2398 let mut ignore_stack = job.ignore_stack.clone();
2399 let mut new_ignore = None;
2400
2401 let mut child_paths = self.fs.read_dir(&job.abs_path).await?;
2402 while let Some(child_abs_path) = child_paths.next().await {
2403 let child_abs_path = match child_abs_path {
2404 Ok(child_abs_path) => child_abs_path,
2405 Err(error) => {
2406 log::error!("error processing entry {:?}", error);
2407 continue;
2408 }
2409 };
2410
2411 let child_name = child_abs_path.file_name().unwrap();
2412 let child_path: Arc<Path> = job.path.join(child_name).into();
2413 let child_metadata = match self.fs.metadata(&child_abs_path).await {
2414 Ok(Some(metadata)) => metadata,
2415 Ok(None) => continue,
2416 Err(err) => {
2417 log::error!("error processing {:?}: {:?}", child_abs_path, err);
2418 continue;
2419 }
2420 };
2421
2422 // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
2423 if child_name == *GITIGNORE {
2424 match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
2425 Ok(ignore) => {
2426 let ignore = Arc::new(ignore);
2427 ignore_stack =
2428 ignore_stack.append(job.abs_path.as_path().into(), ignore.clone());
2429 new_ignore = Some(ignore);
2430 }
2431 Err(error) => {
2432 log::error!(
2433 "error loading .gitignore file {:?} - {:?}",
2434 child_name,
2435 error
2436 );
2437 }
2438 }
2439
                // Update ignore status of any child entries we've already processed to reflect the
                // ignore file in the current directory. Because `.gitignore` starts with a `.`, it is
                // usually one of the first entries processed in each directory, so there should rarely
                // be many entries to revisit. Also update the ignore stack associated with any new jobs.
2444 let mut new_jobs = new_jobs.iter_mut();
2445 for entry in &mut new_entries {
2446 let entry_abs_path = self.abs_path().join(&entry.path);
2447 entry.is_ignored =
2448 ignore_stack.is_abs_path_ignored(&entry_abs_path, entry.is_dir());
2449
2450 if entry.is_dir() {
2451 if let Some(job) = new_jobs.next().expect("Missing scan job for entry") {
2452 job.ignore_stack = if entry.is_ignored {
2453 IgnoreStack::all()
2454 } else {
2455 ignore_stack.clone()
2456 };
2457 }
2458 }
2459 }
2460 }
2461
2462 let mut child_entry = Entry::new(
2463 child_path.clone(),
2464 &child_metadata,
2465 &next_entry_id,
2466 root_char_bag,
2467 );
2468
2469 if child_entry.is_dir() {
2470 let is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true);
2471 child_entry.is_ignored = is_ignored;
2472
                // Avoid recursing indefinitely (and eventually crashing) when a directory
                // contains a recursive symlink back to one of its ancestors.
2474 if !job.ancestor_inodes.contains(&child_entry.inode) {
2475 let mut ancestor_inodes = job.ancestor_inodes.clone();
2476 ancestor_inodes.insert(child_entry.inode);
2477
2478 new_jobs.push(Some(ScanJob {
2479 abs_path: child_abs_path,
2480 path: child_path,
2481 ignore_stack: if is_ignored {
2482 IgnoreStack::all()
2483 } else {
2484 ignore_stack.clone()
2485 },
2486 ancestor_inodes,
2487 scan_queue: job.scan_queue.clone(),
2488 }));
2489 } else {
2490 new_jobs.push(None);
2491 }
2492 } else {
2493 child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
2494 }
2495
2496 new_entries.push(child_entry);
2497 }
2498
2499 self.snapshot.lock().populate_dir(
2500 job.path.clone(),
2501 new_entries,
2502 new_ignore,
2503 self.fs.as_ref(),
2504 );
2505
2506 for new_job in new_jobs {
2507 if let Some(new_job) = new_job {
2508 job.scan_queue.send(new_job).await.unwrap();
2509 }
2510 }
2511
2512 Ok(())
2513 }
2514
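    // Applies a batch of file-system events to the snapshot: affected paths are
    // removed and re-inserted from fresh metadata, newly created directories are
    // rescanned, and ignore statuses and git repositories are updated afterwards.
    // Returns false if the root path can no longer be canonicalized.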
2515 async fn process_events(
2516 &mut self,
2517 mut events: Vec<fsevent::Event>,
2518 received_before_initialized: bool,
2519 ) -> bool {
2520 events.sort_unstable_by(|a, b| a.path.cmp(&b.path));
2521 events.dedup_by(|a, b| a.path.starts_with(&b.path));
2522
2523 let root_char_bag;
2524 let root_abs_path;
2525 let next_entry_id;
2526 let prev_snapshot;
2527 {
2528 let mut snapshot = self.snapshot.lock();
2529 prev_snapshot = snapshot.snapshot.clone();
2530 root_char_bag = snapshot.root_char_bag;
2531 root_abs_path = snapshot.abs_path.clone();
2532 next_entry_id = snapshot.next_entry_id.clone();
2533 snapshot.scan_started();
2534 }
2535
2536 let root_canonical_path = if let Ok(path) = self.fs.canonicalize(&root_abs_path).await {
2537 path
2538 } else {
2539 return false;
2540 };
2541 let metadata = futures::future::join_all(
2542 events
2543 .iter()
2544 .map(|event| self.fs.metadata(&event.path))
2545 .collect::<Vec<_>>(),
2546 )
2547 .await;
2548
        // Hold the snapshot lock while removing and re-inserting the root entry for
        // each event. This way, the intermediate state is not observable by the
        // foreground thread while this operation is in progress.
2552 let mut event_paths = Vec::with_capacity(events.len());
2553 let (scan_queue_tx, scan_queue_rx) = channel::unbounded();
2554 {
2555 let mut snapshot = self.snapshot.lock();
2556 for event in &events {
2557 if let Ok(path) = event.path.strip_prefix(&root_canonical_path) {
2558 snapshot.remove_path(path);
2559 }
2560 }
2561
2562 for (event, metadata) in events.into_iter().zip(metadata.into_iter()) {
2563 let path: Arc<Path> = match event.path.strip_prefix(&root_canonical_path) {
2564 Ok(path) => Arc::from(path.to_path_buf()),
2565 Err(_) => {
2566 log::error!(
2567 "unexpected event {:?} for root path {:?}",
2568 event.path,
2569 root_canonical_path
2570 );
2571 continue;
2572 }
2573 };
2574 event_paths.push(path.clone());
2575 let abs_path = root_abs_path.join(&path);
2576
2577 match metadata {
2578 Ok(Some(metadata)) => {
2579 let ignore_stack =
2580 snapshot.ignore_stack_for_abs_path(&abs_path, metadata.is_dir);
2581 let mut fs_entry = Entry::new(
2582 path.clone(),
2583 &metadata,
2584 snapshot.next_entry_id.as_ref(),
2585 snapshot.root_char_bag,
2586 );
2587 fs_entry.is_ignored = ignore_stack.is_all();
2588 snapshot.insert_entry(fs_entry, self.fs.as_ref());
2589
2590 let scan_id = snapshot.scan_id;
2591 if let Some(repo) = snapshot.in_dot_git(&path) {
2592 repo.repo.lock().reload_index();
2593 repo.scan_id = scan_id;
2594 }
2595
2596 let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path);
2597 if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) {
2598 ancestor_inodes.insert(metadata.inode);
2599 self.executor
2600 .block(scan_queue_tx.send(ScanJob {
2601 abs_path,
2602 path,
2603 ignore_stack,
2604 ancestor_inodes,
2605 scan_queue: scan_queue_tx.clone(),
2606 }))
2607 .unwrap();
2608 }
2609 }
2610 Ok(None) => {}
2611 Err(err) => {
2612 // TODO - create a special 'error' entry in the entries tree to mark this
2613 log::error!("error reading file on event {:?}", err);
2614 }
2615 }
2616 }
2617 drop(scan_queue_tx);
2618 }
2619
2620 // Scan any directories that were created as part of this event batch.
2621 self.executor
2622 .scoped(|scope| {
2623 for _ in 0..self.executor.num_cpus() {
2624 scope.spawn(async {
2625 while let Ok(job) = scan_queue_rx.recv().await {
2626 if let Err(err) = self
2627 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2628 .await
2629 {
2630 log::error!("error scanning {:?}: {}", job.abs_path, err);
2631 }
2632 }
2633 });
2634 }
2635 })
2636 .await;
2637
2638 // Attempt to detect renames only over a single batch of file-system events.
2639 self.snapshot.lock().removed_entry_ids.clear();
2640
2641 self.update_ignore_statuses().await;
2642 self.update_git_repositories();
2643 self.build_change_set(prev_snapshot, event_paths, received_before_initialized);
2644 self.snapshot.lock().scan_completed();
2645 true
2646 }
2647
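    // Re-evaluates ignore status beneath any `.gitignore` that changed during this
    // scan, and drops cached ignores whose `.gitignore` files no longer exist.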
2648 async fn update_ignore_statuses(&self) {
2649 let mut snapshot = self.snapshot.lock().clone();
2650 let mut ignores_to_update = Vec::new();
2651 let mut ignores_to_delete = Vec::new();
2652 for (parent_abs_path, (_, scan_id)) in &snapshot.ignores_by_parent_abs_path {
2653 if let Ok(parent_path) = parent_abs_path.strip_prefix(&snapshot.abs_path) {
2654 if *scan_id == snapshot.scan_id && snapshot.entry_for_path(parent_path).is_some() {
2655 ignores_to_update.push(parent_abs_path.clone());
2656 }
2657
2658 let ignore_path = parent_path.join(&*GITIGNORE);
2659 if snapshot.entry_for_path(ignore_path).is_none() {
2660 ignores_to_delete.push(parent_abs_path.clone());
2661 }
2662 }
2663 }
2664
2665 for parent_abs_path in ignores_to_delete {
2666 snapshot.ignores_by_parent_abs_path.remove(&parent_abs_path);
2667 self.snapshot
2668 .lock()
2669 .ignores_by_parent_abs_path
2670 .remove(&parent_abs_path);
2671 }
2672
2673 let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded();
2674 ignores_to_update.sort_unstable();
2675 let mut ignores_to_update = ignores_to_update.into_iter().peekable();
2676 while let Some(parent_abs_path) = ignores_to_update.next() {
2677 while ignores_to_update
2678 .peek()
2679 .map_or(false, |p| p.starts_with(&parent_abs_path))
2680 {
2681 ignores_to_update.next().unwrap();
2682 }
2683
2684 let ignore_stack = snapshot.ignore_stack_for_abs_path(&parent_abs_path, true);
2685 ignore_queue_tx
2686 .send(UpdateIgnoreStatusJob {
2687 abs_path: parent_abs_path,
2688 ignore_stack,
2689 ignore_queue: ignore_queue_tx.clone(),
2690 })
2691 .await
2692 .unwrap();
2693 }
2694 drop(ignore_queue_tx);
2695
2696 self.executor
2697 .scoped(|scope| {
2698 for _ in 0..self.executor.num_cpus() {
2699 scope.spawn(async {
2700 while let Ok(job) = ignore_queue_rx.recv().await {
2701 self.update_ignore_status(job, &snapshot).await;
2702 }
2703 });
2704 }
2705 })
2706 .await;
2707 }
2708
2709 fn update_git_repositories(&self) {
2710 let mut snapshot = self.snapshot.lock();
2711 let mut git_repositories = mem::take(&mut snapshot.git_repositories);
2712 git_repositories.retain(|repo| snapshot.entry_for_path(&repo.git_dir_path).is_some());
2713 snapshot.git_repositories = git_repositories;
2714 }
2715
2716 async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) {
2717 let mut ignore_stack = job.ignore_stack;
2718 if let Some((ignore, _)) = snapshot.ignores_by_parent_abs_path.get(&job.abs_path) {
2719 ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone());
2720 }
2721
2722 let mut entries_by_id_edits = Vec::new();
2723 let mut entries_by_path_edits = Vec::new();
2724 let path = job.abs_path.strip_prefix(&snapshot.abs_path).unwrap();
2725 for mut entry in snapshot.child_entries(path).cloned() {
2726 let was_ignored = entry.is_ignored;
2727 let abs_path = self.abs_path().join(&entry.path);
2728 entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir());
2729 if entry.is_dir() {
2730 let child_ignore_stack = if entry.is_ignored {
2731 IgnoreStack::all()
2732 } else {
2733 ignore_stack.clone()
2734 };
2735 job.ignore_queue
2736 .send(UpdateIgnoreStatusJob {
2737 abs_path: abs_path.into(),
2738 ignore_stack: child_ignore_stack,
2739 ignore_queue: job.ignore_queue.clone(),
2740 })
2741 .await
2742 .unwrap();
2743 }
2744
2745 if entry.is_ignored != was_ignored {
2746 let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone();
2747 path_entry.scan_id = snapshot.scan_id;
2748 path_entry.is_ignored = entry.is_ignored;
2749 entries_by_id_edits.push(Edit::Insert(path_entry));
2750 entries_by_path_edits.push(Edit::Insert(entry));
2751 }
2752 }
2753
2754 let mut snapshot = self.snapshot.lock();
2755 snapshot.entries_by_path.edit(entries_by_path_edits, &());
2756 snapshot.entries_by_id.edit(entries_by_id_edits, &());
2757 }
2758
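    // Compares the previous and current snapshots around each changed path, walking
    // both entry trees in path order and recording an Added, Removed, Updated, or
    // AddedOrUpdated change for every entry that differs.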
2759 fn build_change_set(
2760 &mut self,
2761 old_snapshot: Snapshot,
2762 event_paths: Vec<Arc<Path>>,
2763 received_before_initialized: bool,
2764 ) {
2765 let new_snapshot = self.snapshot.lock();
2766 let mut old_paths = old_snapshot.entries_by_path.cursor::<PathKey>();
2767 let mut new_paths = new_snapshot.entries_by_path.cursor::<PathKey>();
2768
2769 use PathChange::{Added, AddedOrUpdated, Removed, Updated};
2770
2771 for path in event_paths {
2772 let path = PathKey(path);
2773 old_paths.seek(&path, Bias::Left, &());
2774 new_paths.seek(&path, Bias::Left, &());
2775
2776 loop {
2777 match (old_paths.item(), new_paths.item()) {
2778 (Some(old_entry), Some(new_entry)) => {
2779 if old_entry.path > path.0
2780 && new_entry.path > path.0
2781 && !old_entry.path.starts_with(&path.0)
2782 && !new_entry.path.starts_with(&path.0)
2783 {
2784 break;
2785 }
2786
2787 match Ord::cmp(&old_entry.path, &new_entry.path) {
2788 Ordering::Less => {
2789 self.changes.insert(old_entry.path.clone(), Removed);
2790 old_paths.next(&());
2791 }
2792 Ordering::Equal => {
2793 if received_before_initialized {
2794 // If the worktree was not fully initialized when this event was generated,
2795 // we can't know whether this entry was added during the scan or whether
2796 // it was merely updated.
2797 self.changes.insert(old_entry.path.clone(), AddedOrUpdated);
2798 } else if old_entry.mtime != new_entry.mtime {
2799 self.changes.insert(old_entry.path.clone(), Updated);
2800 }
2801 old_paths.next(&());
2802 new_paths.next(&());
2803 }
2804 Ordering::Greater => {
2805 self.changes.insert(new_entry.path.clone(), Added);
2806 new_paths.next(&());
2807 }
2808 }
2809 }
2810 (Some(old_entry), None) => {
2811 self.changes.insert(old_entry.path.clone(), Removed);
2812 old_paths.next(&());
2813 }
2814 (None, Some(new_entry)) => {
2815 self.changes.insert(new_entry.path.clone(), Added);
2816 new_paths.next(&());
2817 }
2818 (None, None) => break,
2819 }
2820 }
2821 }
2822 }
2823}
2824
2825fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
2826 let mut result = root_char_bag;
2827 result.extend(
2828 path.to_string_lossy()
2829 .chars()
2830 .map(|c| c.to_ascii_lowercase()),
2831 );
2832 result
2833}
2834
2835struct ScanJob {
2836 abs_path: PathBuf,
2837 path: Arc<Path>,
2838 ignore_stack: Arc<IgnoreStack>,
2839 scan_queue: Sender<ScanJob>,
2840 ancestor_inodes: TreeSet<u64>,
2841}
2842
2843struct UpdateIgnoreStatusJob {
2844 abs_path: Arc<Path>,
2845 ignore_stack: Arc<IgnoreStack>,
2846 ignore_queue: Sender<UpdateIgnoreStatusJob>,
2847}
2848
2849pub trait WorktreeHandle {
2850 #[cfg(any(test, feature = "test-support"))]
2851 fn flush_fs_events<'a>(
2852 &self,
2853 cx: &'a gpui::TestAppContext,
2854 ) -> futures::future::LocalBoxFuture<'a, ()>;
2855}
2856
2857impl WorktreeHandle for ModelHandle<Worktree> {
    // The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
    // occurred before the worktree was constructed. These events can cause the worktree to perform
    // extra directory scans and emit extra scan-state notifications.
2861 //
2862 // This function mutates the worktree's directory and waits for those mutations to be picked up,
2863 // to ensure that all redundant FS events have already been processed.
2864 #[cfg(any(test, feature = "test-support"))]
2865 fn flush_fs_events<'a>(
2866 &self,
2867 cx: &'a gpui::TestAppContext,
2868 ) -> futures::future::LocalBoxFuture<'a, ()> {
2869 use smol::future::FutureExt;
2870
2871 let filename = "fs-event-sentinel";
2872 let tree = self.clone();
2873 let (fs, root_path) = self.read_with(cx, |tree, _| {
2874 let tree = tree.as_local().unwrap();
2875 (tree.fs.clone(), tree.abs_path().clone())
2876 });
2877
2878 async move {
2879 fs.create_file(&root_path.join(filename), Default::default())
2880 .await
2881 .unwrap();
2882 tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_some())
2883 .await;
2884
2885 fs.remove_file(&root_path.join(filename), Default::default())
2886 .await
2887 .unwrap();
2888 tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_none())
2889 .await;
2890
2891 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2892 .await;
2893 }
2894 .boxed_local()
2895 }
2896}
2897
2898#[derive(Clone, Debug)]
2899struct TraversalProgress<'a> {
2900 max_path: &'a Path,
2901 count: usize,
2902 visible_count: usize,
2903 file_count: usize,
2904 visible_file_count: usize,
2905}
2906
2907impl<'a> TraversalProgress<'a> {
2908 fn count(&self, include_dirs: bool, include_ignored: bool) -> usize {
2909 match (include_ignored, include_dirs) {
2910 (true, true) => self.count,
2911 (true, false) => self.file_count,
2912 (false, true) => self.visible_count,
2913 (false, false) => self.visible_file_count,
2914 }
2915 }
2916}
2917
2918impl<'a> sum_tree::Dimension<'a, EntrySummary> for TraversalProgress<'a> {
2919 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2920 self.max_path = summary.max_path.as_ref();
2921 self.count += summary.count;
2922 self.visible_count += summary.visible_count;
2923 self.file_count += summary.file_count;
2924 self.visible_file_count += summary.visible_file_count;
2925 }
2926}
2927
2928impl<'a> Default for TraversalProgress<'a> {
2929 fn default() -> Self {
2930 Self {
2931 max_path: Path::new(""),
2932 count: 0,
2933 visible_count: 0,
2934 file_count: 0,
2935 visible_file_count: 0,
2936 }
2937 }
2938}
2939
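/// An iterator over a snapshot's entries in path order, backed by a sum-tree
/// cursor, optionally including directories and/or ignored entries.
///
/// A minimal usage sketch, assuming an existing `snapshot` value and that the
/// second argument of `files` is a starting offset (as in the tests below):
///
/// ```ignore
/// // Visit every non-ignored file in the snapshot.
/// for entry in snapshot.files(false, 0) {
///     println!("{:?}", entry.path);
/// }
/// ```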
2940pub struct Traversal<'a> {
2941 cursor: sum_tree::Cursor<'a, Entry, TraversalProgress<'a>>,
2942 include_ignored: bool,
2943 include_dirs: bool,
2944}
2945
2946impl<'a> Traversal<'a> {
2947 pub fn advance(&mut self) -> bool {
2948 self.advance_to_offset(self.offset() + 1)
2949 }
2950
2951 pub fn advance_to_offset(&mut self, offset: usize) -> bool {
2952 self.cursor.seek_forward(
2953 &TraversalTarget::Count {
2954 count: offset,
2955 include_dirs: self.include_dirs,
2956 include_ignored: self.include_ignored,
2957 },
2958 Bias::Right,
2959 &(),
2960 )
2961 }
2962
2963 pub fn advance_to_sibling(&mut self) -> bool {
2964 while let Some(entry) = self.cursor.item() {
2965 self.cursor.seek_forward(
2966 &TraversalTarget::PathSuccessor(&entry.path),
2967 Bias::Left,
2968 &(),
2969 );
2970 if let Some(entry) = self.cursor.item() {
2971 if (self.include_dirs || !entry.is_dir())
2972 && (self.include_ignored || !entry.is_ignored)
2973 {
2974 return true;
2975 }
2976 }
2977 }
2978 false
2979 }
2980
2981 pub fn entry(&self) -> Option<&'a Entry> {
2982 self.cursor.item()
2983 }
2984
2985 pub fn offset(&self) -> usize {
2986 self.cursor
2987 .start()
2988 .count(self.include_dirs, self.include_ignored)
2989 }
2990}
2991
2992impl<'a> Iterator for Traversal<'a> {
2993 type Item = &'a Entry;
2994
2995 fn next(&mut self) -> Option<Self::Item> {
2996 if let Some(item) = self.entry() {
2997 self.advance();
2998 Some(item)
2999 } else {
3000 None
3001 }
3002 }
3003}
3004
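/// A seek target for a `Traversal` cursor: an exact path, the first entry that is
/// not the given path or one of its descendants, or an entry count computed with
/// the traversal's directory/ignored filters.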
3005#[derive(Debug)]
3006enum TraversalTarget<'a> {
3007 Path(&'a Path),
3008 PathSuccessor(&'a Path),
3009 Count {
3010 count: usize,
3011 include_ignored: bool,
3012 include_dirs: bool,
3013 },
3014}
3015
3016impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> {
3017 fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering {
3018 match self {
3019 TraversalTarget::Path(path) => path.cmp(&cursor_location.max_path),
3020 TraversalTarget::PathSuccessor(path) => {
3021 if !cursor_location.max_path.starts_with(path) {
3022 Ordering::Equal
3023 } else {
3024 Ordering::Greater
3025 }
3026 }
3027 TraversalTarget::Count {
3028 count,
3029 include_dirs,
3030 include_ignored,
3031 } => Ord::cmp(
3032 count,
3033 &cursor_location.count(*include_dirs, *include_ignored),
3034 ),
3035 }
3036 }
3037}
3038
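/// Yields the immediate children of `parent_path` by repeatedly advancing a
/// `Traversal` to the next sibling entry.
///
/// A rough sketch, assuming an existing `snapshot` value:
///
/// ```ignore
/// // List the direct children of the worktree root.
/// for child in snapshot.child_entries(Path::new("")) {
///     println!("{:?}", child.path);
/// }
/// ```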
3039struct ChildEntriesIter<'a> {
3040 parent_path: &'a Path,
3041 traversal: Traversal<'a>,
3042}
3043
3044impl<'a> Iterator for ChildEntriesIter<'a> {
3045 type Item = &'a Entry;
3046
3047 fn next(&mut self) -> Option<Self::Item> {
3048 if let Some(item) = self.traversal.entry() {
3049 if item.path.starts_with(&self.parent_path) {
3050 self.traversal.advance_to_sibling();
3051 return Some(item);
3052 }
3053 }
3054 None
3055 }
3056}
3057
3058impl<'a> From<&'a Entry> for proto::Entry {
3059 fn from(entry: &'a Entry) -> Self {
3060 Self {
3061 id: entry.id.to_proto(),
3062 is_dir: entry.is_dir(),
3063 path: entry.path.to_string_lossy().into(),
3064 inode: entry.inode,
3065 mtime: Some(entry.mtime.into()),
3066 is_symlink: entry.is_symlink,
3067 is_ignored: entry.is_ignored,
3068 }
3069 }
3070}
3071
3072impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
3073 type Error = anyhow::Error;
3074
3075 fn try_from((root_char_bag, entry): (&'a CharBag, proto::Entry)) -> Result<Self> {
3076 if let Some(mtime) = entry.mtime {
3077 let kind = if entry.is_dir {
3078 EntryKind::Dir
3079 } else {
3080 let mut char_bag = *root_char_bag;
3081 char_bag.extend(entry.path.chars().map(|c| c.to_ascii_lowercase()));
3082 EntryKind::File(char_bag)
3083 };
3084 let path: Arc<Path> = PathBuf::from(entry.path).into();
3085 Ok(Entry {
3086 id: ProjectEntryId::from_proto(entry.id),
3087 kind,
3088 path,
3089 inode: entry.inode,
3090 mtime: mtime.into(),
3091 is_symlink: entry.is_symlink,
3092 is_ignored: entry.is_ignored,
3093 })
3094 } else {
3095 Err(anyhow!(
3096 "missing mtime in remote worktree entry {:?}",
3097 entry.path
3098 ))
3099 }
3100 }
3101}
3102
3103#[cfg(test)]
3104mod tests {
3105 use super::*;
3106 use client::test::FakeHttpClient;
3107 use fs::repository::FakeGitRepository;
3108 use fs::{FakeFs, RealFs};
3109 use gpui::{executor::Deterministic, TestAppContext};
3110 use rand::prelude::*;
3111 use serde_json::json;
3112 use std::{env, fmt::Write};
3113 use util::test::temp_tree;
3114
3115 #[gpui::test]
3116 async fn test_traversal(cx: &mut TestAppContext) {
3117 let fs = FakeFs::new(cx.background());
3118 fs.insert_tree(
3119 "/root",
3120 json!({
3121 ".gitignore": "a/b\n",
3122 "a": {
3123 "b": "",
3124 "c": "",
3125 }
3126 }),
3127 )
3128 .await;
3129
3130 let http_client = FakeHttpClient::with_404_response();
3131 let client = cx.read(|cx| Client::new(http_client, cx));
3132
3133 let tree = Worktree::local(
3134 client,
3135 Arc::from(Path::new("/root")),
3136 true,
3137 fs,
3138 Default::default(),
3139 &mut cx.to_async(),
3140 )
3141 .await
3142 .unwrap();
3143 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3144 .await;
3145
3146 tree.read_with(cx, |tree, _| {
3147 assert_eq!(
3148 tree.entries(false)
3149 .map(|entry| entry.path.as_ref())
3150 .collect::<Vec<_>>(),
3151 vec![
3152 Path::new(""),
3153 Path::new(".gitignore"),
3154 Path::new("a"),
3155 Path::new("a/c"),
3156 ]
3157 );
3158 assert_eq!(
3159 tree.entries(true)
3160 .map(|entry| entry.path.as_ref())
3161 .collect::<Vec<_>>(),
3162 vec![
3163 Path::new(""),
3164 Path::new(".gitignore"),
3165 Path::new("a"),
3166 Path::new("a/b"),
3167 Path::new("a/c"),
3168 ]
3169 );
3170 })
3171 }
3172
3173 #[gpui::test(iterations = 10)]
3174 async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
3175 let fs = FakeFs::new(cx.background());
3176 fs.insert_tree(
3177 "/root",
3178 json!({
3179 "lib": {
3180 "a": {
3181 "a.txt": ""
3182 },
3183 "b": {
3184 "b.txt": ""
3185 }
3186 }
3187 }),
3188 )
3189 .await;
3190 fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
3191 fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
3192
3193 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3194 let tree = Worktree::local(
3195 client,
3196 Arc::from(Path::new("/root")),
3197 true,
3198 fs.clone(),
3199 Default::default(),
3200 &mut cx.to_async(),
3201 )
3202 .await
3203 .unwrap();
3204
3205 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3206 .await;
3207
3208 tree.read_with(cx, |tree, _| {
3209 assert_eq!(
3210 tree.entries(false)
3211 .map(|entry| entry.path.as_ref())
3212 .collect::<Vec<_>>(),
3213 vec![
3214 Path::new(""),
3215 Path::new("lib"),
3216 Path::new("lib/a"),
3217 Path::new("lib/a/a.txt"),
3218 Path::new("lib/a/lib"),
3219 Path::new("lib/b"),
3220 Path::new("lib/b/b.txt"),
3221 Path::new("lib/b/lib"),
3222 ]
3223 );
3224 });
3225
3226 fs.rename(
3227 Path::new("/root/lib/a/lib"),
3228 Path::new("/root/lib/a/lib-2"),
3229 Default::default(),
3230 )
3231 .await
3232 .unwrap();
3233 executor.run_until_parked();
3234 tree.read_with(cx, |tree, _| {
3235 assert_eq!(
3236 tree.entries(false)
3237 .map(|entry| entry.path.as_ref())
3238 .collect::<Vec<_>>(),
3239 vec![
3240 Path::new(""),
3241 Path::new("lib"),
3242 Path::new("lib/a"),
3243 Path::new("lib/a/a.txt"),
3244 Path::new("lib/a/lib-2"),
3245 Path::new("lib/b"),
3246 Path::new("lib/b/b.txt"),
3247 Path::new("lib/b/lib"),
3248 ]
3249 );
3250 });
3251 }
3252
3253 #[gpui::test]
3254 async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
3255 let parent_dir = temp_tree(json!({
3256 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
3257 "tree": {
3258 ".git": {},
3259 ".gitignore": "ignored-dir\n",
3260 "tracked-dir": {
3261 "tracked-file1": "",
3262 "ancestor-ignored-file1": "",
3263 },
3264 "ignored-dir": {
3265 "ignored-file1": ""
3266 }
3267 }
3268 }));
3269 let dir = parent_dir.path().join("tree");
3270
3271 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3272
3273 let tree = Worktree::local(
3274 client,
3275 dir.as_path(),
3276 true,
3277 Arc::new(RealFs),
3278 Default::default(),
3279 &mut cx.to_async(),
3280 )
3281 .await
3282 .unwrap();
3283 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3284 .await;
3285 tree.flush_fs_events(cx).await;
3286 cx.read(|cx| {
3287 let tree = tree.read(cx);
3288 assert!(
3289 !tree
3290 .entry_for_path("tracked-dir/tracked-file1")
3291 .unwrap()
3292 .is_ignored
3293 );
3294 assert!(
3295 tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
3296 .unwrap()
3297 .is_ignored
3298 );
3299 assert!(
3300 tree.entry_for_path("ignored-dir/ignored-file1")
3301 .unwrap()
3302 .is_ignored
3303 );
3304 });
3305
3306 std::fs::write(dir.join("tracked-dir/tracked-file2"), "").unwrap();
3307 std::fs::write(dir.join("tracked-dir/ancestor-ignored-file2"), "").unwrap();
3308 std::fs::write(dir.join("ignored-dir/ignored-file2"), "").unwrap();
3309 tree.flush_fs_events(cx).await;
3310 cx.read(|cx| {
3311 let tree = tree.read(cx);
3312 assert!(
3313 !tree
3314 .entry_for_path("tracked-dir/tracked-file2")
3315 .unwrap()
3316 .is_ignored
3317 );
3318 assert!(
3319 tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
3320 .unwrap()
3321 .is_ignored
3322 );
3323 assert!(
3324 tree.entry_for_path("ignored-dir/ignored-file2")
3325 .unwrap()
3326 .is_ignored
3327 );
3328 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
3329 });
3330 }
3331
3332 #[gpui::test]
3333 async fn test_git_repository_for_path(cx: &mut TestAppContext) {
3334 let root = temp_tree(json!({
3335 "dir1": {
3336 ".git": {},
3337 "deps": {
3338 "dep1": {
3339 ".git": {},
3340 "src": {
3341 "a.txt": ""
3342 }
3343 }
3344 },
3345 "src": {
3346 "b.txt": ""
3347 }
3348 },
3349 "c.txt": "",
3350 }));
3351
3352 let http_client = FakeHttpClient::with_404_response();
3353 let client = cx.read(|cx| Client::new(http_client, cx));
3354 let tree = Worktree::local(
3355 client,
3356 root.path(),
3357 true,
3358 Arc::new(RealFs),
3359 Default::default(),
3360 &mut cx.to_async(),
3361 )
3362 .await
3363 .unwrap();
3364
3365 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3366 .await;
3367 tree.flush_fs_events(cx).await;
3368
3369 tree.read_with(cx, |tree, _cx| {
3370 let tree = tree.as_local().unwrap();
3371
3372 assert!(tree.repo_for("c.txt".as_ref()).is_none());
3373
3374 let repo = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap();
3375 assert_eq!(repo.content_path.as_ref(), Path::new("dir1"));
3376 assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/.git"));
3377
3378 let repo = tree.repo_for("dir1/deps/dep1/src/a.txt".as_ref()).unwrap();
3379 assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1"));
3380 assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/deps/dep1/.git"),);
3381 });
3382
3383 let original_scan_id = tree.read_with(cx, |tree, _cx| {
3384 let tree = tree.as_local().unwrap();
3385 tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id
3386 });
3387
3388 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
3389 tree.flush_fs_events(cx).await;
3390
3391 tree.read_with(cx, |tree, _cx| {
3392 let tree = tree.as_local().unwrap();
3393 let new_scan_id = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id;
3394 assert_ne!(
3395 original_scan_id, new_scan_id,
3396 "original {original_scan_id}, new {new_scan_id}"
3397 );
3398 });
3399
3400 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
3401 tree.flush_fs_events(cx).await;
3402
3403 tree.read_with(cx, |tree, _cx| {
3404 let tree = tree.as_local().unwrap();
3405
3406 assert!(tree.repo_for("dir1/src/b.txt".as_ref()).is_none());
3407 });
3408 }
3409
3410 #[test]
3411 fn test_changed_repos() {
3412 fn fake_entry(git_dir_path: impl AsRef<Path>, scan_id: usize) -> GitRepositoryEntry {
3413 GitRepositoryEntry {
3414 repo: Arc::new(Mutex::new(FakeGitRepository::default())),
3415 scan_id,
3416 content_path: git_dir_path.as_ref().parent().unwrap().into(),
3417 git_dir_path: git_dir_path.as_ref().into(),
3418 }
3419 }
3420
3421 let prev_repos: Vec<GitRepositoryEntry> = vec![
3422 fake_entry("/.git", 0),
3423 fake_entry("/a/.git", 0),
3424 fake_entry("/a/b/.git", 0),
3425 ];
3426
3427 let new_repos: Vec<GitRepositoryEntry> = vec![
3428 fake_entry("/a/.git", 1),
3429 fake_entry("/a/b/.git", 0),
3430 fake_entry("/a/c/.git", 0),
3431 ];
3432
3433 let res = LocalWorktree::changed_repos(&prev_repos, &new_repos);
3434
3435 // Deletion retained
3436 assert!(res
3437 .iter()
3438 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/.git") && repo.scan_id == 0)
3439 .is_some());
3440
3441 // Update retained
3442 assert!(res
3443 .iter()
3444 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/.git") && repo.scan_id == 1)
3445 .is_some());
3446
3447 // Addition retained
3448 assert!(res
3449 .iter()
3450 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/c/.git") && repo.scan_id == 0)
3451 .is_some());
3452
        // No change, not retained
3454 assert!(res
3455 .iter()
3456 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/b/.git") && repo.scan_id == 0)
3457 .is_none());
3458 }
3459
3460 #[gpui::test]
3461 async fn test_write_file(cx: &mut TestAppContext) {
3462 let dir = temp_tree(json!({
3463 ".git": {},
3464 ".gitignore": "ignored-dir\n",
3465 "tracked-dir": {},
3466 "ignored-dir": {}
3467 }));
3468
3469 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3470
3471 let tree = Worktree::local(
3472 client,
3473 dir.path(),
3474 true,
3475 Arc::new(RealFs),
3476 Default::default(),
3477 &mut cx.to_async(),
3478 )
3479 .await
3480 .unwrap();
3481 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3482 .await;
3483 tree.flush_fs_events(cx).await;
3484
3485 tree.update(cx, |tree, cx| {
3486 tree.as_local().unwrap().write_file(
3487 Path::new("tracked-dir/file.txt"),
3488 "hello".into(),
3489 Default::default(),
3490 cx,
3491 )
3492 })
3493 .await
3494 .unwrap();
3495 tree.update(cx, |tree, cx| {
3496 tree.as_local().unwrap().write_file(
3497 Path::new("ignored-dir/file.txt"),
3498 "world".into(),
3499 Default::default(),
3500 cx,
3501 )
3502 })
3503 .await
3504 .unwrap();
3505
3506 tree.read_with(cx, |tree, _| {
3507 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
3508 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
3509 assert!(!tracked.is_ignored);
3510 assert!(ignored.is_ignored);
3511 });
3512 }
3513
3514 #[gpui::test(iterations = 30)]
3515 async fn test_create_directory(cx: &mut TestAppContext) {
3516 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3517
3518 let fs = FakeFs::new(cx.background());
3519 fs.insert_tree(
3520 "/a",
3521 json!({
3522 "b": {},
3523 "c": {},
3524 "d": {},
3525 }),
3526 )
3527 .await;
3528
3529 let tree = Worktree::local(
3530 client,
3531 "/a".as_ref(),
3532 true,
3533 fs,
3534 Default::default(),
3535 &mut cx.to_async(),
3536 )
3537 .await
3538 .unwrap();
3539
3540 let entry = tree
3541 .update(cx, |tree, cx| {
3542 tree.as_local_mut()
3543 .unwrap()
3544 .create_entry("a/e".as_ref(), true, cx)
3545 })
3546 .await
3547 .unwrap();
3548 assert!(entry.is_dir());
3549
3550 cx.foreground().run_until_parked();
3551 tree.read_with(cx, |tree, _| {
3552 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
3553 });
3554 }
3555
3556 #[gpui::test(iterations = 100)]
3557 async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
3558 let operations = env::var("OPERATIONS")
3559 .map(|o| o.parse().unwrap())
3560 .unwrap_or(40);
3561 let initial_entries = env::var("INITIAL_ENTRIES")
3562 .map(|o| o.parse().unwrap())
3563 .unwrap_or(20);
3564
3565 let root_dir = Path::new("/test");
        let fs: Arc<dyn Fs> = FakeFs::new(cx.background());
3567 fs.as_fake().insert_tree(root_dir, json!({})).await;
3568 for _ in 0..initial_entries {
3569 randomly_mutate_tree(&fs, root_dir, 1.0, &mut rng).await;
3570 }
3571 log::info!("generated initial tree");
3572
3573 let next_entry_id = Arc::new(AtomicUsize::default());
3574 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3575 let worktree = Worktree::local(
3576 client.clone(),
3577 root_dir,
3578 true,
3579 fs.clone(),
3580 next_entry_id.clone(),
3581 &mut cx.to_async(),
3582 )
3583 .await
3584 .unwrap();
3585
3586 worktree
3587 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
3588 .await;
3589
3590 // After the initial scan is complete, the `UpdatedEntries` event can
3591 // be used to follow along with all changes to the worktree's snapshot.
3592 worktree.update(cx, |tree, cx| {
3593 let mut paths = tree
3594 .as_local()
3595 .unwrap()
3596 .paths()
3597 .cloned()
3598 .collect::<Vec<_>>();
3599
3600 cx.subscribe(&worktree, move |tree, _, event, _| {
3601 if let Event::UpdatedEntries(changes) = event {
3602 for (path, change_type) in changes.iter() {
3603 let path = path.clone();
3604 let ix = match paths.binary_search(&path) {
3605 Ok(ix) | Err(ix) => ix,
3606 };
3607 match change_type {
3608 PathChange::Added => {
3609 assert_ne!(paths.get(ix), Some(&path));
3610 paths.insert(ix, path);
3611 }
3612 PathChange::Removed => {
3613 assert_eq!(paths.get(ix), Some(&path));
3614 paths.remove(ix);
3615 }
3616 PathChange::Updated => {
3617 assert_eq!(paths.get(ix), Some(&path));
3618 }
3619 PathChange::AddedOrUpdated => {
                                if paths.get(ix) != Some(&path) {
                                    paths.insert(ix, path);
                                }
3623 }
3624 }
3625 }
3626 let new_paths = tree.paths().cloned().collect::<Vec<_>>();
3627 assert_eq!(paths, new_paths, "incorrect changes: {:?}", changes);
3628 }
3629 })
3630 .detach();
3631 });
3632
3633 let mut snapshots = Vec::new();
3634 let mut mutations_len = operations;
3635 while mutations_len > 1 {
3636 randomly_mutate_tree(&fs, root_dir, 1.0, &mut rng).await;
3637 let buffered_event_count = fs.as_fake().buffered_event_count().await;
3638 if buffered_event_count > 0 && rng.gen_bool(0.3) {
3639 let len = rng.gen_range(0..=buffered_event_count);
3640 log::info!("flushing {} events", len);
3641 fs.as_fake().flush_events(len).await;
3642 } else {
3643 randomly_mutate_tree(&fs, root_dir, 0.6, &mut rng).await;
3644 mutations_len -= 1;
3645 }
3646
3647 cx.foreground().run_until_parked();
3648 if rng.gen_bool(0.2) {
3649 log::info!("storing snapshot {}", snapshots.len());
3650 let snapshot =
3651 worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
3652 snapshots.push(snapshot);
3653 }
3654 }
3655
3656 log::info!("quiescing");
3657 fs.as_fake().flush_events(usize::MAX).await;
3658 cx.foreground().run_until_parked();
3659 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
3660 snapshot.check_invariants();
3661
3662 {
3663 let new_worktree = Worktree::local(
3664 client.clone(),
3665 root_dir,
3666 true,
3667 fs.clone(),
3668 next_entry_id,
3669 &mut cx.to_async(),
3670 )
3671 .await
3672 .unwrap();
3673 new_worktree
3674 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
3675 .await;
3676 let new_snapshot =
3677 new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
3678 assert_eq!(snapshot.to_vec(true), new_snapshot.to_vec(true));
3679 }
3680
3681 for (i, mut prev_snapshot) in snapshots.into_iter().enumerate() {
3682 let include_ignored = rng.gen::<bool>();
3683 if !include_ignored {
3684 let mut entries_by_path_edits = Vec::new();
3685 let mut entries_by_id_edits = Vec::new();
3686 for entry in prev_snapshot
3687 .entries_by_id
3688 .cursor::<()>()
3689 .filter(|e| e.is_ignored)
3690 {
3691 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
3692 entries_by_id_edits.push(Edit::Remove(entry.id));
3693 }
3694
3695 prev_snapshot
3696 .entries_by_path
3697 .edit(entries_by_path_edits, &());
3698 prev_snapshot.entries_by_id.edit(entries_by_id_edits, &());
3699 }
3700
3701 let update = snapshot.build_update(&prev_snapshot, 0, 0, include_ignored);
3702 prev_snapshot.apply_remote_update(update.clone()).unwrap();
3703 assert_eq!(
3704 prev_snapshot.to_vec(include_ignored),
3705 snapshot.to_vec(include_ignored),
3706 "wrong update for snapshot {i}. update: {:?}",
3707 update
3708 );
3709 }
3710 }
3711
3712 async fn randomly_mutate_tree(
3713 fs: &Arc<dyn Fs>,
3714 root_path: &Path,
3715 insertion_probability: f64,
3716 rng: &mut impl Rng,
3717 ) {
3718 let mut files = Vec::new();
3719 let mut dirs = Vec::new();
3720 for path in fs.as_fake().paths().await {
3721 if path.starts_with(root_path) {
3722 if fs.is_file(&path).await {
3723 files.push(path);
3724 } else {
3725 dirs.push(path);
3726 }
3727 }
3728 }
3729
3730 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
3731 let path = dirs.choose(rng).unwrap();
3732 let new_path = path.join(gen_name(rng));
3733
3734 if rng.gen() {
3735 log::info!(
3736 "creating dir {:?}",
3737 new_path.strip_prefix(root_path).unwrap()
3738 );
3739 fs.create_dir(&new_path).await.unwrap();
3740 } else {
3741 log::info!(
3742 "creating file {:?}",
3743 new_path.strip_prefix(root_path).unwrap()
3744 );
3745 fs.create_file(&new_path, Default::default()).await.unwrap();
3746 }
3747 } else if rng.gen_bool(0.05) {
3748 let ignore_dir_path = dirs.choose(rng).unwrap();
3749 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
3750
3751 let subdirs = dirs
3752 .iter()
3753 .filter(|d| d.starts_with(&ignore_dir_path))
3754 .cloned()
3755 .collect::<Vec<_>>();
3756 let subfiles = files
3757 .iter()
3758 .filter(|d| d.starts_with(&ignore_dir_path))
3759 .cloned()
3760 .collect::<Vec<_>>();
3761 let files_to_ignore = {
3762 let len = rng.gen_range(0..=subfiles.len());
3763 subfiles.choose_multiple(rng, len)
3764 };
3765 let dirs_to_ignore = {
3766 let len = rng.gen_range(0..subdirs.len());
3767 subdirs.choose_multiple(rng, len)
3768 };
3769
3770 let mut ignore_contents = String::new();
3771 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
3772 writeln!(
3773 ignore_contents,
3774 "{}",
3775 path_to_ignore
3776 .strip_prefix(&ignore_dir_path)
3777 .unwrap()
3778 .to_str()
3779 .unwrap()
3780 )
3781 .unwrap();
3782 }
3783 log::info!(
3784 "creating gitignore {:?} with contents:\n{}",
3785 ignore_path.strip_prefix(&root_path).unwrap(),
3786 ignore_contents
3787 );
3788 fs.save(
3789 &ignore_path,
3790 &ignore_contents.as_str().into(),
3791 Default::default(),
3792 )
3793 .await
3794 .unwrap();
3795 } else {
3796 let old_path = {
3797 let file_path = files.choose(rng);
3798 let dir_path = dirs[1..].choose(rng);
3799 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
3800 };
3801
3802 let is_rename = rng.gen();
3803 if is_rename {
3804 let new_path_parent = dirs
3805 .iter()
3806 .filter(|d| !d.starts_with(old_path))
3807 .choose(rng)
3808 .unwrap();
3809
3810 let overwrite_existing_dir =
3811 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
3812 let new_path = if overwrite_existing_dir {
3813 fs.remove_dir(
3814 &new_path_parent,
3815 RemoveOptions {
3816 recursive: true,
3817 ignore_if_not_exists: true,
3818 },
3819 )
3820 .await
3821 .unwrap();
3822 new_path_parent.to_path_buf()
3823 } else {
3824 new_path_parent.join(gen_name(rng))
3825 };
3826
3827 log::info!(
3828 "renaming {:?} to {}{:?}",
3829 old_path.strip_prefix(&root_path).unwrap(),
3830 if overwrite_existing_dir {
3831 "overwrite "
3832 } else {
3833 ""
3834 },
3835 new_path.strip_prefix(&root_path).unwrap()
3836 );
3837 fs.rename(
3838 &old_path,
3839 &new_path,
3840 fs::RenameOptions {
3841 overwrite: true,
3842 ignore_if_exists: true,
3843 },
3844 )
3845 .await
3846 .unwrap();
3847 } else if fs.is_file(&old_path).await {
3848 log::info!(
3849 "deleting file {:?}",
3850 old_path.strip_prefix(&root_path).unwrap()
3851 );
3852 fs.remove_file(old_path, Default::default()).await.unwrap();
3853 } else {
3854 log::info!(
3855 "deleting dir {:?}",
3856 old_path.strip_prefix(&root_path).unwrap()
3857 );
3858 fs.remove_dir(
3859 &old_path,
3860 RemoveOptions {
3861 recursive: true,
3862 ignore_if_not_exists: true,
3863 },
3864 )
3865 .await
3866 .unwrap();
3867 }
3868 }
3869 }
3870
3871 fn gen_name(rng: &mut impl Rng) -> String {
3872 (0..6)
3873 .map(|_| rng.sample(rand::distributions::Alphanumeric))
3874 .map(char::from)
3875 .collect()
3876 }
3877
3878 impl LocalSnapshot {
3879 fn check_invariants(&self) {
3880 let mut files = self.files(true, 0);
3881 let mut visible_files = self.files(false, 0);
3882 for entry in self.entries_by_path.cursor::<()>() {
3883 if entry.is_file() {
3884 assert_eq!(files.next().unwrap().inode, entry.inode);
3885 if !entry.is_ignored {
3886 assert_eq!(visible_files.next().unwrap().inode, entry.inode);
3887 }
3888 }
3889 }
3890 assert!(files.next().is_none());
3891 assert!(visible_files.next().is_none());
3892
3893 let mut bfs_paths = Vec::new();
3894 let mut stack = vec![Path::new("")];
3895 while let Some(path) = stack.pop() {
3896 bfs_paths.push(path);
3897 let ix = stack.len();
3898 for child_entry in self.child_entries(path) {
3899 stack.insert(ix, &child_entry.path);
3900 }
3901 }
3902
3903 let dfs_paths_via_iter = self
3904 .entries_by_path
3905 .cursor::<()>()
3906 .map(|e| e.path.as_ref())
3907 .collect::<Vec<_>>();
3908 assert_eq!(bfs_paths, dfs_paths_via_iter);
3909
3910 let dfs_paths_via_traversal = self
3911 .entries(true)
3912 .map(|e| e.path.as_ref())
3913 .collect::<Vec<_>>();
3914 assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);
3915
3916 for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
3917 let ignore_parent_path =
3918 ignore_parent_abs_path.strip_prefix(&self.abs_path).unwrap();
3919 assert!(self.entry_for_path(&ignore_parent_path).is_some());
3920 assert!(self
3921 .entry_for_path(ignore_parent_path.join(&*GITIGNORE))
3922 .is_some());
3923 }
3924 }
3925
3926 fn to_vec(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> {
3927 let mut paths = Vec::new();
3928 for entry in self.entries_by_path.cursor::<()>() {
3929 if include_ignored || !entry.is_ignored {
3930 paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
3931 }
3932 }
3933 paths.sort_by(|a, b| a.0.cmp(b.0));
3934 paths
3935 }
3936 }
3937}