use crate::{ProjectEntryId, RemoveOptions};

use super::{
    fs::{self, Fs},
    ignore::IgnoreStack,
    DiagnosticSummary,
};
use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{anyhow, Context, Result};
use client::{proto, Client};
use clock::ReplicaId;
use collections::{HashMap, VecDeque};
use futures::{
    channel::{
        mpsc::{self, UnboundedSender},
        oneshot,
    },
    Stream, StreamExt,
};
use fuzzy::CharBag;
use gpui::{
    executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
    Task,
};
use language::{
    proto::{deserialize_version, serialize_version},
    Buffer, DiagnosticEntry, PointUtf16, Rope,
};
use lazy_static::lazy_static;
use parking_lot::Mutex;
use postage::{
    prelude::{Sink as _, Stream as _},
    watch,
};
use smol::channel::{self, Sender};
use std::{
    any::Any,
    cmp::{self, Ordering},
    convert::TryFrom,
    ffi::{OsStr, OsString},
    fmt,
    future::Future,
    ops::{Deref, DerefMut},
    os::unix::prelude::{OsStrExt, OsStringExt},
    path::{Path, PathBuf},
    sync::{atomic::AtomicUsize, Arc},
    task::Poll,
    time::{Duration, SystemTime},
};
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap};
use util::{ResultExt, TryFutureExt};

lazy_static! {
    static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore");
}
56
57#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
58pub struct WorktreeId(usize);
59
60pub enum Worktree {
61 Local(LocalWorktree),
62 Remote(RemoteWorktree),
63}
64
65pub struct LocalWorktree {
66 snapshot: LocalSnapshot,
67 background_snapshot: Arc<Mutex<LocalSnapshot>>,
68 last_scan_state_rx: watch::Receiver<ScanState>,
69 _background_scanner_task: Option<Task<()>>,
70 poll_task: Option<Task<()>>,
71 share: Option<ShareState>,
72 diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<PointUtf16>>>,
73 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
74 client: Arc<Client>,
75 fs: Arc<dyn Fs>,
76 visible: bool,
77}
78
79pub struct RemoteWorktree {
80 pub snapshot: Snapshot,
81 pub(crate) background_snapshot: Arc<Mutex<Snapshot>>,
82 project_id: u64,
83 client: Arc<Client>,
84 updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
85 snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
86 replica_id: ReplicaId,
87 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
88 visible: bool,
89}
90
91#[derive(Clone)]
92pub struct Snapshot {
93 id: WorktreeId,
94 root_name: String,
95 root_char_bag: CharBag,
96 entries_by_path: SumTree<Entry>,
97 entries_by_id: SumTree<PathEntry>,
98 scan_id: usize,
99 is_complete: bool,
100}
101
102#[derive(Clone)]
103pub struct LocalSnapshot {
104 abs_path: Arc<Path>,
105 ignores: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
106 removed_entry_ids: HashMap<u64, ProjectEntryId>,
107 next_entry_id: Arc<AtomicUsize>,
108 snapshot: Snapshot,
109}
110
111impl Deref for LocalSnapshot {
112 type Target = Snapshot;
113
114 fn deref(&self) -> &Self::Target {
115 &self.snapshot
116 }
117}
118
119impl DerefMut for LocalSnapshot {
120 fn deref_mut(&mut self) -> &mut Self::Target {
121 &mut self.snapshot
122 }
123}
124
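/// Progress reported by the `BackgroundScanner` while it maintains the worktree's
/// snapshot.
///
/// A rough sketch of the sequence a `LocalWorktree` typically observes, based on
/// `BackgroundScanner::run` below:
///
/// ```text
/// Initializing -> Idle -> (Updating -> Idle)*   // or Err(_) if the scan fails
/// ```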
#[derive(Clone, Debug)]
enum ScanState {
    /// The worktree is idle: the most recent scan or update has completed.
    Idle,
    /// The worktree is performing its initial scan of the filesystem.
    Initializing,
    /// The worktree is updating in response to filesystem events.
    Updating,
    /// Scanning the filesystem failed with the given error.
    Err(Arc<anyhow::Error>),
}
134
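/// Present only while this local worktree is shared with remote collaborators:
/// the remote project's id, a channel on which updated snapshots are queued for
/// broadcast, and the background task that forwards them (see `LocalWorktree::share`).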
135struct ShareState {
136 project_id: u64,
137 snapshots_tx: Sender<LocalSnapshot>,
138 _maintain_remote_snapshot: Option<Task<Option<()>>>,
139}
140
141pub enum Event {
142 UpdatedEntries,
143}
144
145impl Entity for Worktree {
146 type Event = Event;
147}
148
149impl Worktree {
150 pub async fn local(
151 client: Arc<Client>,
152 path: impl Into<Arc<Path>>,
153 visible: bool,
154 fs: Arc<dyn Fs>,
155 next_entry_id: Arc<AtomicUsize>,
156 cx: &mut AsyncAppContext,
157 ) -> Result<ModelHandle<Self>> {
158 let (tree, scan_states_tx) =
159 LocalWorktree::new(client, path, visible, fs.clone(), next_entry_id, cx).await?;
160 tree.update(cx, |tree, cx| {
161 let tree = tree.as_local_mut().unwrap();
162 let abs_path = tree.abs_path().clone();
163 let background_snapshot = tree.background_snapshot.clone();
164 let background = cx.background().clone();
165 tree._background_scanner_task = Some(cx.background().spawn(async move {
166 let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
167 let scanner =
168 BackgroundScanner::new(background_snapshot, scan_states_tx, fs, background);
169 scanner.run(events).await;
170 }));
171 });
172 Ok(tree)
173 }
174
175 pub fn remote(
176 project_remote_id: u64,
177 replica_id: ReplicaId,
178 worktree: proto::WorktreeMetadata,
179 client: Arc<Client>,
180 cx: &mut MutableAppContext,
181 ) -> ModelHandle<Self> {
182 let remote_id = worktree.id;
183 let root_char_bag: CharBag = worktree
184 .root_name
185 .chars()
186 .map(|c| c.to_ascii_lowercase())
187 .collect();
188 let root_name = worktree.root_name.clone();
189 let visible = worktree.visible;
190 let snapshot = Snapshot {
191 id: WorktreeId(remote_id as usize),
192 root_name,
193 root_char_bag,
194 entries_by_path: Default::default(),
195 entries_by_id: Default::default(),
196 scan_id: 0,
197 is_complete: false,
198 };
199
200 let (updates_tx, mut updates_rx) = mpsc::unbounded();
201 let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
202 let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
203 let worktree_handle = cx.add_model(|_: &mut ModelContext<Worktree>| {
204 Worktree::Remote(RemoteWorktree {
205 project_id: project_remote_id,
206 replica_id,
207 snapshot: snapshot.clone(),
208 background_snapshot: background_snapshot.clone(),
209 updates_tx: Some(updates_tx),
210 snapshot_subscriptions: Default::default(),
211 client: client.clone(),
212 diagnostic_summaries: Default::default(),
213 visible,
214 })
215 });
216
217 cx.background()
218 .spawn(async move {
219 while let Some(update) = updates_rx.next().await {
220 if let Err(error) = background_snapshot.lock().apply_remote_update(update) {
221 log::error!("error applying worktree update: {}", error);
222 }
223 snapshot_updated_tx.send(()).await.ok();
224 }
225 })
226 .detach();
227
228 cx.spawn(|mut cx| {
229 let this = worktree_handle.downgrade();
230 async move {
231 while let Some(_) = snapshot_updated_rx.recv().await {
232 if let Some(this) = this.upgrade(&cx) {
233 this.update(&mut cx, |this, cx| {
234 this.poll_snapshot(cx);
235 let this = this.as_remote_mut().unwrap();
236 while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
237 if this.observed_snapshot(*scan_id) {
238 let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
239 let _ = tx.send(());
240 } else {
241 break;
242 }
243 }
244 });
245 } else {
246 break;
247 }
248 }
249 }
250 })
251 .detach();
252
253 worktree_handle
254 }
255
256 pub fn as_local(&self) -> Option<&LocalWorktree> {
257 if let Worktree::Local(worktree) = self {
258 Some(worktree)
259 } else {
260 None
261 }
262 }
263
264 pub fn as_remote(&self) -> Option<&RemoteWorktree> {
265 if let Worktree::Remote(worktree) = self {
266 Some(worktree)
267 } else {
268 None
269 }
270 }
271
272 pub fn as_local_mut(&mut self) -> Option<&mut LocalWorktree> {
273 if let Worktree::Local(worktree) = self {
274 Some(worktree)
275 } else {
276 None
277 }
278 }
279
280 pub fn as_remote_mut(&mut self) -> Option<&mut RemoteWorktree> {
281 if let Worktree::Remote(worktree) = self {
282 Some(worktree)
283 } else {
284 None
285 }
286 }
287
288 pub fn is_local(&self) -> bool {
289 matches!(self, Worktree::Local(_))
290 }
291
292 pub fn is_remote(&self) -> bool {
293 !self.is_local()
294 }
295
296 pub fn snapshot(&self) -> Snapshot {
297 match self {
298 Worktree::Local(worktree) => worktree.snapshot().snapshot,
299 Worktree::Remote(worktree) => worktree.snapshot(),
300 }
301 }
302
303 pub fn scan_id(&self) -> usize {
304 match self {
305 Worktree::Local(worktree) => worktree.snapshot.scan_id,
306 Worktree::Remote(worktree) => worktree.snapshot.scan_id,
307 }
308 }
309
310 pub fn is_visible(&self) -> bool {
311 match self {
312 Worktree::Local(worktree) => worktree.visible,
313 Worktree::Remote(worktree) => worktree.visible,
314 }
315 }
316
317 pub fn replica_id(&self) -> ReplicaId {
318 match self {
319 Worktree::Local(_) => 0,
320 Worktree::Remote(worktree) => worktree.replica_id,
321 }
322 }
323
324 pub fn diagnostic_summaries<'a>(
325 &'a self,
326 ) -> impl Iterator<Item = (Arc<Path>, DiagnosticSummary)> + 'a {
327 match self {
328 Worktree::Local(worktree) => &worktree.diagnostic_summaries,
329 Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
330 }
331 .iter()
332 .map(|(path, summary)| (path.0.clone(), summary.clone()))
333 }
334
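    // Copy the background scanner's snapshot into the foreground `Worktree`. While the
    // initial scan is still running, this schedules another poll (after a short delay,
    // or a simulated delay when the fake fs is used in tests); once the scan has
    // settled, it emits `Event::UpdatedEntries`. Remote worktrees just copy the
    // snapshot and emit the event.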
335 fn poll_snapshot(&mut self, cx: &mut ModelContext<Self>) {
336 match self {
337 Self::Local(worktree) => {
338 let is_fake_fs = worktree.fs.is_fake();
339 worktree.snapshot = worktree.background_snapshot.lock().clone();
340 if matches!(worktree.scan_state(), ScanState::Initializing) {
341 if worktree.poll_task.is_none() {
342 worktree.poll_task = Some(cx.spawn_weak(|this, mut cx| async move {
343 if is_fake_fs {
344 #[cfg(any(test, feature = "test-support"))]
345 cx.background().simulate_random_delay().await;
346 } else {
347 smol::Timer::after(Duration::from_millis(100)).await;
348 }
349 if let Some(this) = this.upgrade(&cx) {
350 this.update(&mut cx, |this, cx| {
351 this.as_local_mut().unwrap().poll_task = None;
352 this.poll_snapshot(cx);
353 });
354 }
355 }));
356 }
357 } else {
358 worktree.poll_task.take();
359 cx.emit(Event::UpdatedEntries);
360 }
361 }
362 Self::Remote(worktree) => {
363 worktree.snapshot = worktree.background_snapshot.lock().clone();
364 cx.emit(Event::UpdatedEntries);
365 }
366 };
367
368 cx.notify();
369 }
370}
371
372impl LocalWorktree {
373 async fn new(
374 client: Arc<Client>,
375 path: impl Into<Arc<Path>>,
376 visible: bool,
377 fs: Arc<dyn Fs>,
378 next_entry_id: Arc<AtomicUsize>,
379 cx: &mut AsyncAppContext,
380 ) -> Result<(ModelHandle<Worktree>, UnboundedSender<ScanState>)> {
381 let abs_path = path.into();
382 let path: Arc<Path> = Arc::from(Path::new(""));
383
        // Populate the snapshot's "root name" (the file name of the worktree's root
        // path), which is used for fuzzy matching.
386 let root_name = abs_path
387 .file_name()
388 .map_or(String::new(), |f| f.to_string_lossy().to_string());
389 let root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect();
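        // For example (illustrative paths only): a worktree rooted at "/projects/Zed"
        // gets the root name "Zed" and a char bag built from its lowercased form "zed".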
390 let metadata = fs
391 .metadata(&abs_path)
392 .await
393 .context("failed to stat worktree path")?;
394
395 let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
396 let (mut last_scan_state_tx, last_scan_state_rx) =
397 watch::channel_with(ScanState::Initializing);
398 let tree = cx.add_model(move |cx: &mut ModelContext<Worktree>| {
399 let mut snapshot = LocalSnapshot {
400 abs_path,
401 ignores: Default::default(),
402 removed_entry_ids: Default::default(),
403 next_entry_id,
404 snapshot: Snapshot {
405 id: WorktreeId::from_usize(cx.model_id()),
406 root_name: root_name.clone(),
407 root_char_bag,
408 entries_by_path: Default::default(),
409 entries_by_id: Default::default(),
410 scan_id: 0,
411 is_complete: true,
412 },
413 };
414 if let Some(metadata) = metadata {
415 let entry = Entry::new(
416 path.into(),
417 &metadata,
418 &snapshot.next_entry_id,
419 snapshot.root_char_bag,
420 );
421 snapshot.insert_entry(entry, fs.as_ref());
422 }
423
424 let tree = Self {
425 snapshot: snapshot.clone(),
426 background_snapshot: Arc::new(Mutex::new(snapshot)),
427 last_scan_state_rx,
428 _background_scanner_task: None,
429 share: None,
430 poll_task: None,
431 diagnostics: Default::default(),
432 diagnostic_summaries: Default::default(),
433 client,
434 fs,
435 visible,
436 };
437
438 cx.spawn_weak(|this, mut cx| async move {
439 while let Some(scan_state) = scan_states_rx.next().await {
440 if let Some(this) = this.upgrade(&cx) {
441 last_scan_state_tx.blocking_send(scan_state).ok();
442 this.update(&mut cx, |this, cx| {
443 this.poll_snapshot(cx);
444 this.as_local().unwrap().broadcast_snapshot()
445 })
446 .await;
447 } else {
448 break;
449 }
450 }
451 })
452 .detach();
453
454 Worktree::Local(tree)
455 });
456
457 Ok((tree, scan_states_tx))
458 }
459
460 pub fn contains_abs_path(&self, path: &Path) -> bool {
461 path.starts_with(&self.abs_path)
462 }
463
464 fn absolutize(&self, path: &Path) -> PathBuf {
465 if path.file_name().is_some() {
466 self.abs_path.join(path)
467 } else {
468 self.abs_path.to_path_buf()
469 }
470 }
471
472 pub(crate) fn load_buffer(
473 &mut self,
474 path: &Path,
475 cx: &mut ModelContext<Worktree>,
476 ) -> Task<Result<ModelHandle<Buffer>>> {
477 let path = Arc::from(path);
478 cx.spawn(move |this, mut cx| async move {
479 let (file, contents) = this
480 .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
481 .await?;
482 Ok(cx.add_model(|cx| Buffer::from_file(0, contents, Arc::new(file), cx)))
483 })
484 }
485
486 pub fn diagnostics_for_path(&self, path: &Path) -> Option<Vec<DiagnosticEntry<PointUtf16>>> {
487 self.diagnostics.get(path).cloned()
488 }
489
490 pub fn update_diagnostics(
491 &mut self,
492 language_server_id: usize,
493 worktree_path: Arc<Path>,
494 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
495 _: &mut ModelContext<Worktree>,
496 ) -> Result<bool> {
497 self.diagnostics.remove(&worktree_path);
498 let old_summary = self
499 .diagnostic_summaries
500 .remove(&PathKey(worktree_path.clone()))
501 .unwrap_or_default();
502 let new_summary = DiagnosticSummary::new(language_server_id, &diagnostics);
503 if !new_summary.is_empty() {
504 self.diagnostic_summaries
505 .insert(PathKey(worktree_path.clone()), new_summary);
506 self.diagnostics.insert(worktree_path.clone(), diagnostics);
507 }
508
509 let updated = !old_summary.is_empty() || !new_summary.is_empty();
510 if updated {
511 if let Some(share) = self.share.as_ref() {
512 self.client
513 .send(proto::UpdateDiagnosticSummary {
514 project_id: share.project_id,
515 worktree_id: self.id().to_proto(),
516 summary: Some(proto::DiagnosticSummary {
517 path: worktree_path.to_string_lossy().to_string(),
518 language_server_id: language_server_id as u64,
519 error_count: new_summary.error_count as u32,
520 warning_count: new_summary.warning_count as u32,
521 }),
522 })
523 .log_err();
524 }
525 }
526
527 Ok(updated)
528 }
529
530 pub fn scan_complete(&self) -> impl Future<Output = ()> {
531 let mut scan_state_rx = self.last_scan_state_rx.clone();
532 async move {
533 let mut scan_state = Some(scan_state_rx.borrow().clone());
534 while let Some(ScanState::Initializing | ScanState::Updating) = scan_state {
535 scan_state = scan_state_rx.recv().await;
536 }
537 }
538 }
539
540 fn scan_state(&self) -> ScanState {
541 self.last_scan_state_rx.borrow().clone()
542 }
543
544 pub fn snapshot(&self) -> LocalSnapshot {
545 self.snapshot.clone()
546 }
547
548 pub fn metadata_proto(&self) -> proto::WorktreeMetadata {
549 proto::WorktreeMetadata {
550 id: self.id().to_proto(),
551 root_name: self.root_name().to_string(),
552 visible: self.visible,
553 }
554 }
555
556 fn load(&self, path: &Path, cx: &mut ModelContext<Worktree>) -> Task<Result<(File, String)>> {
557 let handle = cx.handle();
558 let path = Arc::from(path);
559 let abs_path = self.absolutize(&path);
560 let fs = self.fs.clone();
561 cx.spawn(|this, mut cx| async move {
562 let text = fs.load(&abs_path).await?;
563 // Eagerly populate the snapshot with an updated entry for the loaded file
564 let entry = this
565 .update(&mut cx, |this, cx| {
566 this.as_local()
567 .unwrap()
568 .refresh_entry(path, abs_path, None, cx)
569 })
570 .await?;
571 this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
572 Ok((
573 File {
574 entry_id: Some(entry.id),
575 worktree: handle,
576 path: entry.path,
577 mtime: entry.mtime,
578 is_local: true,
579 },
580 text,
581 ))
582 })
583 }
584
585 pub fn save_buffer_as(
586 &self,
587 buffer_handle: ModelHandle<Buffer>,
588 path: impl Into<Arc<Path>>,
589 cx: &mut ModelContext<Worktree>,
590 ) -> Task<Result<()>> {
591 let buffer = buffer_handle.read(cx);
592 let text = buffer.as_rope().clone();
593 let fingerprint = text.fingerprint();
594 let version = buffer.version();
595 let save = self.write_file(path, text, cx);
596 let handle = cx.handle();
597 cx.as_mut().spawn(|mut cx| async move {
598 let entry = save.await?;
599 let file = File {
600 entry_id: Some(entry.id),
601 worktree: handle,
602 path: entry.path,
603 mtime: entry.mtime,
604 is_local: true,
605 };
606
607 buffer_handle.update(&mut cx, |buffer, cx| {
608 buffer.did_save(version, fingerprint, file.mtime, Some(Arc::new(file)), cx);
609 });
610
611 Ok(())
612 })
613 }
614
615 pub fn create_entry(
616 &self,
617 path: impl Into<Arc<Path>>,
618 is_dir: bool,
619 cx: &mut ModelContext<Worktree>,
620 ) -> Task<Result<Entry>> {
621 self.write_entry_internal(
622 path,
623 if is_dir {
624 None
625 } else {
626 Some(Default::default())
627 },
628 cx,
629 )
630 }
631
632 pub fn write_file(
633 &self,
634 path: impl Into<Arc<Path>>,
635 text: Rope,
636 cx: &mut ModelContext<Worktree>,
637 ) -> Task<Result<Entry>> {
638 self.write_entry_internal(path, Some(text), cx)
639 }
640
641 pub fn delete_entry(
642 &self,
643 entry_id: ProjectEntryId,
644 cx: &mut ModelContext<Worktree>,
645 ) -> Option<Task<Result<()>>> {
646 let entry = self.entry_for_id(entry_id)?.clone();
647 let abs_path = self.absolutize(&entry.path);
648 let delete = cx.background().spawn({
649 let fs = self.fs.clone();
650 let abs_path = abs_path.clone();
651 async move {
652 if entry.is_file() {
653 fs.remove_file(&abs_path, Default::default()).await
654 } else {
655 fs.remove_dir(
656 &abs_path,
657 RemoveOptions {
658 recursive: true,
659 ignore_if_not_exists: false,
660 },
661 )
662 .await
663 }
664 }
665 });
666
667 Some(cx.spawn(|this, mut cx| async move {
668 delete.await?;
669 this.update(&mut cx, |this, _| {
670 let this = this.as_local_mut().unwrap();
671 let mut snapshot = this.background_snapshot.lock();
672 snapshot.delete_entry(entry_id);
673 });
674 this.update(&mut cx, |this, cx| {
675 this.poll_snapshot(cx);
676 this.as_local().unwrap().broadcast_snapshot()
677 })
678 .await;
679 Ok(())
680 }))
681 }
682
683 pub fn rename_entry(
684 &self,
685 entry_id: ProjectEntryId,
686 new_path: impl Into<Arc<Path>>,
687 cx: &mut ModelContext<Worktree>,
688 ) -> Option<Task<Result<Entry>>> {
689 let old_path = self.entry_for_id(entry_id)?.path.clone();
690 let new_path = new_path.into();
691 let abs_old_path = self.absolutize(&old_path);
692 let abs_new_path = self.absolutize(&new_path);
693 let rename = cx.background().spawn({
694 let fs = self.fs.clone();
695 let abs_new_path = abs_new_path.clone();
696 async move {
697 fs.rename(&abs_old_path, &abs_new_path, Default::default())
698 .await
699 }
700 });
701
702 Some(cx.spawn(|this, mut cx| async move {
703 rename.await?;
704 let entry = this
705 .update(&mut cx, |this, cx| {
706 this.as_local_mut().unwrap().refresh_entry(
707 new_path.clone(),
708 abs_new_path,
709 Some(old_path),
710 cx,
711 )
712 })
713 .await?;
714 this.update(&mut cx, |this, cx| {
715 this.poll_snapshot(cx);
716 this.as_local().unwrap().broadcast_snapshot()
717 })
718 .await;
719 Ok(entry)
720 }))
721 }
722
723 pub fn copy_entry(
724 &self,
725 entry_id: ProjectEntryId,
726 new_path: impl Into<Arc<Path>>,
727 cx: &mut ModelContext<Worktree>,
728 ) -> Option<Task<Result<Entry>>> {
729 let old_path = self.entry_for_id(entry_id)?.path.clone();
730 let new_path = new_path.into();
731 let abs_old_path = self.absolutize(&old_path);
732 let abs_new_path = self.absolutize(&new_path);
733 let copy = cx.background().spawn({
734 let fs = self.fs.clone();
735 let abs_new_path = abs_new_path.clone();
736 async move {
737 fs.copy(&abs_old_path, &abs_new_path, Default::default())
738 .await
739 }
740 });
741
742 Some(cx.spawn(|this, mut cx| async move {
743 copy.await?;
744 let entry = this
745 .update(&mut cx, |this, cx| {
746 this.as_local_mut().unwrap().refresh_entry(
747 new_path.clone(),
748 abs_new_path,
749 None,
750 cx,
751 )
752 })
753 .await?;
754 this.update(&mut cx, |this, cx| {
755 this.poll_snapshot(cx);
756 this.as_local().unwrap().broadcast_snapshot()
757 })
758 .await;
759 Ok(entry)
760 }))
761 }
762
763 fn write_entry_internal(
764 &self,
765 path: impl Into<Arc<Path>>,
766 text_if_file: Option<Rope>,
767 cx: &mut ModelContext<Worktree>,
768 ) -> Task<Result<Entry>> {
769 let path = path.into();
770 let abs_path = self.absolutize(&path);
771 let write = cx.background().spawn({
772 let fs = self.fs.clone();
773 let abs_path = abs_path.clone();
774 async move {
775 if let Some(text) = text_if_file {
776 fs.save(&abs_path, &text).await
777 } else {
778 fs.create_dir(&abs_path).await
779 }
780 }
781 });
782
783 cx.spawn(|this, mut cx| async move {
784 write.await?;
785 let entry = this
786 .update(&mut cx, |this, cx| {
787 this.as_local_mut()
788 .unwrap()
789 .refresh_entry(path, abs_path, None, cx)
790 })
791 .await?;
792 this.update(&mut cx, |this, cx| {
793 this.poll_snapshot(cx);
794 this.as_local().unwrap().broadcast_snapshot()
795 })
796 .await;
797 Ok(entry)
798 })
799 }
800
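    // Stat a single path and splice the resulting entry into the background snapshot:
    // recompute its ignore status, drop the entry at `old_path` if this was a rename,
    // bump the scan id, and, if the worktree is currently shared, queue the updated
    // snapshot so it gets sent to collaborators.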
801 fn refresh_entry(
802 &self,
803 path: Arc<Path>,
804 abs_path: PathBuf,
805 old_path: Option<Arc<Path>>,
806 cx: &mut ModelContext<Worktree>,
807 ) -> Task<Result<Entry>> {
808 let fs = self.fs.clone();
809 let root_char_bag;
810 let next_entry_id;
811 {
812 let snapshot = self.background_snapshot.lock();
813 root_char_bag = snapshot.root_char_bag;
814 next_entry_id = snapshot.next_entry_id.clone();
815 }
816 cx.spawn_weak(|this, mut cx| async move {
817 let mut entry = Entry::new(
818 path.clone(),
819 &fs.metadata(&abs_path)
820 .await?
821 .ok_or_else(|| anyhow!("could not read saved file metadata"))?,
822 &next_entry_id,
823 root_char_bag,
824 );
825
826 let this = this
827 .upgrade(&cx)
828 .ok_or_else(|| anyhow!("worktree was dropped"))?;
829 let (entry, snapshot, snapshots_tx) = this.read_with(&cx, |this, _| {
830 let this = this.as_local().unwrap();
831 let mut snapshot = this.background_snapshot.lock();
832 entry.is_ignored = snapshot
833 .ignore_stack_for_path(&path, entry.is_dir())
834 .is_path_ignored(&path, entry.is_dir());
835 if let Some(old_path) = old_path {
836 snapshot.remove_path(&old_path);
837 }
838 let entry = snapshot.insert_entry(entry, fs.as_ref());
839 snapshot.scan_id += 1;
840 let snapshots_tx = this.share.as_ref().map(|s| s.snapshots_tx.clone());
841 (entry, snapshot.clone(), snapshots_tx)
842 });
843 this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
844
845 if let Some(snapshots_tx) = snapshots_tx {
846 snapshots_tx.send(snapshot).await.ok();
847 }
848
849 Ok(entry)
850 })
851 }
852
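    // Start sharing this worktree with the remote project identified by `project_id`.
    // A background task sends one full `UpdateWorktree` message containing every entry,
    // then the current diagnostic summaries, and afterwards streams incremental diffs
    // (via `build_update`) whenever a newer snapshot is queued, always diffing against
    // the last snapshot it sent and skipping ahead to the newest queued snapshot.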
853 pub fn share(&mut self, project_id: u64, cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
854 let (share_tx, share_rx) = oneshot::channel();
855 let (snapshots_to_send_tx, snapshots_to_send_rx) =
856 smol::channel::unbounded::<LocalSnapshot>();
857 if self.share.is_some() {
858 let _ = share_tx.send(Ok(()));
859 } else {
860 let rpc = self.client.clone();
861 let worktree_id = cx.model_id() as u64;
862 let maintain_remote_snapshot = cx.background().spawn({
863 let rpc = rpc.clone();
864 let diagnostic_summaries = self.diagnostic_summaries.clone();
865 async move {
866 let mut prev_snapshot = match snapshots_to_send_rx.recv().await {
867 Ok(snapshot) => {
868 let update = proto::UpdateWorktree {
869 project_id,
870 worktree_id,
871 root_name: snapshot.root_name().to_string(),
872 updated_entries: snapshot
873 .entries_by_path
874 .iter()
875 .map(Into::into)
876 .collect(),
877 removed_entries: Default::default(),
878 scan_id: snapshot.scan_id as u64,
879 is_last_update: true,
880 };
                            if let Err(error) = send_worktree_update(&rpc, update).await {
                                let _ = share_tx.send(Err(error));
                                return Err(anyhow!("failed to send initial worktree update"));
                            } else {
                                let _ = share_tx.send(Ok(()));
                                snapshot
                            }
                        }
                        Err(error) => {
                            let _ = share_tx.send(Err(error.into()));
                            return Err(anyhow!("failed to send initial worktree update"));
                        }
893 };
894
895 for (path, summary) in diagnostic_summaries.iter() {
896 rpc.send(proto::UpdateDiagnosticSummary {
897 project_id,
898 worktree_id,
899 summary: Some(summary.to_proto(&path.0)),
900 })?;
901 }
902
903 while let Ok(mut snapshot) = snapshots_to_send_rx.recv().await {
904 while let Ok(newer_snapshot) = snapshots_to_send_rx.try_recv() {
905 snapshot = newer_snapshot;
906 }
907
908 send_worktree_update(
909 &rpc,
910 snapshot.build_update(&prev_snapshot, project_id, worktree_id, true),
911 )
912 .await?;
913 prev_snapshot = snapshot;
914 }
915
916 Ok::<_, anyhow::Error>(())
917 }
918 .log_err()
919 });
920 self.share = Some(ShareState {
921 project_id,
922 snapshots_tx: snapshots_to_send_tx.clone(),
923 _maintain_remote_snapshot: Some(maintain_remote_snapshot),
924 });
925 }
926
927 cx.spawn_weak(|this, cx| async move {
928 if let Some(this) = this.upgrade(&cx) {
929 this.read_with(&cx, |this, _| {
930 let this = this.as_local().unwrap();
931 let _ = snapshots_to_send_tx.try_send(this.snapshot());
932 });
933 }
934 share_rx
935 .await
936 .unwrap_or_else(|_| Err(anyhow!("share ended")))
937 })
938 }
939
940 pub fn unshare(&mut self) {
941 self.share.take();
942 }
943
944 pub fn is_shared(&self) -> bool {
945 self.share.is_some()
946 }
947
948 fn broadcast_snapshot(&self) -> impl Future<Output = ()> {
949 let mut to_send = None;
950 if matches!(self.scan_state(), ScanState::Idle) {
951 if let Some(share) = self.share.as_ref() {
952 to_send = Some((self.snapshot(), share.snapshots_tx.clone()));
953 }
954 }
955
956 async move {
957 if let Some((snapshot, snapshots_to_send_tx)) = to_send {
958 if let Err(err) = snapshots_to_send_tx.send(snapshot).await {
959 log::error!("error submitting snapshot to send {}", err);
960 }
961 }
962 }
963 }
964}
965
966impl RemoteWorktree {
967 fn snapshot(&self) -> Snapshot {
968 self.snapshot.clone()
969 }
970
971 pub fn disconnected_from_host(&mut self) {
972 self.updates_tx.take();
973 self.snapshot_subscriptions.clear();
974 }
975
976 pub fn update_from_remote(&mut self, update: proto::UpdateWorktree) {
977 if let Some(updates_tx) = &self.updates_tx {
978 updates_tx
979 .unbounded_send(update)
980 .expect("consumer runs to completion");
981 }
982 }
983
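    // A snapshot with the given scan id has been observed once this worktree's scan id
    // has moved past it, or has reached it and the latest update was marked as the last
    // one for that scan.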
984 fn observed_snapshot(&self, scan_id: usize) -> bool {
985 self.scan_id > scan_id || (self.scan_id == scan_id && self.is_complete)
986 }
987
988 fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = ()> {
989 let (tx, rx) = oneshot::channel();
990 if self.observed_snapshot(scan_id) {
991 let _ = tx.send(());
992 } else {
993 match self
994 .snapshot_subscriptions
995 .binary_search_by_key(&scan_id, |probe| probe.0)
996 {
997 Ok(ix) | Err(ix) => self.snapshot_subscriptions.insert(ix, (scan_id, tx)),
998 }
999 }
1000
1001 async move {
1002 let _ = rx.await;
1003 }
1004 }
1005
1006 pub fn update_diagnostic_summary(
1007 &mut self,
1008 path: Arc<Path>,
1009 summary: &proto::DiagnosticSummary,
1010 ) {
1011 let summary = DiagnosticSummary {
1012 language_server_id: summary.language_server_id as usize,
1013 error_count: summary.error_count as usize,
1014 warning_count: summary.warning_count as usize,
1015 };
1016 if summary.is_empty() {
1017 self.diagnostic_summaries.remove(&PathKey(path.clone()));
1018 } else {
1019 self.diagnostic_summaries
1020 .insert(PathKey(path.clone()), summary);
1021 }
1022 }
1023
1024 pub fn insert_entry(
1025 &mut self,
1026 entry: proto::Entry,
1027 scan_id: usize,
1028 cx: &mut ModelContext<Worktree>,
1029 ) -> Task<Result<Entry>> {
1030 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1031 cx.spawn(|this, mut cx| async move {
1032 wait_for_snapshot.await;
1033 this.update(&mut cx, |worktree, _| {
1034 let worktree = worktree.as_remote_mut().unwrap();
1035 let mut snapshot = worktree.background_snapshot.lock();
1036 let entry = snapshot.insert_entry(entry);
1037 worktree.snapshot = snapshot.clone();
1038 entry
1039 })
1040 })
1041 }
1042
1043 pub(crate) fn delete_entry(
1044 &mut self,
1045 id: ProjectEntryId,
1046 scan_id: usize,
1047 cx: &mut ModelContext<Worktree>,
1048 ) -> Task<Result<()>> {
1049 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1050 cx.spawn(|this, mut cx| async move {
1051 wait_for_snapshot.await;
1052 this.update(&mut cx, |worktree, _| {
1053 let worktree = worktree.as_remote_mut().unwrap();
1054 let mut snapshot = worktree.background_snapshot.lock();
1055 snapshot.delete_entry(id);
1056 worktree.snapshot = snapshot.clone();
1057 });
1058 Ok(())
1059 })
1060 }
1061}
1062
1063impl Snapshot {
1064 pub fn id(&self) -> WorktreeId {
1065 self.id
1066 }
1067
1068 pub fn contains_entry(&self, entry_id: ProjectEntryId) -> bool {
1069 self.entries_by_id.get(&entry_id, &()).is_some()
1070 }
1071
1072 pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
1073 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1074 let old_entry = self.entries_by_id.insert_or_replace(
1075 PathEntry {
1076 id: entry.id,
1077 path: entry.path.clone(),
1078 is_ignored: entry.is_ignored,
1079 scan_id: 0,
1080 },
1081 &(),
1082 );
1083 if let Some(old_entry) = old_entry {
1084 self.entries_by_path.remove(&PathKey(old_entry.path), &());
1085 }
1086 self.entries_by_path.insert_or_replace(entry.clone(), &());
1087 Ok(entry)
1088 }
1089
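    // Remove the entry with this id along with all of its descendants, rebuilding
    // `entries_by_path` by slicing around the removed subtree.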
1090 fn delete_entry(&mut self, entry_id: ProjectEntryId) -> bool {
1091 if let Some(removed_entry) = self.entries_by_id.remove(&entry_id, &()) {
1092 self.entries_by_path = {
1093 let mut cursor = self.entries_by_path.cursor();
1094 let mut new_entries_by_path =
1095 cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &());
1096 while let Some(entry) = cursor.item() {
1097 if entry.path.starts_with(&removed_entry.path) {
1098 self.entries_by_id.remove(&entry.id, &());
1099 cursor.next(&());
1100 } else {
1101 break;
1102 }
1103 }
1104 new_entries_by_path.push_tree(cursor.suffix(&()), &());
1105 new_entries_by_path
1106 };
1107
1108 true
1109 } else {
1110 false
1111 }
1112 }
1113
1114 pub(crate) fn apply_remote_update(&mut self, update: proto::UpdateWorktree) -> Result<()> {
1115 let mut entries_by_path_edits = Vec::new();
1116 let mut entries_by_id_edits = Vec::new();
1117 for entry_id in update.removed_entries {
1118 let entry = self
1119 .entry_for_id(ProjectEntryId::from_proto(entry_id))
1120 .ok_or_else(|| anyhow!("unknown entry {}", entry_id))?;
1121 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
1122 entries_by_id_edits.push(Edit::Remove(entry.id));
1123 }
1124
1125 for entry in update.updated_entries {
1126 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1127 if let Some(PathEntry { path, .. }) = self.entries_by_id.get(&entry.id, &()) {
1128 entries_by_path_edits.push(Edit::Remove(PathKey(path.clone())));
1129 }
1130 entries_by_id_edits.push(Edit::Insert(PathEntry {
1131 id: entry.id,
1132 path: entry.path.clone(),
1133 is_ignored: entry.is_ignored,
1134 scan_id: 0,
1135 }));
1136 entries_by_path_edits.push(Edit::Insert(entry));
1137 }
1138
1139 self.entries_by_path.edit(entries_by_path_edits, &());
1140 self.entries_by_id.edit(entries_by_id_edits, &());
1141 self.scan_id = update.scan_id as usize;
1142 self.is_complete = update.is_last_update;
1143
1144 Ok(())
1145 }
1146
1147 pub fn file_count(&self) -> usize {
1148 self.entries_by_path.summary().file_count
1149 }
1150
1151 pub fn visible_file_count(&self) -> usize {
1152 self.entries_by_path.summary().visible_file_count
1153 }
1154
1155 fn traverse_from_offset(
1156 &self,
1157 include_dirs: bool,
1158 include_ignored: bool,
1159 start_offset: usize,
1160 ) -> Traversal {
1161 let mut cursor = self.entries_by_path.cursor();
1162 cursor.seek(
1163 &TraversalTarget::Count {
1164 count: start_offset,
1165 include_dirs,
1166 include_ignored,
1167 },
1168 Bias::Right,
1169 &(),
1170 );
1171 Traversal {
1172 cursor,
1173 include_dirs,
1174 include_ignored,
1175 }
1176 }
1177
1178 fn traverse_from_path(
1179 &self,
1180 include_dirs: bool,
1181 include_ignored: bool,
1182 path: &Path,
1183 ) -> Traversal {
1184 let mut cursor = self.entries_by_path.cursor();
1185 cursor.seek(&TraversalTarget::Path(path), Bias::Left, &());
1186 Traversal {
1187 cursor,
1188 include_dirs,
1189 include_ignored,
1190 }
1191 }
1192
1193 pub fn files(&self, include_ignored: bool, start: usize) -> Traversal {
1194 self.traverse_from_offset(false, include_ignored, start)
1195 }
1196
1197 pub fn entries(&self, include_ignored: bool) -> Traversal {
1198 self.traverse_from_offset(true, include_ignored, 0)
1199 }
1200
1201 pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
1202 let empty_path = Path::new("");
1203 self.entries_by_path
1204 .cursor::<()>()
1205 .filter(move |entry| entry.path.as_ref() != empty_path)
1206 .map(|entry| &entry.path)
1207 }
1208
1209 fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> {
1210 let mut cursor = self.entries_by_path.cursor();
1211 cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &());
1212 let traversal = Traversal {
1213 cursor,
1214 include_dirs: true,
1215 include_ignored: true,
1216 };
1217 ChildEntriesIter {
1218 traversal,
1219 parent_path,
1220 }
1221 }
1222
1223 pub fn root_entry(&self) -> Option<&Entry> {
1224 self.entry_for_path("")
1225 }
1226
1227 pub fn root_name(&self) -> &str {
1228 &self.root_name
1229 }
1230
1231 pub fn scan_id(&self) -> usize {
1232 self.scan_id
1233 }
1234
1235 pub fn entry_for_path(&self, path: impl AsRef<Path>) -> Option<&Entry> {
1236 let path = path.as_ref();
1237 self.traverse_from_path(true, true, path)
1238 .entry()
1239 .and_then(|entry| {
1240 if entry.path.as_ref() == path {
1241 Some(entry)
1242 } else {
1243 None
1244 }
1245 })
1246 }
1247
1248 pub fn entry_for_id(&self, id: ProjectEntryId) -> Option<&Entry> {
1249 let entry = self.entries_by_id.get(&id, &())?;
1250 self.entry_for_path(&entry.path)
1251 }
1252
1253 pub fn inode_for_path(&self, path: impl AsRef<Path>) -> Option<u64> {
1254 self.entry_for_path(path.as_ref()).map(|e| e.inode)
1255 }
1256}
1257
1258impl LocalSnapshot {
1259 pub fn abs_path(&self) -> &Arc<Path> {
1260 &self.abs_path
1261 }
1262
1263 #[cfg(test)]
1264 pub(crate) fn build_initial_update(&self, project_id: u64) -> proto::UpdateWorktree {
1265 let root_name = self.root_name.clone();
1266 proto::UpdateWorktree {
1267 project_id,
1268 worktree_id: self.id().to_proto(),
1269 root_name,
1270 updated_entries: self.entries_by_path.iter().map(Into::into).collect(),
1271 removed_entries: Default::default(),
1272 scan_id: self.scan_id as u64,
1273 is_last_update: true,
1274 }
1275 }
1276
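    // Compute the difference between this snapshot and `other` as an `UpdateWorktree`
    // message. Both id-ordered `entries_by_id` trees are walked in lockstep (a merge
    // join): ids present only in `self` become updated entries, ids present in both
    // with differing scan ids are re-sent, and ids present only in `other` are removed.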
1277 pub(crate) fn build_update(
1278 &self,
1279 other: &Self,
1280 project_id: u64,
1281 worktree_id: u64,
1282 include_ignored: bool,
1283 ) -> proto::UpdateWorktree {
1284 let mut updated_entries = Vec::new();
1285 let mut removed_entries = Vec::new();
1286 let mut self_entries = self
1287 .entries_by_id
1288 .cursor::<()>()
1289 .filter(|e| include_ignored || !e.is_ignored)
1290 .peekable();
1291 let mut other_entries = other
1292 .entries_by_id
1293 .cursor::<()>()
1294 .filter(|e| include_ignored || !e.is_ignored)
1295 .peekable();
1296 loop {
1297 match (self_entries.peek(), other_entries.peek()) {
1298 (Some(self_entry), Some(other_entry)) => {
1299 match Ord::cmp(&self_entry.id, &other_entry.id) {
1300 Ordering::Less => {
1301 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1302 updated_entries.push(entry);
1303 self_entries.next();
1304 }
1305 Ordering::Equal => {
1306 if self_entry.scan_id != other_entry.scan_id {
1307 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1308 updated_entries.push(entry);
1309 }
1310
1311 self_entries.next();
1312 other_entries.next();
1313 }
1314 Ordering::Greater => {
1315 removed_entries.push(other_entry.id.to_proto());
1316 other_entries.next();
1317 }
1318 }
1319 }
1320 (Some(self_entry), None) => {
1321 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1322 updated_entries.push(entry);
1323 self_entries.next();
1324 }
1325 (None, Some(other_entry)) => {
1326 removed_entries.push(other_entry.id.to_proto());
1327 other_entries.next();
1328 }
1329 (None, None) => break,
1330 }
1331 }
1332
1333 proto::UpdateWorktree {
1334 project_id,
1335 worktree_id,
1336 root_name: self.root_name().to_string(),
1337 updated_entries,
1338 removed_entries,
1339 scan_id: self.scan_id as u64,
1340 is_last_update: true,
1341 }
1342 }
1343
1344 fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
1345 if !entry.is_dir() && entry.path.file_name() == Some(&GITIGNORE) {
1346 let abs_path = self.abs_path.join(&entry.path);
1347 match build_gitignore(&abs_path, fs) {
1348 Ok(ignore) => {
1349 let ignore_dir_path = entry.path.parent().unwrap();
1350 self.ignores
1351 .insert(ignore_dir_path.into(), (Arc::new(ignore), self.scan_id));
1352 }
1353 Err(error) => {
1354 log::error!(
1355 "error loading .gitignore file {:?} - {:?}",
1356 &entry.path,
1357 error
1358 );
1359 }
1360 }
1361 }
1362
1363 self.reuse_entry_id(&mut entry);
1364 self.entries_by_path.insert_or_replace(entry.clone(), &());
1365 let scan_id = self.scan_id;
1366 self.entries_by_id.insert_or_replace(
1367 PathEntry {
1368 id: entry.id,
1369 path: entry.path.clone(),
1370 is_ignored: entry.is_ignored,
1371 scan_id,
1372 },
1373 &(),
1374 );
1375 entry
1376 }
1377
1378 fn populate_dir(
1379 &mut self,
1380 parent_path: Arc<Path>,
1381 entries: impl IntoIterator<Item = Entry>,
1382 ignore: Option<Arc<Gitignore>>,
1383 ) {
1384 let mut parent_entry = if let Some(parent_entry) =
1385 self.entries_by_path.get(&PathKey(parent_path.clone()), &())
1386 {
1387 parent_entry.clone()
1388 } else {
1389 log::warn!(
1390 "populating a directory {:?} that has been removed",
1391 parent_path
1392 );
1393 return;
1394 };
1395
1396 if let Some(ignore) = ignore {
1397 self.ignores.insert(parent_path, (ignore, self.scan_id));
1398 }
1399 if matches!(parent_entry.kind, EntryKind::PendingDir) {
1400 parent_entry.kind = EntryKind::Dir;
1401 } else {
1402 unreachable!();
1403 }
1404
1405 let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
1406 let mut entries_by_id_edits = Vec::new();
1407
1408 for mut entry in entries {
1409 self.reuse_entry_id(&mut entry);
1410 entries_by_id_edits.push(Edit::Insert(PathEntry {
1411 id: entry.id,
1412 path: entry.path.clone(),
1413 is_ignored: entry.is_ignored,
1414 scan_id: self.scan_id,
1415 }));
1416 entries_by_path_edits.push(Edit::Insert(entry));
1417 }
1418
1419 self.entries_by_path.edit(entries_by_path_edits, &());
1420 self.entries_by_id.edit(entries_by_id_edits, &());
1421 }
1422
1423 fn reuse_entry_id(&mut self, entry: &mut Entry) {
1424 if let Some(removed_entry_id) = self.removed_entry_ids.remove(&entry.inode) {
1425 entry.id = removed_entry_id;
1426 } else if let Some(existing_entry) = self.entry_for_path(&entry.path) {
1427 entry.id = existing_entry.id;
1428 }
1429 }
1430
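    // Remove `path` and everything beneath it from the snapshot, remembering the
    // removed entry ids keyed by inode so that a later re-scan of the same file can
    // reuse its previous id (see `reuse_entry_id` above).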
1431 fn remove_path(&mut self, path: &Path) {
1432 let mut new_entries;
1433 let removed_entries;
1434 {
1435 let mut cursor = self.entries_by_path.cursor::<TraversalProgress>();
1436 new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
1437 removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
1438 new_entries.push_tree(cursor.suffix(&()), &());
1439 }
1440 self.entries_by_path = new_entries;
1441
1442 let mut entries_by_id_edits = Vec::new();
1443 for entry in removed_entries.cursor::<()>() {
1444 let removed_entry_id = self
1445 .removed_entry_ids
1446 .entry(entry.inode)
1447 .or_insert(entry.id);
1448 *removed_entry_id = cmp::max(*removed_entry_id, entry.id);
1449 entries_by_id_edits.push(Edit::Remove(entry.id));
1450 }
1451 self.entries_by_id.edit(entries_by_id_edits, &());
1452
1453 if path.file_name() == Some(&GITIGNORE) {
1454 if let Some((_, scan_id)) = self.ignores.get_mut(path.parent().unwrap()) {
1455 *scan_id = self.snapshot.scan_id;
1456 }
1457 }
1458 }
1459
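    // Build the stack of `.gitignore` files that apply to `path` by walking its
    // ancestors from the outermost directory inward; if any ancestor (or the path
    // itself) is already ignored, the stack collapses to "ignore everything".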
1460 fn ignore_stack_for_path(&self, path: &Path, is_dir: bool) -> Arc<IgnoreStack> {
1461 let mut new_ignores = Vec::new();
1462 for ancestor in path.ancestors().skip(1) {
1463 if let Some((ignore, _)) = self.ignores.get(ancestor) {
1464 new_ignores.push((ancestor, Some(ignore.clone())));
1465 } else {
1466 new_ignores.push((ancestor, None));
1467 }
1468 }
1469
1470 let mut ignore_stack = IgnoreStack::none();
1471 for (parent_path, ignore) in new_ignores.into_iter().rev() {
1472 if ignore_stack.is_path_ignored(&parent_path, true) {
1473 ignore_stack = IgnoreStack::all();
1474 break;
1475 } else if let Some(ignore) = ignore {
1476 ignore_stack = ignore_stack.append(Arc::from(parent_path), ignore);
1477 }
1478 }
1479
1480 if ignore_stack.is_path_ignored(path, is_dir) {
1481 ignore_stack = IgnoreStack::all();
1482 }
1483
1484 ignore_stack
1485 }
1486}
1487
1488fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
1489 let contents = smol::block_on(fs.load(&abs_path))?;
1490 let parent = abs_path.parent().unwrap_or(Path::new("/"));
1491 let mut builder = GitignoreBuilder::new(parent);
1492 for line in contents.lines() {
1493 builder.add_line(Some(abs_path.into()), line)?;
1494 }
1495 Ok(builder.build()?)
1496}
1497
1498impl WorktreeId {
1499 pub fn from_usize(handle_id: usize) -> Self {
1500 Self(handle_id)
1501 }
1502
1503 pub(crate) fn from_proto(id: u64) -> Self {
1504 Self(id as usize)
1505 }
1506
1507 pub fn to_proto(&self) -> u64 {
1508 self.0 as u64
1509 }
1510
1511 pub fn to_usize(&self) -> usize {
1512 self.0
1513 }
1514}
1515
1516impl fmt::Display for WorktreeId {
1517 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1518 self.0.fmt(f)
1519 }
1520}
1521
1522impl Deref for Worktree {
1523 type Target = Snapshot;
1524
1525 fn deref(&self) -> &Self::Target {
1526 match self {
1527 Worktree::Local(worktree) => &worktree.snapshot,
1528 Worktree::Remote(worktree) => &worktree.snapshot,
1529 }
1530 }
1531}
1532
1533impl Deref for LocalWorktree {
1534 type Target = LocalSnapshot;
1535
1536 fn deref(&self) -> &Self::Target {
1537 &self.snapshot
1538 }
1539}
1540
1541impl Deref for RemoteWorktree {
1542 type Target = Snapshot;
1543
1544 fn deref(&self) -> &Self::Target {
1545 &self.snapshot
1546 }
1547}
1548
1549impl fmt::Debug for LocalWorktree {
1550 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1551 self.snapshot.fmt(f)
1552 }
1553}
1554
1555impl fmt::Debug for Snapshot {
1556 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1557 struct EntriesById<'a>(&'a SumTree<PathEntry>);
1558 struct EntriesByPath<'a>(&'a SumTree<Entry>);
1559
1560 impl<'a> fmt::Debug for EntriesByPath<'a> {
1561 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1562 f.debug_map()
1563 .entries(self.0.iter().map(|entry| (&entry.path, entry.id)))
1564 .finish()
1565 }
1566 }
1567
1568 impl<'a> fmt::Debug for EntriesById<'a> {
1569 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1570 f.debug_list().entries(self.0.iter()).finish()
1571 }
1572 }
1573
1574 f.debug_struct("Snapshot")
1575 .field("id", &self.id)
1576 .field("root_name", &self.root_name)
1577 .field("entries_by_path", &EntriesByPath(&self.entries_by_path))
1578 .field("entries_by_id", &EntriesById(&self.entries_by_id))
1579 .finish()
1580 }
1581}
1582
1583#[derive(Clone, PartialEq)]
1584pub struct File {
1585 pub worktree: ModelHandle<Worktree>,
1586 pub path: Arc<Path>,
1587 pub mtime: SystemTime,
1588 pub(crate) entry_id: Option<ProjectEntryId>,
1589 pub(crate) is_local: bool,
1590}
1591
1592impl language::File for File {
1593 fn as_local(&self) -> Option<&dyn language::LocalFile> {
1594 if self.is_local {
1595 Some(self)
1596 } else {
1597 None
1598 }
1599 }
1600
1601 fn mtime(&self) -> SystemTime {
1602 self.mtime
1603 }
1604
1605 fn path(&self) -> &Arc<Path> {
1606 &self.path
1607 }
1608
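    /// The file's path prefixed with its worktree's root name. For example
    /// (illustrative names only), a file at `src/lib.rs` in a worktree rooted at a
    /// directory named `zed` yields `zed/src/lib.rs`.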
1609 fn full_path(&self, cx: &AppContext) -> PathBuf {
1610 let mut full_path = PathBuf::new();
1611 full_path.push(self.worktree.read(cx).root_name());
1612 if self.path.components().next().is_some() {
1613 full_path.push(&self.path);
1614 }
1615 full_path
1616 }
1617
1618 /// Returns the last component of this handle's absolute path. If this handle refers to the root
1619 /// of its worktree, then this method will return the name of the worktree itself.
1620 fn file_name(&self, cx: &AppContext) -> OsString {
1621 self.path
1622 .file_name()
1623 .map(|name| name.into())
1624 .unwrap_or_else(|| OsString::from(&self.worktree.read(cx).root_name))
1625 }
1626
1627 fn is_deleted(&self) -> bool {
1628 self.entry_id.is_none()
1629 }
1630
1631 fn save(
1632 &self,
1633 buffer_id: u64,
1634 text: Rope,
1635 version: clock::Global,
1636 cx: &mut MutableAppContext,
1637 ) -> Task<Result<(clock::Global, String, SystemTime)>> {
1638 self.worktree.update(cx, |worktree, cx| match worktree {
1639 Worktree::Local(worktree) => {
1640 let rpc = worktree.client.clone();
1641 let project_id = worktree.share.as_ref().map(|share| share.project_id);
1642 let fingerprint = text.fingerprint();
1643 let save = worktree.write_file(self.path.clone(), text, cx);
1644 cx.background().spawn(async move {
1645 let entry = save.await?;
1646 if let Some(project_id) = project_id {
1647 rpc.send(proto::BufferSaved {
1648 project_id,
1649 buffer_id,
1650 version: serialize_version(&version),
1651 mtime: Some(entry.mtime.into()),
1652 fingerprint: fingerprint.clone(),
1653 })?;
1654 }
1655 Ok((version, fingerprint, entry.mtime))
1656 })
1657 }
1658 Worktree::Remote(worktree) => {
1659 let rpc = worktree.client.clone();
1660 let project_id = worktree.project_id;
1661 cx.foreground().spawn(async move {
1662 let response = rpc
1663 .request(proto::SaveBuffer {
1664 project_id,
1665 buffer_id,
1666 version: serialize_version(&version),
1667 })
1668 .await?;
1669 let version = deserialize_version(response.version);
1670 let mtime = response
1671 .mtime
1672 .ok_or_else(|| anyhow!("missing mtime"))?
1673 .into();
1674 Ok((version, response.fingerprint, mtime))
1675 })
1676 }
1677 })
1678 }
1679
1680 fn as_any(&self) -> &dyn Any {
1681 self
1682 }
1683
1684 fn to_proto(&self) -> rpc::proto::File {
1685 rpc::proto::File {
1686 worktree_id: self.worktree.id() as u64,
1687 entry_id: self.entry_id.map(|entry_id| entry_id.to_proto()),
1688 path: self.path.to_string_lossy().into(),
1689 mtime: Some(self.mtime.into()),
1690 }
1691 }
1692}
1693
1694impl language::LocalFile for File {
1695 fn abs_path(&self, cx: &AppContext) -> PathBuf {
1696 self.worktree
1697 .read(cx)
1698 .as_local()
1699 .unwrap()
1700 .abs_path
1701 .join(&self.path)
1702 }
1703
1704 fn load(&self, cx: &AppContext) -> Task<Result<String>> {
1705 let worktree = self.worktree.read(cx).as_local().unwrap();
1706 let abs_path = worktree.absolutize(&self.path);
1707 let fs = worktree.fs.clone();
1708 cx.background()
1709 .spawn(async move { fs.load(&abs_path).await })
1710 }
1711
1712 fn buffer_reloaded(
1713 &self,
1714 buffer_id: u64,
1715 version: &clock::Global,
1716 fingerprint: String,
1717 mtime: SystemTime,
1718 cx: &mut MutableAppContext,
1719 ) {
1720 let worktree = self.worktree.read(cx).as_local().unwrap();
1721 if let Some(project_id) = worktree.share.as_ref().map(|share| share.project_id) {
1722 worktree
1723 .client
1724 .send(proto::BufferReloaded {
1725 project_id,
1726 buffer_id,
1727 version: serialize_version(&version),
1728 mtime: Some(mtime.into()),
1729 fingerprint,
1730 })
1731 .log_err();
1732 }
1733 }
1734}
1735
1736impl File {
1737 pub fn from_proto(
1738 proto: rpc::proto::File,
1739 worktree: ModelHandle<Worktree>,
1740 cx: &AppContext,
1741 ) -> Result<Self> {
1742 let worktree_id = worktree
1743 .read(cx)
1744 .as_remote()
1745 .ok_or_else(|| anyhow!("not remote"))?
1746 .id();
1747
1748 if worktree_id.to_proto() != proto.worktree_id {
1749 return Err(anyhow!("worktree id does not match file"));
1750 }
1751
1752 Ok(Self {
1753 worktree,
1754 path: Path::new(&proto.path).into(),
1755 mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
1756 entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
1757 is_local: false,
1758 })
1759 }
1760
1761 pub fn from_dyn(file: Option<&dyn language::File>) -> Option<&Self> {
1762 file.and_then(|f| f.as_any().downcast_ref())
1763 }
1764
1765 pub fn worktree_id(&self, cx: &AppContext) -> WorktreeId {
1766 self.worktree.read(cx).id()
1767 }
1768
1769 pub fn project_entry_id(&self, _: &AppContext) -> Option<ProjectEntryId> {
1770 self.entry_id
1771 }
1772}
1773
1774#[derive(Clone, Debug, PartialEq, Eq)]
1775pub struct Entry {
1776 pub id: ProjectEntryId,
1777 pub kind: EntryKind,
1778 pub path: Arc<Path>,
1779 pub inode: u64,
1780 pub mtime: SystemTime,
1781 pub is_symlink: bool,
1782 pub is_ignored: bool,
1783}
1784
1785#[derive(Clone, Copy, Debug, PartialEq, Eq)]
1786pub enum EntryKind {
1787 PendingDir,
1788 Dir,
1789 File(CharBag),
1790}
1791
1792impl Entry {
1793 fn new(
1794 path: Arc<Path>,
1795 metadata: &fs::Metadata,
1796 next_entry_id: &AtomicUsize,
1797 root_char_bag: CharBag,
1798 ) -> Self {
1799 Self {
1800 id: ProjectEntryId::new(next_entry_id),
1801 kind: if metadata.is_dir {
1802 EntryKind::PendingDir
1803 } else {
1804 EntryKind::File(char_bag_for_path(root_char_bag, &path))
1805 },
1806 path,
1807 inode: metadata.inode,
1808 mtime: metadata.mtime,
1809 is_symlink: metadata.is_symlink,
1810 is_ignored: false,
1811 }
1812 }
1813
1814 pub fn is_dir(&self) -> bool {
1815 matches!(self.kind, EntryKind::Dir | EntryKind::PendingDir)
1816 }
1817
1818 pub fn is_file(&self) -> bool {
1819 matches!(self.kind, EntryKind::File(_))
1820 }
1821}
1822
1823impl sum_tree::Item for Entry {
1824 type Summary = EntrySummary;
1825
1826 fn summary(&self) -> Self::Summary {
1827 let visible_count = if self.is_ignored { 0 } else { 1 };
1828 let file_count;
1829 let visible_file_count;
1830 if self.is_file() {
1831 file_count = 1;
1832 visible_file_count = visible_count;
1833 } else {
1834 file_count = 0;
1835 visible_file_count = 0;
1836 }
1837
1838 EntrySummary {
1839 max_path: self.path.clone(),
1840 count: 1,
1841 visible_count,
1842 file_count,
1843 visible_file_count,
1844 }
1845 }
1846}
1847
1848impl sum_tree::KeyedItem for Entry {
1849 type Key = PathKey;
1850
1851 fn key(&self) -> Self::Key {
1852 PathKey(self.path.clone())
1853 }
1854}
1855
1856#[derive(Clone, Debug)]
1857pub struct EntrySummary {
1858 max_path: Arc<Path>,
1859 count: usize,
1860 visible_count: usize,
1861 file_count: usize,
1862 visible_file_count: usize,
1863}
1864
1865impl Default for EntrySummary {
1866 fn default() -> Self {
1867 Self {
1868 max_path: Arc::from(Path::new("")),
1869 count: 0,
1870 visible_count: 0,
1871 file_count: 0,
1872 visible_file_count: 0,
1873 }
1874 }
1875}
1876
1877impl sum_tree::Summary for EntrySummary {
1878 type Context = ();
1879
1880 fn add_summary(&mut self, rhs: &Self, _: &()) {
1881 self.max_path = rhs.max_path.clone();
1882 self.count += rhs.count;
1883 self.visible_count += rhs.visible_count;
1884 self.file_count += rhs.file_count;
1885 self.visible_file_count += rhs.visible_file_count;
1886 }
1887}
1888
1889#[derive(Clone, Debug)]
1890struct PathEntry {
1891 id: ProjectEntryId,
1892 path: Arc<Path>,
1893 is_ignored: bool,
1894 scan_id: usize,
1895}
1896
1897impl sum_tree::Item for PathEntry {
1898 type Summary = PathEntrySummary;
1899
1900 fn summary(&self) -> Self::Summary {
1901 PathEntrySummary { max_id: self.id }
1902 }
1903}
1904
1905impl sum_tree::KeyedItem for PathEntry {
1906 type Key = ProjectEntryId;
1907
1908 fn key(&self) -> Self::Key {
1909 self.id
1910 }
1911}
1912
1913#[derive(Clone, Debug, Default)]
1914struct PathEntrySummary {
1915 max_id: ProjectEntryId,
1916}
1917
1918impl sum_tree::Summary for PathEntrySummary {
1919 type Context = ();
1920
1921 fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
1922 self.max_id = summary.max_id;
1923 }
1924}
1925
1926impl<'a> sum_tree::Dimension<'a, PathEntrySummary> for ProjectEntryId {
1927 fn add_summary(&mut self, summary: &'a PathEntrySummary, _: &()) {
1928 *self = summary.max_id;
1929 }
1930}
1931
1932#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
1933pub struct PathKey(Arc<Path>);
1934
1935impl Default for PathKey {
1936 fn default() -> Self {
1937 Self(Path::new("").into())
1938 }
1939}
1940
1941impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey {
1942 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
1943 self.0 = summary.max_path.clone();
1944 }
1945}
1946
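/// Runs on the background executor: performs the initial recursive scan of the
/// worktree, then applies batches of filesystem events to the shared `LocalSnapshot`,
/// reporting progress over `notify` as `ScanState` values.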
1947struct BackgroundScanner {
1948 fs: Arc<dyn Fs>,
1949 snapshot: Arc<Mutex<LocalSnapshot>>,
1950 notify: UnboundedSender<ScanState>,
1951 executor: Arc<executor::Background>,
1952}
1953
1954impl BackgroundScanner {
1955 fn new(
1956 snapshot: Arc<Mutex<LocalSnapshot>>,
1957 notify: UnboundedSender<ScanState>,
1958 fs: Arc<dyn Fs>,
1959 executor: Arc<executor::Background>,
1960 ) -> Self {
1961 Self {
1962 fs,
1963 snapshot,
1964 notify,
1965 executor,
1966 }
1967 }
1968
1969 fn abs_path(&self) -> Arc<Path> {
1970 self.snapshot.lock().abs_path.clone()
1971 }
1972
1973 fn snapshot(&self) -> LocalSnapshot {
1974 self.snapshot.lock().clone()
1975 }
1976
1977 async fn run(mut self, events_rx: impl Stream<Item = Vec<fsevent::Event>>) {
1978 if self.notify.unbounded_send(ScanState::Initializing).is_err() {
1979 return;
1980 }
1981
1982 if let Err(err) = self.scan_dirs().await {
1983 if self
1984 .notify
1985 .unbounded_send(ScanState::Err(Arc::new(err)))
1986 .is_err()
1987 {
1988 return;
1989 }
1990 }
1991
1992 if self.notify.unbounded_send(ScanState::Idle).is_err() {
1993 return;
1994 }
1995
1996 futures::pin_mut!(events_rx);
1997
1998 while let Some(mut events) = events_rx.next().await {
1999 while let Poll::Ready(Some(additional_events)) = futures::poll!(events_rx.next()) {
2000 events.extend(additional_events);
2001 }
2002
2003 if self.notify.unbounded_send(ScanState::Updating).is_err() {
2004 break;
2005 }
2006
2007 if !self.process_events(events).await {
2008 break;
2009 }
2010
2011 if self.notify.unbounded_send(ScanState::Idle).is_err() {
2012 break;
2013 }
2014 }
2015 }
2016
2017 async fn scan_dirs(&mut self) -> Result<()> {
2018 let root_char_bag;
2019 let next_entry_id;
2020 let is_dir;
2021 {
2022 let snapshot = self.snapshot.lock();
2023 root_char_bag = snapshot.root_char_bag;
2024 next_entry_id = snapshot.next_entry_id.clone();
2025 is_dir = snapshot.root_entry().map_or(false, |e| e.is_dir())
2026 };
2027
2028 if is_dir {
2029 let path: Arc<Path> = Arc::from(Path::new(""));
2030 let abs_path = self.abs_path();
2031 let (tx, rx) = channel::unbounded();
2032 self.executor
2033 .block(tx.send(ScanJob {
2034 abs_path: abs_path.to_path_buf(),
2035 path,
2036 ignore_stack: IgnoreStack::none(),
2037 scan_queue: tx.clone(),
2038 }))
2039 .unwrap();
2040 drop(tx);
2041
2042 self.executor
2043 .scoped(|scope| {
2044 for _ in 0..self.executor.num_cpus() {
2045 scope.spawn(async {
2046 while let Ok(job) = rx.recv().await {
2047 if let Err(err) = self
2048 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2049 .await
2050 {
2051 log::error!("error scanning {:?}: {}", job.abs_path, err);
2052 }
2053 }
2054 });
2055 }
2056 })
2057 .await;
2058 }
2059
2060 Ok(())
2061 }
2062
2063 async fn scan_dir(
2064 &self,
2065 root_char_bag: CharBag,
2066 next_entry_id: Arc<AtomicUsize>,
2067 job: &ScanJob,
2068 ) -> Result<()> {
2069 let mut new_entries: Vec<Entry> = Vec::new();
2070 let mut new_jobs: Vec<ScanJob> = Vec::new();
2071 let mut ignore_stack = job.ignore_stack.clone();
2072 let mut new_ignore = None;
2073
2074 let mut child_paths = self.fs.read_dir(&job.abs_path).await?;
2075 while let Some(child_abs_path) = child_paths.next().await {
2076 let child_abs_path = match child_abs_path {
2077 Ok(child_abs_path) => child_abs_path,
2078 Err(error) => {
2079 log::error!("error processing entry {:?}", error);
2080 continue;
2081 }
2082 };
2083 let child_name = child_abs_path.file_name().unwrap();
2084 let child_path: Arc<Path> = job.path.join(child_name).into();
2085 let child_metadata = match self.fs.metadata(&child_abs_path).await {
2086 Ok(Some(metadata)) => metadata,
2087 Ok(None) => continue,
2088 Err(err) => {
2089 log::error!("error processing {:?}: {:?}", child_abs_path, err);
2090 continue;
2091 }
2092 };
2093
2094 // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
2095 if child_name == *GITIGNORE {
2096 match build_gitignore(&child_abs_path, self.fs.as_ref()) {
2097 Ok(ignore) => {
2098 let ignore = Arc::new(ignore);
2099 ignore_stack = ignore_stack.append(job.path.clone(), ignore.clone());
2100 new_ignore = Some(ignore);
2101 }
2102 Err(error) => {
2103 log::error!(
2104 "error loading .gitignore file {:?} - {:?}",
2105 child_name,
2106 error
2107 );
2108 }
2109 }
2110
                // Update the ignore status of any child entries we've already processed to
                // reflect the ignore file in the current directory. Because `.gitignore`
                // starts with a `.`, there should rarely be many such entries. Update the
                // ignore stack associated with any new jobs as well.
2115 let mut new_jobs = new_jobs.iter_mut();
2116 for entry in &mut new_entries {
2117 entry.is_ignored = ignore_stack.is_path_ignored(&entry.path, entry.is_dir());
2118 if entry.is_dir() {
2119 new_jobs.next().unwrap().ignore_stack = if entry.is_ignored {
2120 IgnoreStack::all()
2121 } else {
2122 ignore_stack.clone()
2123 };
2124 }
2125 }
2126 }
2127
2128 let mut child_entry = Entry::new(
2129 child_path.clone(),
2130 &child_metadata,
2131 &next_entry_id,
2132 root_char_bag,
2133 );
2134
2135 if child_metadata.is_dir {
2136 let is_ignored = ignore_stack.is_path_ignored(&child_path, true);
2137 child_entry.is_ignored = is_ignored;
2138 new_entries.push(child_entry);
2139 new_jobs.push(ScanJob {
2140 abs_path: child_abs_path,
2141 path: child_path,
2142 ignore_stack: if is_ignored {
2143 IgnoreStack::all()
2144 } else {
2145 ignore_stack.clone()
2146 },
2147 scan_queue: job.scan_queue.clone(),
2148 });
2149 } else {
2150 child_entry.is_ignored = ignore_stack.is_path_ignored(&child_path, false);
2151 new_entries.push(child_entry);
2152 };
2153 }
2154
2155 self.snapshot
2156 .lock()
2157 .populate_dir(job.path.clone(), new_entries, new_ignore);
2158 for new_job in new_jobs {
2159 job.scan_queue.send(new_job).await.unwrap();
2160 }
2161
2162 Ok(())
2163 }
2164
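    /// Applies a batch of filesystem events to the snapshot: events are sorted and
    /// deduplicated by path, the affected paths are removed and re-inserted while
    /// holding the snapshot lock, newly discovered directories are rescanned in
    /// parallel, and ignore statuses are updated. Returns `false` if the worktree
    /// root can no longer be canonicalized.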
2165 async fn process_events(&mut self, mut events: Vec<fsevent::Event>) -> bool {
2166 events.sort_unstable_by(|a, b| a.path.cmp(&b.path));
2167 events.dedup_by(|a, b| a.path.starts_with(&b.path));
2168
2169 let root_char_bag;
2170 let root_abs_path;
2171 let next_entry_id;
2172 {
2173 let snapshot = self.snapshot.lock();
2174 root_char_bag = snapshot.root_char_bag;
2175 root_abs_path = snapshot.abs_path.clone();
2176 next_entry_id = snapshot.next_entry_id.clone();
2177 }
2178
2179 let root_abs_path = if let Ok(abs_path) = self.fs.canonicalize(&root_abs_path).await {
2180 abs_path
2181 } else {
2182 return false;
2183 };
2184 let metadata = futures::future::join_all(
2185 events
2186 .iter()
2187 .map(|event| self.fs.metadata(&event.path))
2188 .collect::<Vec<_>>(),
2189 )
2190 .await;
2191
        // Hold the snapshot lock while clearing and re-inserting the entries affected by
        // each event. This way, the snapshot is not observable to the foreground thread
        // while this operation is in progress.
2195 let (scan_queue_tx, scan_queue_rx) = channel::unbounded();
2196 {
2197 let mut snapshot = self.snapshot.lock();
2198 snapshot.scan_id += 1;
2199 for event in &events {
2200 if let Ok(path) = event.path.strip_prefix(&root_abs_path) {
2201 snapshot.remove_path(&path);
2202 }
2203 }
2204
2205 for (event, metadata) in events.into_iter().zip(metadata.into_iter()) {
2206 let path: Arc<Path> = match event.path.strip_prefix(&root_abs_path) {
2207 Ok(path) => Arc::from(path.to_path_buf()),
2208 Err(_) => {
2209 log::error!(
2210 "unexpected event {:?} for root path {:?}",
2211 event.path,
2212 root_abs_path
2213 );
2214 continue;
2215 }
2216 };
2217
2218 match metadata {
2219 Ok(Some(metadata)) => {
2220 let ignore_stack = snapshot.ignore_stack_for_path(&path, metadata.is_dir);
2221 let mut fs_entry = Entry::new(
2222 path.clone(),
2223 &metadata,
2224 snapshot.next_entry_id.as_ref(),
2225 snapshot.root_char_bag,
2226 );
2227 fs_entry.is_ignored = ignore_stack.is_all();
2228 snapshot.insert_entry(fs_entry, self.fs.as_ref());
2229 if metadata.is_dir {
2230 self.executor
2231 .block(scan_queue_tx.send(ScanJob {
2232 abs_path: event.path,
2233 path,
2234 ignore_stack,
2235 scan_queue: scan_queue_tx.clone(),
2236 }))
2237 .unwrap();
2238 }
2239 }
2240 Ok(None) => {}
2241 Err(err) => {
2242 // TODO - create a special 'error' entry in the entries tree to mark this
2243 log::error!("error reading file on event {:?}", err);
2244 }
2245 }
2246 }
2247 drop(scan_queue_tx);
2248 }
2249
2250 // Scan any directories that were created as part of this event batch.
2251 self.executor
2252 .scoped(|scope| {
2253 for _ in 0..self.executor.num_cpus() {
2254 scope.spawn(async {
2255 while let Ok(job) = scan_queue_rx.recv().await {
2256 if let Err(err) = self
2257 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2258 .await
2259 {
2260 log::error!("error scanning {:?}: {}", job.abs_path, err);
2261 }
2262 }
2263 });
2264 }
2265 })
2266 .await;
2267
2268 // Attempt to detect renames only over a single batch of file-system events.
2269 self.snapshot.lock().removed_entry_ids.clear();
2270
2271 self.update_ignore_statuses().await;
2272 true
2273 }
2274
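    /// Recomputes ignore statuses after a batch of events: `.gitignore` files touched
    /// in the current scan have the entries beneath them re-evaluated in parallel, and
    /// ignores whose `.gitignore` file no longer exists are dropped.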
2275 async fn update_ignore_statuses(&self) {
2276 let mut snapshot = self.snapshot();
2277
2278 let mut ignores_to_update = Vec::new();
2279 let mut ignores_to_delete = Vec::new();
2280 for (parent_path, (_, scan_id)) in &snapshot.ignores {
2281 if *scan_id == snapshot.scan_id && snapshot.entry_for_path(parent_path).is_some() {
2282 ignores_to_update.push(parent_path.clone());
2283 }
2284
2285 let ignore_path = parent_path.join(&*GITIGNORE);
2286 if snapshot.entry_for_path(ignore_path).is_none() {
2287 ignores_to_delete.push(parent_path.clone());
2288 }
2289 }
2290
2291 for parent_path in ignores_to_delete {
2292 snapshot.ignores.remove(&parent_path);
2293 self.snapshot.lock().ignores.remove(&parent_path);
2294 }
2295
2296 let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded();
2297 ignores_to_update.sort_unstable();
2298 let mut ignores_to_update = ignores_to_update.into_iter().peekable();
2299 while let Some(parent_path) = ignores_to_update.next() {
2300 while ignores_to_update
2301 .peek()
2302 .map_or(false, |p| p.starts_with(&parent_path))
2303 {
2304 ignores_to_update.next().unwrap();
2305 }
2306
2307 let ignore_stack = snapshot.ignore_stack_for_path(&parent_path, true);
2308 ignore_queue_tx
2309 .send(UpdateIgnoreStatusJob {
2310 path: parent_path,
2311 ignore_stack,
2312 ignore_queue: ignore_queue_tx.clone(),
2313 })
2314 .await
2315 .unwrap();
2316 }
2317 drop(ignore_queue_tx);
2318
2319 self.executor
2320 .scoped(|scope| {
2321 for _ in 0..self.executor.num_cpus() {
2322 scope.spawn(async {
2323 while let Ok(job) = ignore_queue_rx.recv().await {
2324 self.update_ignore_status(job, &snapshot).await;
2325 }
2326 });
2327 }
2328 })
2329 .await;
2330 }
2331
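    /// Re-evaluates the ignore status of the immediate children of `job.path`,
    /// enqueues a follow-up job for each child directory, and writes any changed
    /// entries back into the shared snapshot.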
2332 async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) {
2333 let mut ignore_stack = job.ignore_stack;
2334 if let Some((ignore, _)) = snapshot.ignores.get(&job.path) {
2335 ignore_stack = ignore_stack.append(job.path.clone(), ignore.clone());
2336 }
2337
2338 let mut entries_by_id_edits = Vec::new();
2339 let mut entries_by_path_edits = Vec::new();
2340 for mut entry in snapshot.child_entries(&job.path).cloned() {
2341 let was_ignored = entry.is_ignored;
2342 entry.is_ignored = ignore_stack.is_path_ignored(&entry.path, entry.is_dir());
2343 if entry.is_dir() {
2344 let child_ignore_stack = if entry.is_ignored {
2345 IgnoreStack::all()
2346 } else {
2347 ignore_stack.clone()
2348 };
2349 job.ignore_queue
2350 .send(UpdateIgnoreStatusJob {
2351 path: entry.path.clone(),
2352 ignore_stack: child_ignore_stack,
2353 ignore_queue: job.ignore_queue.clone(),
2354 })
2355 .await
2356 .unwrap();
2357 }
2358
2359 if entry.is_ignored != was_ignored {
2360 let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone();
2361 path_entry.scan_id = snapshot.scan_id;
2362 path_entry.is_ignored = entry.is_ignored;
2363 entries_by_id_edits.push(Edit::Insert(path_entry));
2364 entries_by_path_edits.push(Edit::Insert(entry));
2365 }
2366 }
2367
2368 let mut snapshot = self.snapshot.lock();
2369 snapshot.entries_by_path.edit(entries_by_path_edits, &());
2370 snapshot.entries_by_id.edit(entries_by_id_edits, &());
2371 }
2372}
2373
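/// Extends the worktree root's character bag with the lowercased characters of `path`,
/// producing the bag used when fuzzy-matching this entry.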
2374fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
2375 let mut result = root_char_bag;
2376 result.extend(
2377 path.to_string_lossy()
2378 .chars()
2379 .map(|c| c.to_ascii_lowercase()),
2380 );
2381 result
2382}
2383
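/// A unit of work for the parallel directory scan: the directory to scan, the ignore
/// stack in effect there, and a handle to the shared queue for enqueuing
/// subdirectories.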
2384struct ScanJob {
2385 abs_path: PathBuf,
2386 path: Arc<Path>,
2387 ignore_stack: Arc<IgnoreStack>,
2388 scan_queue: Sender<ScanJob>,
2389}
2390
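/// A unit of work for recomputing ignore statuses beneath a directory whose effective
/// ignore rules have changed.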
2391struct UpdateIgnoreStatusJob {
2392 path: Arc<Path>,
2393 ignore_stack: Arc<IgnoreStack>,
2394 ignore_queue: Sender<UpdateIgnoreStatusJob>,
2395}
2396
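/// Test-support extensions for worktree model handles.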
2397pub trait WorktreeHandle {
2398 #[cfg(any(test, feature = "test-support"))]
2399 fn flush_fs_events<'a>(
2400 &self,
2401 cx: &'a gpui::TestAppContext,
2402 ) -> futures::future::LocalBoxFuture<'a, ()>;
2403}
2404
2405impl WorktreeHandle for ModelHandle<Worktree> {
    // The worktree's FS event stream sometimes delivers "redundant" events for FS changes
    // that occurred before the worktree was constructed. These events can cause the worktree
    // to perform extra directory scans and emit extra scan-state notifications.
2409 //
2410 // This function mutates the worktree's directory and waits for those mutations to be picked up,
2411 // to ensure that all redundant FS events have already been processed.
2412 #[cfg(any(test, feature = "test-support"))]
2413 fn flush_fs_events<'a>(
2414 &self,
2415 cx: &'a gpui::TestAppContext,
2416 ) -> futures::future::LocalBoxFuture<'a, ()> {
2417 use smol::future::FutureExt;
2418
2419 let filename = "fs-event-sentinel";
2420 let tree = self.clone();
2421 let (fs, root_path) = self.read_with(cx, |tree, _| {
2422 let tree = tree.as_local().unwrap();
2423 (tree.fs.clone(), tree.abs_path().clone())
2424 });
2425
2426 async move {
2427 fs.create_file(&root_path.join(filename), Default::default())
2428 .await
2429 .unwrap();
2430 tree.condition(&cx, |tree, _| tree.entry_for_path(filename).is_some())
2431 .await;
2432
2433 fs.remove_file(&root_path.join(filename), Default::default())
2434 .await
2435 .unwrap();
2436 tree.condition(&cx, |tree, _| tree.entry_for_path(filename).is_none())
2437 .await;
2438
2439 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2440 .await;
2441 }
2442 .boxed_local()
2443 }
2444}
2445
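/// Traversal dimension accumulated over the entry tree: the greatest path seen so far
/// and running entry counts, broken down by visibility and by file vs. directory.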
2446#[derive(Clone, Debug)]
2447struct TraversalProgress<'a> {
2448 max_path: &'a Path,
2449 count: usize,
2450 visible_count: usize,
2451 file_count: usize,
2452 visible_file_count: usize,
2453}
2454
2455impl<'a> TraversalProgress<'a> {
2456 fn count(&self, include_dirs: bool, include_ignored: bool) -> usize {
2457 match (include_ignored, include_dirs) {
2458 (true, true) => self.count,
2459 (true, false) => self.file_count,
2460 (false, true) => self.visible_count,
2461 (false, false) => self.visible_file_count,
2462 }
2463 }
2464}
2465
2466impl<'a> sum_tree::Dimension<'a, EntrySummary> for TraversalProgress<'a> {
2467 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2468 self.max_path = summary.max_path.as_ref();
2469 self.count += summary.count;
2470 self.visible_count += summary.visible_count;
2471 self.file_count += summary.file_count;
2472 self.visible_file_count += summary.visible_file_count;
2473 }
2474}
2475
2476impl<'a> Default for TraversalProgress<'a> {
2477 fn default() -> Self {
2478 Self {
2479 max_path: Path::new(""),
2480 count: 0,
2481 visible_count: 0,
2482 file_count: 0,
2483 visible_file_count: 0,
2484 }
2485 }
2486}
2487
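/// An in-order traversal over a snapshot's entries, optionally skipping directories
/// and/or ignored entries.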
2488pub struct Traversal<'a> {
2489 cursor: sum_tree::Cursor<'a, Entry, TraversalProgress<'a>>,
2490 include_ignored: bool,
2491 include_dirs: bool,
2492}
2493
2494impl<'a> Traversal<'a> {
2495 pub fn advance(&mut self) -> bool {
2496 self.advance_to_offset(self.offset() + 1)
2497 }
2498
2499 pub fn advance_to_offset(&mut self, offset: usize) -> bool {
2500 self.cursor.seek_forward(
2501 &TraversalTarget::Count {
2502 count: offset,
2503 include_dirs: self.include_dirs,
2504 include_ignored: self.include_ignored,
2505 },
2506 Bias::Right,
2507 &(),
2508 )
2509 }
2510
2511 pub fn advance_to_sibling(&mut self) -> bool {
2512 while let Some(entry) = self.cursor.item() {
2513 self.cursor.seek_forward(
2514 &TraversalTarget::PathSuccessor(&entry.path),
2515 Bias::Left,
2516 &(),
2517 );
2518 if let Some(entry) = self.cursor.item() {
2519 if (self.include_dirs || !entry.is_dir())
2520 && (self.include_ignored || !entry.is_ignored)
2521 {
2522 return true;
2523 }
2524 }
2525 }
2526 false
2527 }
2528
2529 pub fn entry(&self) -> Option<&'a Entry> {
2530 self.cursor.item()
2531 }
2532
2533 pub fn offset(&self) -> usize {
2534 self.cursor
2535 .start()
2536 .count(self.include_dirs, self.include_ignored)
2537 }
2538}
2539
2540impl<'a> Iterator for Traversal<'a> {
2541 type Item = &'a Entry;
2542
2543 fn next(&mut self) -> Option<Self::Item> {
2544 if let Some(item) = self.entry() {
2545 self.advance();
2546 Some(item)
2547 } else {
2548 None
2549 }
2550 }
2551}
2552
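/// Positions a `Traversal` can seek to: an exact path, the first entry beyond a path's
/// subtree (`PathSuccessor`), or an absolute entry count under the traversal's
/// visibility settings.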
2553#[derive(Debug)]
2554enum TraversalTarget<'a> {
2555 Path(&'a Path),
2556 PathSuccessor(&'a Path),
2557 Count {
2558 count: usize,
2559 include_ignored: bool,
2560 include_dirs: bool,
2561 },
2562}
2563
2564impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> {
2565 fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering {
2566 match self {
2567 TraversalTarget::Path(path) => path.cmp(&cursor_location.max_path),
2568 TraversalTarget::PathSuccessor(path) => {
2569 if !cursor_location.max_path.starts_with(path) {
2570 Ordering::Equal
2571 } else {
2572 Ordering::Greater
2573 }
2574 }
2575 TraversalTarget::Count {
2576 count,
2577 include_dirs,
2578 include_ignored,
2579 } => Ord::cmp(
2580 count,
2581 &cursor_location.count(*include_dirs, *include_ignored),
2582 ),
2583 }
2584 }
2585}
2586
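/// Iterates over the immediate children of `parent_path` by advancing the underlying
/// traversal from sibling to sibling.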
2587struct ChildEntriesIter<'a> {
2588 parent_path: &'a Path,
2589 traversal: Traversal<'a>,
2590}
2591
2592impl<'a> Iterator for ChildEntriesIter<'a> {
2593 type Item = &'a Entry;
2594
2595 fn next(&mut self) -> Option<Self::Item> {
2596 if let Some(item) = self.traversal.entry() {
2597 if item.path.starts_with(&self.parent_path) {
2598 self.traversal.advance_to_sibling();
2599 return Some(item);
2600 }
2601 }
2602 None
2603 }
2604}
2605
2606impl<'a> From<&'a Entry> for proto::Entry {
2607 fn from(entry: &'a Entry) -> Self {
2608 Self {
2609 id: entry.id.to_proto(),
2610 is_dir: entry.is_dir(),
2611 path: entry.path.as_os_str().as_bytes().to_vec(),
2612 inode: entry.inode,
2613 mtime: Some(entry.mtime.into()),
2614 is_symlink: entry.is_symlink,
2615 is_ignored: entry.is_ignored,
2616 }
2617 }
2618}
2619
2620impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
2621 type Error = anyhow::Error;
2622
2623 fn try_from((root_char_bag, entry): (&'a CharBag, proto::Entry)) -> Result<Self> {
2624 if let Some(mtime) = entry.mtime {
2625 let kind = if entry.is_dir {
2626 EntryKind::Dir
2627 } else {
2628 let mut char_bag = root_char_bag.clone();
2629 char_bag.extend(
2630 String::from_utf8_lossy(&entry.path)
2631 .chars()
2632 .map(|c| c.to_ascii_lowercase()),
2633 );
2634 EntryKind::File(char_bag)
2635 };
2636 let path: Arc<Path> = PathBuf::from(OsString::from_vec(entry.path)).into();
2637 Ok(Entry {
2638 id: ProjectEntryId::from_proto(entry.id),
2639 kind,
2640 path: path.clone(),
2641 inode: entry.inode,
2642 mtime: mtime.into(),
2643 is_symlink: entry.is_symlink,
2644 is_ignored: entry.is_ignored,
2645 })
2646 } else {
2647 Err(anyhow!(
2648 "missing mtime in remote worktree entry {:?}",
2649 entry.path
2650 ))
2651 }
2652 }
2653}
2654
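/// Sends a worktree update to the server, splitting it into bounded chunks; a much
/// smaller chunk size is used in tests.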
2655async fn send_worktree_update(client: &Arc<Client>, update: proto::UpdateWorktree) -> Result<()> {
2656 #[cfg(any(test, feature = "test-support"))]
2657 const MAX_CHUNK_SIZE: usize = 2;
2658 #[cfg(not(any(test, feature = "test-support")))]
2659 const MAX_CHUNK_SIZE: usize = 256;
2660
2661 for update in proto::split_worktree_update(update, MAX_CHUNK_SIZE) {
2662 client.request(update).await?;
2663 }
2664
2665 Ok(())
2666}
2667
2668#[cfg(test)]
2669mod tests {
2670 use super::*;
2671 use crate::fs::FakeFs;
2672 use anyhow::Result;
2673 use client::test::FakeHttpClient;
2674 use fs::RealFs;
2675 use gpui::TestAppContext;
2676 use rand::prelude::*;
2677 use serde_json::json;
2678 use std::{
2679 env,
2680 fmt::Write,
2681 time::{SystemTime, UNIX_EPOCH},
2682 };
2683 use util::test::temp_tree;
2684
2685 #[gpui::test]
2686 async fn test_traversal(cx: &mut TestAppContext) {
2687 let fs = FakeFs::new(cx.background());
2688 fs.insert_tree(
2689 "/root",
2690 json!({
2691 ".gitignore": "a/b\n",
2692 "a": {
2693 "b": "",
2694 "c": "",
2695 }
2696 }),
2697 )
2698 .await;
2699
2700 let http_client = FakeHttpClient::with_404_response();
2701 let client = Client::new(http_client);
2702
2703 let tree = Worktree::local(
2704 client,
2705 Arc::from(Path::new("/root")),
2706 true,
2707 fs,
2708 Default::default(),
2709 &mut cx.to_async(),
2710 )
2711 .await
2712 .unwrap();
2713 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2714 .await;
2715
2716 tree.read_with(cx, |tree, _| {
2717 assert_eq!(
2718 tree.entries(false)
2719 .map(|entry| entry.path.as_ref())
2720 .collect::<Vec<_>>(),
2721 vec![
2722 Path::new(""),
2723 Path::new(".gitignore"),
2724 Path::new("a"),
2725 Path::new("a/c"),
2726 ]
2727 );
2728 assert_eq!(
2729 tree.entries(true)
2730 .map(|entry| entry.path.as_ref())
2731 .collect::<Vec<_>>(),
2732 vec![
2733 Path::new(""),
2734 Path::new(".gitignore"),
2735 Path::new("a"),
2736 Path::new("a/b"),
2737 Path::new("a/c"),
2738 ]
2739 );
2740 })
2741 }
2742
2743 #[gpui::test]
2744 async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
2745 let dir = temp_tree(json!({
2746 ".git": {},
2747 ".gitignore": "ignored-dir\n",
2748 "tracked-dir": {
2749 "tracked-file1": "tracked contents",
2750 },
2751 "ignored-dir": {
2752 "ignored-file1": "ignored contents",
2753 }
2754 }));
2755
2756 let http_client = FakeHttpClient::with_404_response();
2757 let client = Client::new(http_client.clone());
2758
2759 let tree = Worktree::local(
2760 client,
2761 dir.path(),
2762 true,
2763 Arc::new(RealFs),
2764 Default::default(),
2765 &mut cx.to_async(),
2766 )
2767 .await
2768 .unwrap();
2769 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2770 .await;
2771 tree.flush_fs_events(&cx).await;
2772 cx.read(|cx| {
2773 let tree = tree.read(cx);
2774 let tracked = tree.entry_for_path("tracked-dir/tracked-file1").unwrap();
2775 let ignored = tree.entry_for_path("ignored-dir/ignored-file1").unwrap();
2776 assert_eq!(tracked.is_ignored, false);
2777 assert_eq!(ignored.is_ignored, true);
2778 });
2779
2780 std::fs::write(dir.path().join("tracked-dir/tracked-file2"), "").unwrap();
2781 std::fs::write(dir.path().join("ignored-dir/ignored-file2"), "").unwrap();
2782 tree.flush_fs_events(&cx).await;
2783 cx.read(|cx| {
2784 let tree = tree.read(cx);
2785 let dot_git = tree.entry_for_path(".git").unwrap();
2786 let tracked = tree.entry_for_path("tracked-dir/tracked-file2").unwrap();
2787 let ignored = tree.entry_for_path("ignored-dir/ignored-file2").unwrap();
2788 assert_eq!(tracked.is_ignored, false);
2789 assert_eq!(ignored.is_ignored, true);
2790 assert_eq!(dot_git.is_ignored, true);
2791 });
2792 }
2793
2794 #[gpui::test]
2795 async fn test_write_file(cx: &mut TestAppContext) {
2796 let dir = temp_tree(json!({
2797 ".git": {},
2798 ".gitignore": "ignored-dir\n",
2799 "tracked-dir": {},
2800 "ignored-dir": {}
2801 }));
2802
2803 let http_client = FakeHttpClient::with_404_response();
2804 let client = Client::new(http_client.clone());
2805
2806 let tree = Worktree::local(
2807 client,
2808 dir.path(),
2809 true,
2810 Arc::new(RealFs),
2811 Default::default(),
2812 &mut cx.to_async(),
2813 )
2814 .await
2815 .unwrap();
2816 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2817 .await;
2818 tree.flush_fs_events(&cx).await;
2819
2820 tree.update(cx, |tree, cx| {
2821 tree.as_local().unwrap().write_file(
2822 Path::new("tracked-dir/file.txt"),
2823 "hello".into(),
2824 cx,
2825 )
2826 })
2827 .await
2828 .unwrap();
2829 tree.update(cx, |tree, cx| {
2830 tree.as_local().unwrap().write_file(
2831 Path::new("ignored-dir/file.txt"),
2832 "world".into(),
2833 cx,
2834 )
2835 })
2836 .await
2837 .unwrap();
2838
2839 tree.read_with(cx, |tree, _| {
2840 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
2841 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
2842 assert_eq!(tracked.is_ignored, false);
2843 assert_eq!(ignored.is_ignored, true);
2844 });
2845 }
2846
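    // Randomized test: repeatedly mutate a real temp directory while feeding the
    // resulting events to a `BackgroundScanner`, then check that the incrementally
    // maintained snapshot matches a fresh scan and that snapshots can be brought up
    // to date via remote-style updates.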
2847 #[gpui::test(iterations = 100)]
2848 fn test_random(mut rng: StdRng) {
2849 let operations = env::var("OPERATIONS")
2850 .map(|o| o.parse().unwrap())
2851 .unwrap_or(40);
2852 let initial_entries = env::var("INITIAL_ENTRIES")
2853 .map(|o| o.parse().unwrap())
2854 .unwrap_or(20);
2855
2856 let root_dir = tempdir::TempDir::new("worktree-test").unwrap();
2857 for _ in 0..initial_entries {
2858 randomly_mutate_tree(root_dir.path(), 1.0, &mut rng).unwrap();
2859 }
2860 log::info!("Generated initial tree");
2861
2862 let (notify_tx, _notify_rx) = mpsc::unbounded();
2863 let fs = Arc::new(RealFs);
2864 let next_entry_id = Arc::new(AtomicUsize::new(0));
2865 let mut initial_snapshot = LocalSnapshot {
2866 abs_path: root_dir.path().into(),
2867 removed_entry_ids: Default::default(),
2868 ignores: Default::default(),
2869 next_entry_id: next_entry_id.clone(),
2870 snapshot: Snapshot {
2871 id: WorktreeId::from_usize(0),
2872 entries_by_path: Default::default(),
2873 entries_by_id: Default::default(),
2874 root_name: Default::default(),
2875 root_char_bag: Default::default(),
2876 scan_id: 0,
2877 is_complete: true,
2878 },
2879 };
2880 initial_snapshot.insert_entry(
2881 Entry::new(
2882 Path::new("").into(),
2883 &smol::block_on(fs.metadata(root_dir.path()))
2884 .unwrap()
2885 .unwrap(),
2886 &next_entry_id,
2887 Default::default(),
2888 ),
2889 fs.as_ref(),
2890 );
2891 let mut scanner = BackgroundScanner::new(
2892 Arc::new(Mutex::new(initial_snapshot.clone())),
2893 notify_tx,
2894 fs.clone(),
2895 Arc::new(gpui::executor::Background::new()),
2896 );
2897 smol::block_on(scanner.scan_dirs()).unwrap();
2898 scanner.snapshot().check_invariants();
2899
2900 let mut events = Vec::new();
2901 let mut snapshots = Vec::new();
2902 let mut mutations_len = operations;
2903 while mutations_len > 1 {
2904 if !events.is_empty() && rng.gen_bool(0.4) {
2905 let len = rng.gen_range(0..=events.len());
2906 let to_deliver = events.drain(0..len).collect::<Vec<_>>();
2907 log::info!("Delivering events: {:#?}", to_deliver);
2908 smol::block_on(scanner.process_events(to_deliver));
2909 scanner.snapshot().check_invariants();
2910 } else {
2911 events.extend(randomly_mutate_tree(root_dir.path(), 0.6, &mut rng).unwrap());
2912 mutations_len -= 1;
2913 }
2914
2915 if rng.gen_bool(0.2) {
2916 snapshots.push(scanner.snapshot());
2917 }
2918 }
2919 log::info!("Quiescing: {:#?}", events);
2920 smol::block_on(scanner.process_events(events));
2921 scanner.snapshot().check_invariants();
2922
2923 let (notify_tx, _notify_rx) = mpsc::unbounded();
2924 let mut new_scanner = BackgroundScanner::new(
2925 Arc::new(Mutex::new(initial_snapshot)),
2926 notify_tx,
2927 scanner.fs.clone(),
2928 scanner.executor.clone(),
2929 );
2930 smol::block_on(new_scanner.scan_dirs()).unwrap();
2931 assert_eq!(
2932 scanner.snapshot().to_vec(true),
2933 new_scanner.snapshot().to_vec(true)
2934 );
2935
2936 for mut prev_snapshot in snapshots {
2937 let include_ignored = rng.gen::<bool>();
2938 if !include_ignored {
2939 let mut entries_by_path_edits = Vec::new();
2940 let mut entries_by_id_edits = Vec::new();
2941 for entry in prev_snapshot
2942 .entries_by_id
2943 .cursor::<()>()
2944 .filter(|e| e.is_ignored)
2945 {
2946 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
2947 entries_by_id_edits.push(Edit::Remove(entry.id));
2948 }
2949
2950 prev_snapshot
2951 .entries_by_path
2952 .edit(entries_by_path_edits, &());
2953 prev_snapshot.entries_by_id.edit(entries_by_id_edits, &());
2954 }
2955
2956 let update = scanner
2957 .snapshot()
2958 .build_update(&prev_snapshot, 0, 0, include_ignored);
2959 prev_snapshot.apply_remote_update(update).unwrap();
2960 assert_eq!(
2961 prev_snapshot.to_vec(true),
2962 scanner.snapshot().to_vec(include_ignored)
2963 );
2964 }
2965 }
2966
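    // Randomly creates, renames, or deletes files and directories (and occasionally
    // writes a `.gitignore`) under `root_path`, returning synthetic `fsevent::Event`s
    // describing the mutations.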
2967 fn randomly_mutate_tree(
2968 root_path: &Path,
2969 insertion_probability: f64,
2970 rng: &mut impl Rng,
2971 ) -> Result<Vec<fsevent::Event>> {
2972 let root_path = root_path.canonicalize().unwrap();
2973 let (dirs, files) = read_dir_recursive(root_path.clone());
2974
2975 let mut events = Vec::new();
2976 let mut record_event = |path: PathBuf| {
2977 events.push(fsevent::Event {
2978 event_id: SystemTime::now()
2979 .duration_since(UNIX_EPOCH)
2980 .unwrap()
2981 .as_secs(),
2982 flags: fsevent::StreamFlags::empty(),
2983 path,
2984 });
2985 };
2986
2987 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
2988 let path = dirs.choose(rng).unwrap();
2989 let new_path = path.join(gen_name(rng));
2990
2991 if rng.gen() {
2992 log::info!("Creating dir {:?}", new_path.strip_prefix(root_path)?);
2993 std::fs::create_dir(&new_path)?;
2994 } else {
2995 log::info!("Creating file {:?}", new_path.strip_prefix(root_path)?);
2996 std::fs::write(&new_path, "")?;
2997 }
2998 record_event(new_path);
2999 } else if rng.gen_bool(0.05) {
3000 let ignore_dir_path = dirs.choose(rng).unwrap();
3001 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
3002
3003 let (subdirs, subfiles) = read_dir_recursive(ignore_dir_path.clone());
3004 let files_to_ignore = {
3005 let len = rng.gen_range(0..=subfiles.len());
3006 subfiles.choose_multiple(rng, len)
3007 };
3008 let dirs_to_ignore = {
3009 let len = rng.gen_range(0..subdirs.len());
3010 subdirs.choose_multiple(rng, len)
3011 };
3012
3013 let mut ignore_contents = String::new();
3014 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
3015 write!(
3016 ignore_contents,
3017 "{}\n",
3018 path_to_ignore
3019 .strip_prefix(&ignore_dir_path)?
3020 .to_str()
3021 .unwrap()
3022 )
3023 .unwrap();
3024 }
3025 log::info!(
3026 "Creating {:?} with contents:\n{}",
3027 ignore_path.strip_prefix(&root_path)?,
3028 ignore_contents
3029 );
3030 std::fs::write(&ignore_path, ignore_contents).unwrap();
3031 record_event(ignore_path);
3032 } else {
3033 let old_path = {
3034 let file_path = files.choose(rng);
3035 let dir_path = dirs[1..].choose(rng);
3036 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
3037 };
3038
3039 let is_rename = rng.gen();
3040 if is_rename {
3041 let new_path_parent = dirs
3042 .iter()
3043 .filter(|d| !d.starts_with(old_path))
3044 .choose(rng)
3045 .unwrap();
3046
3047 let overwrite_existing_dir =
3048 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
3049 let new_path = if overwrite_existing_dir {
3050 std::fs::remove_dir_all(&new_path_parent).ok();
3051 new_path_parent.to_path_buf()
3052 } else {
3053 new_path_parent.join(gen_name(rng))
3054 };
3055
3056 log::info!(
3057 "Renaming {:?} to {}{:?}",
3058 old_path.strip_prefix(&root_path)?,
3059 if overwrite_existing_dir {
3060 "overwrite "
3061 } else {
3062 ""
3063 },
3064 new_path.strip_prefix(&root_path)?
3065 );
3066 std::fs::rename(&old_path, &new_path)?;
3067 record_event(old_path.clone());
3068 record_event(new_path);
3069 } else if old_path.is_dir() {
3070 let (dirs, files) = read_dir_recursive(old_path.clone());
3071
3072 log::info!("Deleting dir {:?}", old_path.strip_prefix(&root_path)?);
3073 std::fs::remove_dir_all(&old_path).unwrap();
3074 for file in files {
3075 record_event(file);
3076 }
3077 for dir in dirs {
3078 record_event(dir);
3079 }
3080 } else {
3081 log::info!("Deleting file {:?}", old_path.strip_prefix(&root_path)?);
3082 std::fs::remove_file(old_path).unwrap();
3083 record_event(old_path.clone());
3084 }
3085 }
3086
3087 Ok(events)
3088 }
3089
3090 fn read_dir_recursive(path: PathBuf) -> (Vec<PathBuf>, Vec<PathBuf>) {
3091 let child_entries = std::fs::read_dir(&path).unwrap();
3092 let mut dirs = vec![path];
3093 let mut files = Vec::new();
3094 for child_entry in child_entries {
3095 let child_path = child_entry.unwrap().path();
3096 if child_path.is_dir() {
3097 let (child_dirs, child_files) = read_dir_recursive(child_path);
3098 dirs.extend(child_dirs);
3099 files.extend(child_files);
3100 } else {
3101 files.push(child_path);
3102 }
3103 }
3104 (dirs, files)
3105 }
3106
3107 fn gen_name(rng: &mut impl Rng) -> String {
3108 (0..6)
3109 .map(|_| rng.sample(rand::distributions::Alphanumeric))
3110 .map(char::from)
3111 .collect()
3112 }
3113
3114 impl LocalSnapshot {
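        // Checks internal consistency: the file iterators agree with the entry tree,
        // a recursive walk via `child_entries` yields the same paths as the raw cursor
        // and the `entries` traversal, and every tracked ignore has both its parent
        // entry and its `.gitignore` entry present.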
3115 fn check_invariants(&self) {
3116 let mut files = self.files(true, 0);
3117 let mut visible_files = self.files(false, 0);
3118 for entry in self.entries_by_path.cursor::<()>() {
3119 if entry.is_file() {
3120 assert_eq!(files.next().unwrap().inode, entry.inode);
3121 if !entry.is_ignored {
3122 assert_eq!(visible_files.next().unwrap().inode, entry.inode);
3123 }
3124 }
3125 }
3126 assert!(files.next().is_none());
3127 assert!(visible_files.next().is_none());
3128
3129 let mut bfs_paths = Vec::new();
3130 let mut stack = vec![Path::new("")];
3131 while let Some(path) = stack.pop() {
3132 bfs_paths.push(path);
3133 let ix = stack.len();
3134 for child_entry in self.child_entries(path) {
3135 stack.insert(ix, &child_entry.path);
3136 }
3137 }
3138
3139 let dfs_paths_via_iter = self
3140 .entries_by_path
3141 .cursor::<()>()
3142 .map(|e| e.path.as_ref())
3143 .collect::<Vec<_>>();
3144 assert_eq!(bfs_paths, dfs_paths_via_iter);
3145
3146 let dfs_paths_via_traversal = self
3147 .entries(true)
3148 .map(|e| e.path.as_ref())
3149 .collect::<Vec<_>>();
3150 assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);
3151
3152 for (ignore_parent_path, _) in &self.ignores {
3153 assert!(self.entry_for_path(ignore_parent_path).is_some());
3154 assert!(self
3155 .entry_for_path(ignore_parent_path.join(&*GITIGNORE))
3156 .is_some());
3157 }
3158 }
3159
3160 fn to_vec(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> {
3161 let mut paths = Vec::new();
3162 for entry in self.entries_by_path.cursor::<()>() {
3163 if include_ignored || !entry.is_ignored {
3164 paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
3165 }
3166 }
3167 paths.sort_by(|a, b| a.0.cmp(&b.0));
3168 paths
3169 }
3170 }
3171}