1mod worktree_settings;
2
3use anyhow::Result;
4use encoding_rs;
5use fs::{FakeFs, Fs, PathEventKind, RealFs, RemoveOptions};
6use git::{DOT_GIT, GITIGNORE, REPO_EXCLUDE};
7use gpui::{AppContext as _, BackgroundExecutor, BorrowAppContext, Context, Task, TestAppContext};
8use parking_lot::Mutex;
9use postage::stream::Stream;
10use pretty_assertions::assert_eq;
11use rand::prelude::*;
12use rpc::{AnyProtoClient, NoopProtoClient, proto};
13use worktree::{Entry, EntryKind, Event, PathChange, Worktree, WorktreeModelHandle};
14
15use serde_json::json;
16use settings::{SettingsStore, WorktreeId};
17use std::{
18 cell::Cell,
19 env,
20 fmt::Write,
21 mem,
22 path::{Path, PathBuf},
23 rc::Rc,
24 sync::Arc,
25};
26use util::{
27 ResultExt, path,
28 paths::PathStyle,
29 rel_path::{RelPath, rel_path},
30 test::TempTree,
31};
32
33#[gpui::test]
34async fn test_traversal(cx: &mut TestAppContext) {
35 init_test(cx);
36 let fs = FakeFs::new(cx.background_executor.clone());
37 fs.insert_tree(
38 "/root",
39 json!({
40 ".gitignore": "a/b\n",
41 "a": {
42 "b": "",
43 "c": "",
44 }
45 }),
46 )
47 .await;
48
49 let tree = Worktree::local(
50 Path::new("/root"),
51 true,
52 fs,
53 Default::default(),
54 true,
55 WorktreeId::from_proto(0),
56 &mut cx.to_async(),
57 )
58 .await
59 .unwrap();
60 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
61 .await;
62
63 tree.read_with(cx, |tree, _| {
64 assert_eq!(
65 tree.entries(false, 0)
66 .map(|entry| entry.path.as_ref())
67 .collect::<Vec<_>>(),
68 vec![
69 rel_path(""),
70 rel_path(".gitignore"),
71 rel_path("a"),
72 rel_path("a/c"),
73 ]
74 );
75 assert_eq!(
76 tree.entries(true, 0)
77 .map(|entry| entry.path.as_ref())
78 .collect::<Vec<_>>(),
79 vec![
80 rel_path(""),
81 rel_path(".gitignore"),
82 rel_path("a"),
83 rel_path("a/b"),
84 rel_path("a/c"),
85 ]
86 );
87 })
88}
89
/// Two symlinks point back up the tree (`lib/a/lib -> ..` and
/// `lib/b/lib -> ..`), forming a cycle. The scanner must record the symlinks
/// as entries without following them, so the scan terminates. Run for 10
/// iterations to shake out ordering-dependent scanner behavior.
#[gpui::test(iterations = 10)]
async fn test_circular_symlinks(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "lib": {
                "a": {
                    "a.txt": ""
                },
                "b": {
                    "b.txt": ""
                }
            }
        }),
    )
    .await;
    // Create the cycle: each subdirectory links back to its parent.
    fs.create_symlink("/root/lib/a/lib".as_ref(), "..".into())
        .await
        .unwrap();
    fs.create_symlink("/root/lib/b/lib".as_ref(), "..".into())
        .await
        .unwrap();

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // The symlinks appear as plain entries; their targets are not recursed into.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                rel_path(""),
                rel_path("lib"),
                rel_path("lib/a"),
                rel_path("lib/a/a.txt"),
                rel_path("lib/a/lib"),
                rel_path("lib/b"),
                rel_path("lib/b/b.txt"),
                rel_path("lib/b/lib"),
            ]
        );
    });

    // Renaming one of the cyclic symlinks must be picked up after events settle.
    fs.rename(
        Path::new("/root/lib/a/lib"),
        Path::new("/root/lib/a/lib-2"),
        Default::default(),
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                rel_path(""),
                rel_path("lib"),
                rel_path("lib/a"),
                rel_path("lib/a/a.txt"),
                rel_path("lib/a/lib-2"),
                rel_path("lib/b"),
                rel_path("lib/b/b.txt"),
                rel_path("lib/b/lib"),
            ]
        );
    });
}
174
/// Symlinks in `dir1/deps` point at directories *outside* the worktree root
/// (`dir1`). Covers: external entries are flagged `is_external` and left
/// unloaded by default; expanding them loads one directory level at a time;
/// fs changes inside an expanded external target are reflected at every
/// logical path whose symlink covers it, with the longest-prefix link
/// (`dep-dir3-nested`) winning over shorter ones for nested targets.
#[gpui::test]
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "dir1": {
                "deps": {
                    // symlinks here
                },
                "src": {
                    "a.rs": "",
                    "b.rs": "",
                },
            },
            "dir2": {
                "src": {
                    "c.rs": "",
                    "d.rs": "",
                }
            },
            "dir3": {
                "deps": {},
                "src": {
                    "e.rs": "",
                    "f.rs": "",
                    "nested": {
                        "deep.rs": ""
                    }
                },
            }
        }),
    )
    .await;

    // These symlinks point to directories outside of the worktree's root, dir1.
    fs.create_symlink("/root/dir1/deps/dep-dir2".as_ref(), "../../dir2".into())
        .await
        .unwrap();
    fs.create_symlink("/root/dir1/deps/dep-dir3".as_ref(), "../../dir3".into())
        .await
        .unwrap();
    // A second link to the same target, to verify both logical paths observe changes.
    fs.create_symlink(
        "/root/dir1/deps/dep-dir3-alias".as_ref(),
        "../../dir3".into(),
    )
    .await
    .unwrap();
    // A link directly into a subdirectory of dir3, for longest-prefix matching.
    fs.create_symlink(
        "/root/dir1/deps/dep-dir3-nested".as_ref(),
        "../../dir3/src/nested".into(),
    )
    .await
    .unwrap();

    let tree = Worktree::local(
        Path::new("/root/dir1"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Record every entry-update event so we can assert exactly what was reported.
    let tree_updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |_, cx| {
        let tree_updates = tree_updates.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedEntries(update) = event {
                tree_updates.lock().extend(
                    update
                        .iter()
                        .map(|(path, _, change)| (path.clone(), *change)),
                );
            }
        })
        .detach();
    });

    // The symlinked directories are not scanned by default.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("deps/dep-dir3-alias"), true),
                (rel_path("deps/dep-dir3-nested"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );

        assert_eq!(
            tree.entry_for_path(rel_path("deps/dep-dir2")).unwrap().kind,
            EntryKind::UnloadedDir
        );
    });

    // Expand one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3").into()])
    })
    .recv()
    .await;

    // The expanded directory's contents are loaded. Subdirectories are
    // not scanned yet.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("deps/dep-dir3/deps"), true),
                (rel_path("deps/dep-dir3/src"), true),
                (rel_path("deps/dep-dir3-alias"), true),
                (rel_path("deps/dep-dir3-nested"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );
    });
    // Only the expanded directory and its immediate children were reported.
    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (rel_path("deps/dep-dir3").into(), PathChange::Loaded),
            (rel_path("deps/dep-dir3/deps").into(), PathChange::Loaded),
            (rel_path("deps/dep-dir3/src").into(), PathChange::Loaded)
        ]
    );

    // Expand a subdirectory of one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3/src").into()])
    })
    .recv()
    .await;

    // The expanded subdirectory's contents are loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("deps/dep-dir3/deps"), true),
                (rel_path("deps/dep-dir3/src"), true),
                (rel_path("deps/dep-dir3/src/e.rs"), true),
                (rel_path("deps/dep-dir3/src/f.rs"), true),
                (rel_path("deps/dep-dir3/src/nested"), true),
                (rel_path("deps/dep-dir3-alias"), true),
                (rel_path("deps/dep-dir3-nested"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );
    });

    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (rel_path("deps/dep-dir3/src").into(), PathChange::Loaded),
            (
                rel_path("deps/dep-dir3/src/e.rs").into(),
                PathChange::Loaded
            ),
            (
                rel_path("deps/dep-dir3/src/f.rs").into(),
                PathChange::Loaded
            ),
            (
                rel_path("deps/dep-dir3/src/nested").into(),
                PathChange::Loaded
            )
        ]
    );

    // After an external symlink subtree is loaded, changes in the target should be reflected.
    fs.insert_file(Path::new("/root/dir3/src/new.rs"), b"".to_vec())
        .await;

    wait_for_condition(cx, |cx| {
        tree.read_with(cx, |tree, _| {
            tree.entry_for_path(rel_path("deps/dep-dir3/src/new.rs"))
                .is_some()
        })
    })
    .await;

    tree.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("deps/dep-dir3/src/new.rs"))
                .is_some()
        );
    });

    // Expand the alias link and its `src` subdirectory, plus the nested link,
    // so all three logical views of dir3's contents are loaded.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3-alias").into()])
    })
    .recv()
    .await;

    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3-alias/src").into()])
    })
    .recv()
    .await;

    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3-nested").into()])
    })
    .recv()
    .await;
    // Create a file in the shared target subtree. Because dep-dir3 and dep-dir3-alias both
    // point to the same target, both logical paths should observe the new file.
    fs.insert_file(Path::new("/root/dir3/src/shared-new.rs"), b"".to_vec())
        .await;

    wait_for_condition(cx, |cx| {
        tree.read_with(cx, |tree, _| {
            tree.entry_for_path(rel_path("deps/dep-dir3/src/shared-new.rs"))
                .is_some()
                && tree
                    .entry_for_path(rel_path("deps/dep-dir3-alias/src/shared-new.rs"))
                    .is_some()
        })
    })
    .await;

    tree.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("deps/dep-dir3/src/shared-new.rs"))
                .is_some()
        );
        assert!(
            tree.entry_for_path(rel_path("deps/dep-dir3-alias/src/shared-new.rs"))
                .is_some()
        );
    });

    // Create a file under the more specific nested target. Longest-prefix matching means this should appear under dep-dir3-nested
    fs.insert_file(
        Path::new("/root/dir3/src/nested/longest-prefix.rs"),
        b"".to_vec(),
    )
    .await;

    wait_for_condition(cx, |cx| {
        tree.read_with(cx, |tree, _| {
            tree.entry_for_path(rel_path("deps/dep-dir3-nested/longest-prefix.rs"))
                .is_some()
        })
    })
    .await;

    tree.read_with(cx, |tree, _| {
        // Only the longest-prefix link observes the change; the shorter links do
        // not (their `src/nested` subdirectories were never expanded).
        assert!(
            tree.entry_for_path(rel_path("deps/dep-dir3-nested/longest-prefix.rs"))
                .is_some()
        );
        assert!(
            tree.entry_for_path(rel_path("deps/dep-dir3/src/nested/longest-prefix.rs"))
                .is_none()
        );
        assert!(
            tree.entry_for_path(rel_path("deps/dep-dir3-alias/src/nested/longest-prefix.rs"))
                .is_none()
        );
    });
}
478
/// A symlink whose target lies *inside* the worktree root is treated as a
/// regular directory (`is_external == false`, kind `Dir`): it is scanned
/// eagerly, and fs changes in the real target show up under the symlinked
/// path as well.
#[gpui::test]
async fn test_symlinked_dir_inside_project(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());

    fs.insert_tree(
        "/root",
        json!({
            "project": {
                "real-dir": {
                    "existing.rs": "",
                    "nested": {
                        "deep.rs": ""
                    }
                },
                "links": {}
            }
        }),
    )
    .await;

    // Internal symlink: its target (`real-dir`) is inside the worktree root.
    fs.create_symlink(
        "/root/project/links/internal".as_ref(),
        "../real-dir".into(),
    )
    .await
    .unwrap();

    let tree = Worktree::local(
        Path::new("/root/project"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Unlike external symlinks, the internal one is fully scanned up front.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("links"), false),
                (rel_path("links/internal"), false),
                (rel_path("links/internal/existing.rs"), false),
                (rel_path("links/internal/nested"), false),
                (rel_path("links/internal/nested/deep.rs"), false),
                (rel_path("real-dir"), false),
                (rel_path("real-dir/existing.rs"), false),
                (rel_path("real-dir/nested"), false),
                (rel_path("real-dir/nested/deep.rs"), false),
            ]
        );

        assert_eq!(
            tree.entry_for_path(rel_path("links/internal"))
                .unwrap()
                .kind,
            EntryKind::Dir
        );
    });

    // A file created in the target appears under the symlinked path too.
    fs.insert_file(Path::new("/root/project/real-dir/new.txt"), b"".to_vec())
        .await;
    wait_for_condition(cx, |cx| {
        tree.read_with(cx, |tree, _| {
            tree.entry_for_path(rel_path("links/internal/new.txt"))
                .is_some()
        })
    })
    .await;

    tree.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("links/internal/new.txt"))
                .is_some()
        );
    });

    // Same for a file created in a nested subdirectory of the target.
    fs.insert_file(
        Path::new("/root/project/real-dir/nested/inner.txt"),
        b"".to_vec(),
    )
    .await;
    wait_for_condition(cx, |cx| {
        tree.read_with(cx, |tree, _| {
            tree.entry_for_path(rel_path("links/internal/nested/inner.txt"))
                .is_some()
        })
    })
    .await;

    tree.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("links/internal/nested/inner.txt"))
                .is_some()
        );
    });
}
586
/// Renaming a file so that only its letter case changes (`aaa.rs` ->
/// `AAA.rs`) must be reflected in the worktree. macOS-only — presumably
/// because the default macOS volume is case-insensitive, which makes this
/// rename a special case for the fs watcher; TODO confirm. Uses the real
/// file system and a temp directory, hence `allow_parking`.
#[cfg(target_os = "macos")]
#[gpui::test]
async fn test_renaming_case_only(cx: &mut TestAppContext) {
    cx.executor().allow_parking();
    init_test(cx);

    const OLD_NAME: &str = "aaa.rs";
    const NEW_NAME: &str = "AAA.rs";

    let fs = Arc::new(RealFs::new(None, cx.executor()));
    let temp_root = TempTree::new(json!({
        OLD_NAME: "",
    }));

    let tree = Worktree::local(
        temp_root.path(),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![rel_path(""), rel_path(OLD_NAME)]
        );
    });

    // `overwrite` + `ignore_if_exists` because on a case-insensitive fs the
    // destination name refers to the same underlying file as the source.
    fs.rename(
        &temp_root.path().join(OLD_NAME),
        &temp_root.path().join(NEW_NAME),
        fs::RenameOptions {
            overwrite: true,
            ignore_if_exists: true,
            create_parents: false,
        },
    )
    .await
    .unwrap();

    tree.flush_fs_events(cx).await;

    // The snapshot must now show the new casing only.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![rel_path(""), rel_path(NEW_NAME)]
        );
    });
}
647
/// While fs events are paused, one file is deleted and another created, and
/// the buffered events are then discarded — leaving the worktree's snapshot
/// stale. A single `Rescan` event for the root must be enough to reconcile
/// the snapshot with the actual file system state.
#[gpui::test]
async fn test_root_rescan_reconciles_stale_state(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "old.txt": "",
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![rel_path(""), rel_path("old.txt")]
        );
    });

    // Mutate the fs while events are paused, then drop the buffered events so
    // the worktree never hears about these changes directly.
    fs.pause_events();
    fs.remove_file(Path::new("/root/old.txt"), RemoveOptions::default())
        .await
        .unwrap();
    fs.insert_file(Path::new("/root/new.txt"), Vec::new()).await;
    assert_eq!(fs.buffered_event_count(), 2);
    fs.clear_buffered_events();

    // The snapshot is now stale: it still shows the deleted file and not the new one.
    tree.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path(rel_path("old.txt")).is_some());
        assert!(tree.entry_for_path(rel_path("new.txt")).is_none());
    });

    // A root-level rescan event must bring the snapshot back in sync.
    fs.emit_fs_event("/root", Some(fs::PathEventKind::Rescan));
    fs.unpause_events_and_flush();
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path(rel_path("old.txt")).is_none());
        assert!(tree.entry_for_path(rel_path("new.txt")).is_some());
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![rel_path(""), rel_path("new.txt")]
        );
    });
}
712
/// A `Rescan` event for a subtree (`/root/dir`) must report every surviving
/// descendant as `Updated` (even if it did not actually change), the file
/// added while events were paused as `Added`, and the removed directory and
/// its contents as `Removed`. Entries outside the rescanned subtree
/// (`other.txt`) are untouched.
#[gpui::test]
async fn test_subtree_rescan_reports_unchanged_descendants_as_updated(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "child.txt": "",
                "nested": {
                    "grandchild.txt": "",
                },
                "remove": {
                    "removed.txt": "",
                }
            },
            "other.txt": "",
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Capture entry-update events; the "fs-event-sentinel" path is filtered
    // out — presumably an artifact of flush_fs_events, TODO confirm.
    let tree_updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |_, cx| {
        let tree_updates = tree_updates.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedEntries(update) = event {
                tree_updates.lock().extend(
                    update
                        .iter()
                        .filter(|(path, _, _)| path.as_ref() != rel_path("fs-event-sentinel"))
                        .map(|(path, _, change)| (path.clone(), *change)),
                );
            }
        })
        .detach();
    });
    // Mutate the fs while events are paused and discard the buffered events,
    // so the only notification the worktree gets is the rescan below.
    fs.pause_events();
    fs.insert_file("/root/dir/new.txt", b"new content".to_vec())
        .await;
    fs.remove_dir(
        "/root/dir/remove".as_ref(),
        RemoveOptions {
            recursive: true,
            ignore_if_not_exists: false,
        },
    )
    .await
    .unwrap();
    fs.clear_buffered_events();
    fs.unpause_events_and_flush();

    fs.emit_fs_event("/root/dir", Some(fs::PathEventKind::Rescan));
    tree.flush_fs_events(cx).await;

    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (rel_path("dir").into(), PathChange::Updated),
            (rel_path("dir/child.txt").into(), PathChange::Updated),
            (rel_path("dir/nested").into(), PathChange::Updated),
            (
                rel_path("dir/nested/grandchild.txt").into(),
                PathChange::Updated
            ),
            (rel_path("dir/new.txt").into(), PathChange::Added),
            (rel_path("dir/remove").into(), PathChange::Removed),
            (
                rel_path("dir/remove/removed.txt").into(),
                PathChange::Removed
            ),
        ]
    );

    // The sibling outside the rescanned subtree is still present.
    tree.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path(rel_path("other.txt")).is_some());
    });
}
805
/// Loading a file nested inside an unloaded gitignored directory must lazily
/// scan only the ancestor directories needed to reach it (verified by
/// counting `read_dir` calls), and subsequent fs changes inside a
/// still-unloaded ignored directory must cost no additional fs calls.
#[gpui::test]
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "one": {
                "node_modules": {
                    "a": {
                        "a1.js": "a1",
                        "a2.js": "a2",
                    },
                    "b": {
                        "b1.js": "b1",
                        "b2.js": "b2",
                    },
                    "c": {
                        "c1.js": "c1",
                        "c2.js": "c2",
                    }
                },
            },
            "two": {
                "x.js": "",
                "y.js": "",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // After the initial scan, the ignored directory exists as an entry but
    // none of its children have been scanned.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );
    });

    // Open a file that is nested inside of a gitignored directory that
    // has not yet been expanded.
    let prev_read_dir_count = fs.read_dir_call_count();
    let loaded = tree
        .update(cx, |tree, cx| {
            tree.load_file(rel_path("one/node_modules/b/b1.js"), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("one/node_modules/a"), true),
                (rel_path("one/node_modules/b"), true),
                (rel_path("one/node_modules/b/b1.js"), true),
                (rel_path("one/node_modules/b/b2.js"), true),
                (rel_path("one/node_modules/c"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );

        assert_eq!(
            loaded.file.path.as_ref(),
            rel_path("one/node_modules/b/b1.js")
        );

        // Only the newly-expanded directories are scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
    });

    // Open another file in a different subdirectory of the same
    // gitignored directory.
    let prev_read_dir_count = fs.read_dir_call_count();
    let loaded = tree
        .update(cx, |tree, cx| {
            tree.load_file(rel_path("one/node_modules/a/a2.js"), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("one/node_modules/a"), true),
                (rel_path("one/node_modules/a/a1.js"), true),
                (rel_path("one/node_modules/a/a2.js"), true),
                (rel_path("one/node_modules/b"), true),
                (rel_path("one/node_modules/b/b1.js"), true),
                (rel_path("one/node_modules/b/b2.js"), true),
                (rel_path("one/node_modules/c"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );

        assert_eq!(
            loaded.file.path.as_ref(),
            rel_path("one/node_modules/a/a2.js")
        );

        // Only the newly-expanded directory is scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
    });

    let path = PathBuf::from("/root/one/node_modules/c/lib");

    // No work happens when files and directories change within an unloaded directory.
    let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
    // When we open a directory, we check each ancestor whether it's a git
    // repository. That means we have an fs.metadata call per ancestor that we
    // need to subtract here.
    let ancestors = path.ancestors().count();

    fs.create_dir(path.as_ref()).await.unwrap();
    cx.executor().run_until_parked();

    assert_eq!(
        fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count - ancestors,
        0
    );
}
969
/// When a `.gitignore` change un-ignores a previously ignored directory, its
/// not-yet-loaded subdirectories must be scanned — exactly once each — while
/// a subdirectory that the new ignore file matches stays unloaded.
#[gpui::test]
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "a": {
                "a.js": "",
            },
            "b": {
                "b.js": "",
            },
            "node_modules": {
                "c": {
                    "c.js": "",
                },
                "d": {
                    "d.js": "",
                    "e": {
                        "e1.js": "",
                        "e2.js": "",
                    },
                    "f": {
                        "f1.js": "",
                        "f2.js": "",
                    }
                },
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Open a file within the gitignored directory, forcing some of its
    // subdirectories to be read, but not all.
    let read_dir_count_1 = fs.read_dir_call_count();
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("node_modules/d/d.js").into()])
    })
    .recv()
    .await;

    // Those subdirectories are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("a"), false),
                (rel_path("a/a.js"), false),
                (rel_path("b"), false),
                (rel_path("b/b.js"), false),
                (rel_path("node_modules"), true),
                (rel_path("node_modules/c"), true),
                (rel_path("node_modules/d"), true),
                (rel_path("node_modules/d/d.js"), true),
                (rel_path("node_modules/d/e"), true),
                (rel_path("node_modules/d/f"), true),
            ]
        );
    });
    // Reaching `d.js` required reading `node_modules` and `node_modules/d`.
    let read_dir_count_2 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);

    // Update the gitignore so that node_modules is no longer ignored,
    // but a subdirectory is ignored
    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // All of the directories that are no longer ignored are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("a"), false),
                (rel_path("a/a.js"), false),
                (rel_path("b"), false),
                (rel_path("b/b.js"), false),
                // This directory is no longer ignored
                (rel_path("node_modules"), false),
                (rel_path("node_modules/c"), false),
                (rel_path("node_modules/c/c.js"), false),
                (rel_path("node_modules/d"), false),
                (rel_path("node_modules/d/d.js"), false),
                // This subdirectory is now ignored
                (rel_path("node_modules/d/e"), true),
                (rel_path("node_modules/d/f"), false),
                (rel_path("node_modules/d/f/f1.js"), false),
                (rel_path("node_modules/d/f/f2.js"), false),
            ]
        );
    });

    // Each of the newly-loaded directories is scanned only once.
    let read_dir_count_3 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
}
1094
/// `Worktree::write_file` must produce entries whose gitignore status matches
/// their location: a file written into `tracked-dir` is not ignored, while a
/// file written into `ignored-dir` (matched by `.gitignore`) is. Uses the
/// real file system, hence `allow_parking`.
#[gpui::test]
async fn test_write_file(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".git": {},
        ".gitignore": "ignored-dir\n",
        "tracked-dir": {},
        "ignored-dir": {}
    }));

    let worktree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Initialize the global fs watcher on non-macOS platforms — presumably
    // required before flush_fs_events can observe real fs changes; TODO confirm.
    #[cfg(not(target_os = "macos"))]
    fs::fs_watcher::global(|_| {}).unwrap();

    cx.read(|cx| worktree.read(cx).as_local().unwrap().scan_complete())
        .await;
    worktree.flush_fs_events(cx).await;

    worktree
        .update(cx, |tree, cx| {
            tree.write_file(
                rel_path("tracked-dir/file.txt").into(),
                "hello".into(),
                Default::default(),
                encoding_rs::UTF_8,
                false,
                cx,
            )
        })
        .await
        .unwrap();
    worktree
        .update(cx, |tree, cx| {
            tree.write_file(
                rel_path("ignored-dir/file.txt").into(),
                "world".into(),
                Default::default(),
                encoding_rs::UTF_8,
                false,
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |tree, _| {
        let tracked = tree
            .entry_for_path(rel_path("tracked-dir/file.txt"))
            .unwrap();
        let ignored = tree
            .entry_for_path(rel_path("ignored-dir/file.txt"))
            .unwrap();
        assert!(!tracked.is_ignored);
        assert!(ignored.is_ignored);
    });
}
1162
/// `file_scan_inclusions` must override gitignore rules: paths matched by an
/// inclusion glob (`node_modules/**/package.json`, `**/.DS_Store`) are
/// indexed even though `.gitignore` excludes their parent directories. Uses
/// the real file system, hence `allow_parking`.
#[gpui::test]
async fn test_file_scan_inclusions(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules\ntop_level.txt\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
            "package.json": "//package.json"
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        "top_level.txt": "top level file",
        ".DS_Store": "",
    }));
    // Configure inclusions that point into gitignored directories.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions = Some(vec![
                    "node_modules/**/package.json".to_string(),
                    "**/.DS_Store".to_string(),
                ]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        // Assert that file_scan_inclusions overrides file_scan_exclusions.
        check_worktree_entries(
            tree,
            &[],
            &["target", "node_modules"],
            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
            &[
                "node_modules/prettier/package.json",
                ".DS_Store",
                "node_modules/.DS_Store",
                "src/.DS_Store",
            ],
        )
    });
}
1235
1236#[gpui::test]
1237async fn test_file_scan_exclusions_overrules_inclusions(cx: &mut TestAppContext) {
1238 init_test(cx);
1239 cx.executor().allow_parking();
1240 let dir = TempTree::new(json!({
1241 ".gitignore": "**/target\n/node_modules\n",
1242 "target": {
1243 "index": "blah2"
1244 },
1245 "node_modules": {
1246 ".DS_Store": "",
1247 "prettier": {
1248 "package.json": "{}",
1249 },
1250 },
1251 "src": {
1252 ".DS_Store": "",
1253 "foo": {
1254 "foo.rs": "mod another;\n",
1255 "another.rs": "// another",
1256 },
1257 },
1258 ".DS_Store": "",
1259 }));
1260
1261 cx.update(|cx| {
1262 cx.update_global::<SettingsStore, _>(|store, cx| {
1263 store.update_user_settings(cx, |settings| {
1264 settings.project.worktree.file_scan_exclusions =
1265 Some(vec!["**/.DS_Store".to_string()]);
1266 settings.project.worktree.file_scan_inclusions =
1267 Some(vec!["**/.DS_Store".to_string()]);
1268 });
1269 });
1270 });
1271
1272 let tree = Worktree::local(
1273 dir.path(),
1274 true,
1275 Arc::new(RealFs::new(None, cx.executor())),
1276 Default::default(),
1277 true,
1278 WorktreeId::from_proto(0),
1279 &mut cx.to_async(),
1280 )
1281 .await
1282 .unwrap();
1283 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1284 .await;
1285 tree.flush_fs_events(cx).await;
1286 tree.read_with(cx, |tree, _| {
1287 // Assert that file_scan_inclusions overrides file_scan_exclusions.
1288 check_worktree_entries(
1289 tree,
1290 &[".DS_Store, src/.DS_Store"],
1291 &["target", "node_modules"],
1292 &["src/foo/another.rs", "src/foo/foo.rs", ".gitignore"],
1293 &[],
1294 )
1295 });
1296}
1297
// Verifies that changing `file_scan_inclusions` at runtime triggers a
// re-index: entries matching the inclusion glob gain `is_always_included`,
// and clearing the setting removes the flag again.
#[gpui::test]
async fn test_file_scan_inclusions_reindexes_on_setting_change(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules/\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
        },
        ".DS_Store": "",
    }));

    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                // Force the gitignored `node_modules` tree to be scanned anyway.
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions =
                    Some(vec!["node_modules/**".to_string()]);
            });
        });
    });
    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    // The included directory and its descendants carry the flag.
    tree.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("node_modules"))
                .is_some_and(|f| f.is_always_included)
        );
        assert!(
            tree.entry_for_path(rel_path("node_modules/prettier/package.json"))
                .is_some_and(|f| f.is_always_included)
        );
    });

    // Clear the inclusion list and wait for the rescan to settle.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions = Some(vec![]);
            });
        });
    });
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    // The entries remain (already scanned) but are no longer always-included.
    tree.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("node_modules"))
                .is_some_and(|f| !f.is_always_included)
        );
        assert!(
            tree.entry_for_path(rel_path("node_modules/prettier/package.json"))
                .is_some_and(|f| !f.is_always_included)
        );
    });
}
1381
// Verifies that `file_scan_exclusions` removes matching entries from the
// worktree, and that changing the setting at runtime re-scans: previously
// excluded paths reappear and newly excluded ones disappear.
#[gpui::test]
async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        ".DS_Store": "",
    }));
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions =
                    Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        // Argument order appears to be (excluded, ignored, tracked,
        // always-included) — confirm against check_worktree_entries.
        check_worktree_entries(
            tree,
            &[
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "node_modules/.DS_Store",
                "src/.DS_Store",
                ".DS_Store",
            ],
            &["target", "node_modules"],
            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
            &[],
        )
    });

    // Swap the exclusion set at runtime; the previously excluded paths must
    // come back and node_modules must now be excluded instead.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions =
                    Some(vec!["**/node_modules/**".to_string()]);
            });
        });
    });
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                "node_modules/prettier/package.json",
                "node_modules/.DS_Store",
                "node_modules",
            ],
            &["target"],
            &[
                ".gitignore",
                "src/lib.rs",
                "src/bar/bar.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/.DS_Store",
                ".DS_Store",
            ],
            &[],
        )
    });
}
1481
// Verifies the `is_hidden` flag: by default dotfiles are hidden; once the
// `hidden_files` setting is set, only paths matching its globs are hidden
// (replacing, not extending, the default dotfile behavior).
#[gpui::test]
async fn test_hidden_files(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n",
        ".hidden_file": "content",
        ".hidden_dir": {
            "nested.rs": "code",
        },
        "src": {
            "visible.rs": "code",
        },
        "logs": {
            "app.log": "logs",
            "debug.log": "logs",
        },
        "visible.txt": "content",
    }));

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    // Default behavior: dot-prefixed entries (and their children) are hidden.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_hidden))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), true),
                (rel_path(".hidden_dir"), true),
                (rel_path(".hidden_dir/nested.rs"), true),
                (rel_path(".hidden_file"), true),
                (rel_path("logs"), false),
                (rel_path("logs/app.log"), false),
                (rel_path("logs/debug.log"), false),
                (rel_path("src"), false),
                (rel_path("src/visible.rs"), false),
                (rel_path("visible.txt"), false),
            ]
        );
    });

    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.hidden_files = Some(vec!["**/*.log".to_string()]);
            });
        });
    });
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // With an explicit setting, only *.log files are hidden — dotfiles are not.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_hidden))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path(".hidden_dir"), false),
                (rel_path(".hidden_dir/nested.rs"), false),
                (rel_path(".hidden_file"), false),
                (rel_path("logs"), false),
                (rel_path("logs/app.log"), true),
                (rel_path("logs/debug.log"), true),
                (rel_path("src"), false),
                (rel_path("src/visible.rs"), false),
                (rel_path("visible.txt"), false),
            ]
        );
    });
}
1569
// Verifies that FS events for paths inside excluded directories (.git,
// node_modules, build_output) do not surface new entries, while events in
// ignored or tracked directories do.
#[gpui::test]
async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".git": {
            "HEAD": "ref: refs/heads/main\n",
            "foo": "bar",
        },
        ".gitignore": "**/target\n/node_modules\ntest_output\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        ".DS_Store": "",
    }));
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                // `build_output` does not exist yet; it is created below to
                // check that exclusions apply to directories added later.
                settings.project.worktree.file_scan_exclusions = Some(vec![
                    "**/.git".to_string(),
                    "node_modules/".to_string(),
                    "build_output".to_string(),
                ]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    // Initial scan: excluded trees absent, `target` ignored, sources tracked.
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                ".git/HEAD",
                ".git/foo",
                "node_modules",
                "node_modules/.DS_Store",
                "node_modules/prettier",
                "node_modules/prettier/package.json",
            ],
            &["target"],
            &[
                ".DS_Store",
                "src/.DS_Store",
                "src/lib.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/bar/bar.rs",
                ".gitignore",
            ],
            &[],
        )
    });

    let new_excluded_dir = dir.path().join("build_output");
    let new_ignored_dir = dir.path().join("test_output");
    std::fs::create_dir_all(&new_excluded_dir)
        .unwrap_or_else(|e| panic!("Failed to create a {new_excluded_dir:?} directory: {e}"));
    std::fs::create_dir_all(&new_ignored_dir)
        .unwrap_or_else(|e| panic!("Failed to create a {new_ignored_dir:?} directory: {e}"));
    let node_modules_dir = dir.path().join("node_modules");
    let dot_git_dir = dir.path().join(".git");
    let src_dir = dir.path().join("src");
    for existing_dir in [&node_modules_dir, &dot_git_dir, &src_dir] {
        assert!(
            existing_dir.is_dir(),
            "Expect {existing_dir:?} to be present in the FS already"
        );
    }

    // Drop a new file into each directory and let the watcher react.
    for directory_for_new_file in [
        new_excluded_dir,
        new_ignored_dir,
        node_modules_dir,
        dot_git_dir,
        src_dir,
    ] {
        std::fs::write(directory_for_new_file.join("new_file"), "new file contents")
            .unwrap_or_else(|e| {
                panic!("Failed to create in {directory_for_new_file:?} a new file: {e}")
            });
    }
    tree.flush_fs_events(cx).await;

    // New files in excluded dirs stay invisible; `test_output` (gitignored)
    // appears as ignored; `src/new_file` becomes tracked.
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                ".git/HEAD",
                ".git/foo",
                ".git/new_file",
                "node_modules",
                "node_modules/.DS_Store",
                "node_modules/prettier",
                "node_modules/prettier/package.json",
                "node_modules/new_file",
                "build_output",
                "build_output/new_file",
                "test_output/new_file",
            ],
            &["target", "test_output"],
            &[
                ".DS_Store",
                "src/.DS_Store",
                "src/lib.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/bar/bar.rs",
                "src/new_file",
                ".gitignore",
            ],
            &[],
        )
    });
}
1714
1715#[gpui::test]
1716async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
1717 init_test(cx);
1718 cx.executor().allow_parking();
1719 let dir = TempTree::new(json!({
1720 ".git": {
1721 "HEAD": "ref: refs/heads/main\n",
1722 "foo": "foo contents",
1723 },
1724 }));
1725 let dot_git_worktree_dir = dir.path().join(".git");
1726
1727 let tree = Worktree::local(
1728 dot_git_worktree_dir.clone(),
1729 true,
1730 Arc::new(RealFs::new(None, cx.executor())),
1731 Default::default(),
1732 true,
1733 WorktreeId::from_proto(0),
1734 &mut cx.to_async(),
1735 )
1736 .await
1737 .unwrap();
1738 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1739 .await;
1740 tree.flush_fs_events(cx).await;
1741 tree.read_with(cx, |tree, _| {
1742 check_worktree_entries(tree, &[], &["HEAD", "foo"], &[], &[])
1743 });
1744
1745 std::fs::write(dot_git_worktree_dir.join("new_file"), "new file contents")
1746 .unwrap_or_else(|e| panic!("Failed to create in {dot_git_worktree_dir:?} a new file: {e}"));
1747 tree.flush_fs_events(cx).await;
1748 tree.read_with(cx, |tree, _| {
1749 check_worktree_entries(tree, &[], &["HEAD", "foo", "new_file"], &[], &[])
1750 });
1751}
1752
// Creates a directory while the initial scan may still be running, and
// checks that a remote observer replaying `observe_updates` messages
// converges to the same snapshot as the local worktree.
#[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "b": {},
            "c": {},
            "d": {},
        }),
    )
    .await;

    let tree = Worktree::local(
        "/root".as_ref(),
        true,
        fs,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Mirror every update into a second snapshot, as a remote peer would.
    let snapshot1 = tree.update(cx, |tree, cx| {
        let tree = tree.as_local_mut().unwrap();
        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
        tree.observe_updates(0, cx, {
            let snapshot = snapshot.clone();
            let settings = tree.settings();
            move |update| {
                snapshot
                    .lock()
                    .apply_remote_update(update, &settings.file_scan_inclusions);
                async { true }
            }
        });
        snapshot
    });

    // Create `a/e` — note the scan is deliberately NOT awaited first.
    let entry = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry(rel_path("a/e").into(), true, None, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_dir());

    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entry_for_path(rel_path("a/e")).unwrap().kind,
            EntryKind::Dir
        );
    });

    // The replayed snapshot must match the authoritative one exactly.
    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
    assert_eq!(
        snapshot1.lock().entries(true, 0).collect::<Vec<_>>(),
        snapshot2.entries(true, 0).collect::<Vec<_>>()
    );
}
1821
// Verifies that `create_entry` creates all missing parent directories
// (mkdir -p semantics), on both the fake FS and the real FS.
#[gpui::test]
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let fs_fake = FakeFs::new(cx.background_executor.clone());
    fs_fake
        .insert_tree(
            "/root",
            json!({
                "a": {},
            }),
        )
        .await;

    let tree_fake = Worktree::local(
        "/root".as_ref(),
        true,
        fs_fake,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Creating a/b/c/d.txt must implicitly create a/b and a/b/c.
    let entry = tree_fake
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/d.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_fake.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/d.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("a/b/c")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("a/b")).unwrap().is_dir());
    });

    // Repeat the same scenario against the real filesystem.
    let fs_real = Arc::new(RealFs::new(None, cx.executor()));
    let temp_root = TempTree::new(json!({
        "a": {}
    }));

    let tree_real = Worktree::local(
        temp_root.path(),
        true,
        fs_real,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/d.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/d.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("a/b/c")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("a/b")).unwrap().is_dir());
    });

    // Test smallest change
    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/e.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/e.txt"))
                .unwrap()
                .is_file()
        );
    });

    // Test largest change
    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("d/e/f/g.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("d/e/f/g.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("d/e/f")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("d/e")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("d")).unwrap().is_dir());
    });
}
1971
// Tests the behavior of our worktree refresh when a file in a gitignored directory
// is created: once the directory has been expanded (loaded), creating a new
// file inside it must not collapse it back to `UnloadedDir` or drop the
// already-loaded sibling entries.
#[gpui::test]
async fn test_create_file_in_expanded_gitignored_dir(cx: &mut TestAppContext) {
    // Tests the behavior of our worktree refresh when a file in a gitignored directory
    // is created.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "ignored_dir\n",
            "ignored_dir": {
                "existing_file.txt": "existing content",
                "another_file.txt": "another content",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Ignored directories start unloaded: present but not scanned.
    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(ignored_dir.kind, EntryKind::UnloadedDir);
    });

    // Loading a file inside forces the ignored directory to be expanded.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("ignored_dir/existing_file.txt"), cx)
    })
    .await
    .unwrap();

    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(ignored_dir.kind, EntryKind::Dir);

        assert!(
            tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
                .is_some()
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
                .is_some()
        );
    });

    // Now create a brand-new file inside the expanded ignored directory.
    let entry = tree
        .update(cx, |tree, cx| {
            tree.create_entry(rel_path("ignored_dir/new_file.txt").into(), false, None, cx)
        })
        .await
        .unwrap();
    assert!(entry.into_included().is_some());

    cx.executor().run_until_parked();

    // The refresh must keep the directory expanded and all entries visible.
    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(
            ignored_dir.kind,
            EntryKind::Dir,
            "ignored_dir should still be loaded, not UnloadedDir"
        );

        assert!(
            tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
                .is_some(),
            "existing_file.txt should still be visible"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
                .is_some(),
            "another_file.txt should still be visible"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/new_file.txt"))
                .is_some(),
            "new_file.txt should be visible"
        );
    });
}
2068
#[gpui::test]
async fn test_fs_event_for_gitignored_dir_does_not_lose_contents(cx: &mut TestAppContext) {
    // Tests the behavior of our worktree refresh when a directory modification for a gitignored directory
    // is triggered.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "ignored_dir\n",
            "ignored_dir": {
                "file1.txt": "content1",
                "file2.txt": "content2",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Load a file to expand the ignored directory
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("ignored_dir/file1.txt"), cx)
    })
    .await
    .unwrap();

    // Expansion loads both children, not just the requested file.
    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert_eq!(ignored_dir.kind, EntryKind::Dir);
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
                .is_some()
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
                .is_some()
        );
    });

    // Simulate a "Changed" event on the directory itself and let it process.
    fs.emit_fs_event("/root/ignored_dir", Some(fs::PathEventKind::Changed));
    tree.flush_fs_events(cx).await;

    // The refresh must not unload the directory or drop its entries.
    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert_eq!(
            ignored_dir.kind,
            EntryKind::Dir,
            "ignored_dir should still be loaded (Dir), not UnloadedDir"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
                .is_some(),
            "file1.txt should still be visible after directory fs event"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
                .is_some(),
            "file2.txt should still be visible after directory fs event"
        );
    });
}
2144
// Randomized test: mutate the worktree while its initial scan is in flight,
// capture intermediate snapshots, and check that replaying the observed
// update stream onto each snapshot reproduces the final state.
#[gpui::test(iterations = 100)]
async fn test_random_worktree_operations_during_initial_scan(
    cx: &mut TestAppContext,
    mut rng: StdRng,
) {
    init_test(cx);
    // OPERATIONS / INITIAL_ENTRIES env vars let the test be scaled up locally.
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(5);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new(path!("/test"));
    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Record every update message sent to a hypothetical remote peer.
    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    // Mutate while scanning; invariants must hold after every operation.
    for _ in 0..operations {
        worktree
            .update(cx, |worktree, cx| {
                randomly_mutate_worktree(worktree, &mut rng, cx)
            })
            .await
            .log_err();
        worktree.read_with(cx, |tree, _| {
            tree.as_local().unwrap().snapshot().check_invariants(true)
        });

        if rng.random_bool(0.6) {
            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
        }
    }

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    cx.executor().run_until_parked();

    let final_snapshot = worktree.read_with(cx, |tree, _| {
        let tree = tree.as_local().unwrap();
        let snapshot = tree.snapshot();
        snapshot.check_invariants(true);
        snapshot
    });

    let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());

    // Replay: only updates at or after each snapshot's scan_id apply to it.
    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
        let mut updated_snapshot = snapshot.clone();
        for update in updates.lock().iter() {
            if update.scan_id >= updated_snapshot.scan_id() as u64 {
                updated_snapshot
                    .apply_remote_update(update.clone(), &settings.file_scan_inclusions);
            }
        }

        assert_eq!(
            updated_snapshot.entries(true, 0).collect::<Vec<_>>(),
            final_snapshot.entries(true, 0).collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }
}
2239
// Randomized test after the initial scan completes: interleave worktree and
// raw-FS mutations with partially-flushed FS event batches, then check that
// (a) a freshly scanned worktree matches, and (b) replaying updates onto
// stored snapshots reproduces the final state.
#[gpui::test(iterations = 100)]
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
    init_test(cx);
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(40);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new(path!("/test"));
    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    // Pause event delivery so batches can be flushed in random-sized chunks.
    fs.as_fake().pause_events();
    let mut snapshots = Vec::new();
    let mut mutations_len = operations;
    while mutations_len > 1 {
        if rng.random_bool(0.2) {
            worktree
                .update(cx, |worktree, cx| {
                    randomly_mutate_worktree(worktree, &mut rng, cx)
                })
                .await
                .log_err();
        } else {
            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
        }

        // Occasionally release a random prefix of the buffered events.
        let buffered_event_count = fs.as_fake().buffered_event_count();
        if buffered_event_count > 0 && rng.random_bool(0.3) {
            let len = rng.random_range(0..=buffered_event_count);
            log::info!("flushing {} events", len);
            fs.as_fake().flush_events(len);
        } else {
            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
            mutations_len -= 1;
        }

        cx.executor().run_until_parked();
        if rng.random_bool(0.2) {
            log::info!("storing snapshot {}", snapshots.len());
            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
            snapshots.push(snapshot);
        }
    }

    // Deliver everything that's left and let the scanner settle.
    log::info!("quiescing");
    fs.as_fake().flush_events(usize::MAX);
    cx.executor().run_until_parked();

    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    snapshot.check_invariants(true);
    let expanded_paths = snapshot
        .expanded_entries()
        .map(|e| e.path.clone())
        .collect::<Vec<_>>();

    // A brand-new worktree over the same FS must converge to the same
    // entries (after expanding the same set of directories).
    {
        let new_worktree = Worktree::local(
            root_dir,
            true,
            fs.clone(),
            Default::default(),
            true,
            WorktreeId::from_proto(0),
            &mut cx.to_async(),
        )
        .await
        .unwrap();
        new_worktree
            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
            .await;
        new_worktree
            .update(cx, |tree, _| {
                tree.as_local_mut()
                    .unwrap()
                    .refresh_entries_for_paths(expanded_paths)
            })
            .recv()
            .await;
        let new_snapshot =
            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        assert_eq!(
            snapshot.entries_without_ids(true),
            new_snapshot.entries_without_ids(true)
        );
    }

    let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());

    // Replaying the update stream onto each stored snapshot must reproduce
    // the final state (modulo pending-vs-loaded dir kinds, see below).
    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
        for update in updates.lock().iter() {
            if update.scan_id >= prev_snapshot.scan_id() as u64 {
                prev_snapshot.apply_remote_update(update.clone(), &settings.file_scan_inclusions);
            }
        }

        assert_eq!(
            prev_snapshot
                .entries(true, 0)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            snapshot
                .entries(true, 0)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }

    // Normalize all directory kinds to `Dir` so pending/loaded differences
    // don't cause spurious mismatches.
    fn ignore_pending_dir(entry: &Entry) -> Entry {
        let mut entry = entry.clone();
        if entry.kind.is_dir() {
            entry.kind = EntryKind::Dir
        }
        entry
    }
}
2392
// The worktree's `UpdatedEntries` event can be used to follow along with
// all changes to the worktree's snapshot.
//
// Maintains a sorted mirror of the worktree's entries, applying each
// `UpdatedEntries` change, and asserts after every event batch that the
// mirror equals the tree's actual entry list.
fn check_worktree_change_events(tree: &mut Worktree, cx: &mut Context<Worktree>) {
    let mut entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
    cx.subscribe(&cx.entity(), move |tree, _, event, _| {
        if let Event::UpdatedEntries(changes) = event {
            for (path, _, change_type) in changes.iter() {
                let entry = tree.entry_for_path(path).cloned();
                // Ok => path already present at ix; Err => ix is the sorted
                // insertion point. Both cases hand us the index we need.
                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
                    Ok(ix) | Err(ix) => ix,
                };
                match change_type {
                    PathChange::Added => entries.insert(ix, entry.unwrap()),
                    PathChange::Removed => drop(entries.remove(ix)),
                    PathChange::Updated => {
                        // An update must target an entry we already track.
                        let entry = entry.unwrap();
                        let existing_entry = entries.get_mut(ix).unwrap();
                        assert_eq!(existing_entry.path, entry.path);
                        *existing_entry = entry;
                    }
                    PathChange::AddedOrUpdated | PathChange::Loaded => {
                        // Upsert: replace in place if the path matches,
                        // otherwise insert at the sorted position.
                        let entry = entry.unwrap();
                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
                            *entries.get_mut(ix).unwrap() = entry;
                        } else {
                            entries.insert(ix, entry);
                        }
                    }
                }
            }

            let new_entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
        }
    })
    .detach();
}
2430
// Performs one random mutation through the worktree API: ~1/3 of the time
// delete a random entry (never the root), otherwise create a child under a
// random directory or overwrite a random file with empty content.
//
// NOTE: the sequence of `rng` calls is part of the test's reproducibility —
// do not reorder them.
fn randomly_mutate_worktree(
    worktree: &mut Worktree,
    rng: &mut impl Rng,
    cx: &mut Context<Worktree>,
) -> Task<Result<()>> {
    log::info!("mutating worktree");
    let worktree = worktree.as_local_mut().unwrap();
    let snapshot = worktree.snapshot();
    let entry = snapshot.entries(false, 0).choose(rng).unwrap();

    match rng.random_range(0_u32..100) {
        // Delete — but never the worktree root itself.
        0..=33 if entry.path.as_ref() != RelPath::empty() => {
            log::info!("deleting entry {:?} ({})", entry.path, entry.id.to_usize());
            let task = worktree
                .delete_entry(entry.id, false, cx)
                .unwrap_or_else(|| Task::ready(Ok(None)));

            cx.background_spawn(async move {
                task.await?;
                Ok(())
            })
        }
        _ => {
            if entry.is_dir() {
                // Create a new child (30% chance of a directory).
                let child_path = entry.path.join(rel_path(&random_filename(rng)));
                let is_dir = rng.random_bool(0.3);
                log::info!(
                    "creating {} at {:?}",
                    if is_dir { "dir" } else { "file" },
                    child_path,
                );
                let task = worktree.create_entry(child_path, is_dir, None, cx);
                cx.background_spawn(async move {
                    task.await?;
                    Ok(())
                })
            } else {
                // Overwrite an existing file with empty UTF-8 content.
                log::info!(
                    "overwriting file {:?} ({})",
                    &entry.path,
                    entry.id.to_usize()
                );
                let task = worktree.write_file(
                    entry.path.clone(),
                    "".into(),
                    Default::default(),
                    encoding_rs::UTF_8,
                    false,
                    cx,
                );
                cx.background_spawn(async move {
                    task.await?;
                    Ok(())
                })
            }
        }
    }
}
2489
/// Performs one random mutation directly on the fake filesystem under
/// `root_path`: creates a file or directory, writes a `.gitignore`,
/// renames a path, or deletes one. `insertion_probability` biases the
/// choice towards creation.
async fn randomly_mutate_fs(
    fs: &Arc<dyn Fs>,
    root_path: &Path,
    insertion_probability: f64,
    rng: &mut impl Rng,
) {
    log::info!("mutating fs");
    // Partition every existing path under the root into files and dirs.
    let mut files = Vec::new();
    let mut dirs = Vec::new();
    for path in fs.as_fake().paths(false) {
        if path.starts_with(root_path) {
            if fs.is_file(&path).await {
                files.push(path);
            } else {
                dirs.push(path);
            }
        }
    }

    // Creation branch: forced when only the root dir exists (nothing else
    // to mutate), otherwise taken with `insertion_probability`.
    if (files.is_empty() && dirs.len() == 1) || rng.random_bool(insertion_probability) {
        let path = dirs.choose(rng).unwrap();
        let new_path = path.join(random_filename(rng));

        if rng.random() {
            log::info!(
                "creating dir {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_dir(&new_path).await.unwrap();
        } else {
            log::info!(
                "creating file {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_file(&new_path, Default::default()).await.unwrap();
        }
    // 5% of the time: write a .gitignore in a random dir, ignoring a
    // random subset of the files and dirs beneath it.
    } else if rng.random_bool(0.05) {
        let ignore_dir_path = dirs.choose(rng).unwrap();
        let ignore_path = ignore_dir_path.join(GITIGNORE);

        let subdirs = dirs
            .iter()
            .filter(|d| d.starts_with(ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let subfiles = files
            .iter()
            .filter(|d| d.starts_with(ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let files_to_ignore = {
            let len = rng.random_range(0..=subfiles.len());
            subfiles.choose_multiple(rng, len)
        };
        let dirs_to_ignore = {
            let len = rng.random_range(0..subdirs.len());
            subdirs.choose_multiple(rng, len)
        };

        // Write one pattern per line, relative to the gitignore's dir.
        let mut ignore_contents = String::new();
        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
            writeln!(
                ignore_contents,
                "{}",
                path_to_ignore
                    .strip_prefix(ignore_dir_path)
                    .unwrap()
                    .to_str()
                    .unwrap()
            )
            .unwrap();
        }
        log::info!(
            "creating gitignore {:?} with contents:\n{}",
            ignore_path.strip_prefix(root_path).unwrap(),
            ignore_contents
        );
        fs.save(
            &ignore_path,
            &ignore_contents.as_str().into(),
            Default::default(),
        )
        .await
        .unwrap();
    // Otherwise: rename or delete an existing path.
    } else {
        let old_path = {
            let file_path = files.choose(rng);
            // Skip the first dir (presumably the root itself) so the root
            // is never renamed or deleted.
            let dir_path = dirs[1..].choose(rng);
            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
        };

        let is_rename = rng.random();
        if is_rename {
            // Choose a destination parent that is not inside the path
            // being moved (renaming into a descendant is invalid).
            let new_path_parent = dirs
                .iter()
                .filter(|d| !d.starts_with(old_path))
                .choose(rng)
                .unwrap();

            // Occasionally replace an existing directory wholesale
            // instead of creating a new name under the parent.
            let overwrite_existing_dir =
                !old_path.starts_with(new_path_parent) && rng.random_bool(0.3);
            let new_path = if overwrite_existing_dir {
                fs.remove_dir(
                    new_path_parent,
                    RemoveOptions {
                        recursive: true,
                        ignore_if_not_exists: true,
                    },
                )
                .await
                .unwrap();
                new_path_parent.to_path_buf()
            } else {
                new_path_parent.join(random_filename(rng))
            };

            log::info!(
                "renaming {:?} to {}{:?}",
                old_path.strip_prefix(root_path).unwrap(),
                if overwrite_existing_dir {
                    "overwrite "
                } else {
                    ""
                },
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.rename(
                old_path,
                &new_path,
                fs::RenameOptions {
                    overwrite: true,
                    ignore_if_exists: true,
                    create_parents: false,
                },
            )
            .await
            .unwrap();
        } else if fs.is_file(old_path).await {
            log::info!(
                "deleting file {:?}",
                old_path.strip_prefix(root_path).unwrap()
            );
            fs.remove_file(old_path, Default::default()).await.unwrap();
        } else {
            log::info!(
                "deleting dir {:?}",
                old_path.strip_prefix(root_path).unwrap()
            );
            fs.remove_dir(
                old_path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await
            .unwrap();
        }
    }
}
2650
2651fn random_filename(rng: &mut impl Rng) -> String {
2652 (0..6)
2653 .map(|_| rng.sample(rand::distr::Alphanumeric))
2654 .map(char::from)
2655 .collect()
2656}
2657
2658#[gpui::test]
2659async fn test_private_single_file_worktree(cx: &mut TestAppContext) {
2660 init_test(cx);
2661 let fs = FakeFs::new(cx.background_executor.clone());
2662 fs.insert_tree("/", json!({".env": "PRIVATE=secret\n"}))
2663 .await;
2664 let tree = Worktree::local(
2665 Path::new("/.env"),
2666 true,
2667 fs.clone(),
2668 Default::default(),
2669 true,
2670 WorktreeId::from_proto(0),
2671 &mut cx.to_async(),
2672 )
2673 .await
2674 .unwrap();
2675 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2676 .await;
2677 tree.read_with(cx, |tree, _| {
2678 let entry = tree.entry_for_path(rel_path("")).unwrap();
2679 assert!(entry.is_private);
2680 });
2681}
2682
2683#[gpui::test]
2684async fn test_repository_above_root(executor: BackgroundExecutor, cx: &mut TestAppContext) {
2685 init_test(cx);
2686
2687 let fs = FakeFs::new(executor);
2688 fs.insert_tree(
2689 path!("/root"),
2690 json!({
2691 ".git": {},
2692 "subproject": {
2693 "a.txt": "A"
2694 }
2695 }),
2696 )
2697 .await;
2698 let worktree = Worktree::local(
2699 path!("/root/subproject").as_ref(),
2700 true,
2701 fs.clone(),
2702 Arc::default(),
2703 true,
2704 WorktreeId::from_proto(0),
2705 &mut cx.to_async(),
2706 )
2707 .await
2708 .unwrap();
2709 worktree
2710 .update(cx, |worktree, _| {
2711 worktree.as_local().unwrap().scan_complete()
2712 })
2713 .await;
2714 cx.run_until_parked();
2715 let repos = worktree.update(cx, |worktree, _| {
2716 worktree.as_local().unwrap().repositories()
2717 });
2718 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root")).into()]);
2719
2720 fs.touch_path(path!("/root/subproject")).await;
2721 worktree
2722 .update(cx, |worktree, _| {
2723 worktree.as_local().unwrap().scan_complete()
2724 })
2725 .await;
2726 cx.run_until_parked();
2727
2728 let repos = worktree.update(cx, |worktree, _| {
2729 worktree.as_local().unwrap().repositories()
2730 });
2731 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root")).into()]);
2732}
2733
/// Tests that the global git excludes file (`~/.config/git/ignore`) is
/// applied, that `.gitignore` negations override it, that anchored
/// patterns resolve relative to the nearest containing repository, and
/// that ignore statuses are recomputed when the excludes file or a `.git`
/// directory changes.
#[gpui::test]
async fn test_global_gitignore(executor: BackgroundExecutor, cx: &mut TestAppContext) {
    init_test(cx);

    let home = paths::home_dir();
    let fs = FakeFs::new(executor);
    // Global excludes ignore "foo", anchored "/bar", and "baz"; the
    // project's .gitignore un-ignores "baz" via negation.
    fs.insert_tree(
        home,
        json!({
            ".config": {
                "git": {
                    "ignore": "foo\n/bar\nbaz\n"
                }
            },
            "project": {
                ".git": {},
                ".gitignore": "!baz",
                "foo": "",
                "bar": "",
                "sub": {
                    "bar": "",
                },
                "subrepo": {
                    ".git": {},
                    "bar": ""
                },
                "baz": ""
            }
        }),
    )
    .await;
    let worktree = Worktree::local(
        home.join("project"),
        true,
        fs.clone(),
        Arc::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    // .gitignore overrides excludesFile, and anchored paths in excludesFile are resolved
    // relative to the nearest containing repository
    worktree.update(cx, |worktree, _cx| {
        check_worktree_entries(
            worktree,
            &[],
            &["foo", "bar", "subrepo/bar"],
            &["sub/bar", "baz"],
            &[],
        );
    });

    // Ignore statuses are updated when excludesFile changes
    fs.write(
        &home.join(".config").join("git").join("ignore"),
        "/bar\nbaz\n".as_bytes(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    // "foo" was dropped from the global excludes, so it is tracked again.
    worktree.update(cx, |worktree, _cx| {
        check_worktree_entries(
            worktree,
            &[],
            &["bar", "subrepo/bar"],
            &["foo", "sub/bar", "baz"],
            &[],
        );
    });

    // Statuses are updated when .git added/removed
    fs.remove_dir(
        &home.join("project").join("subrepo").join(".git"),
        RemoveOptions {
            recursive: true,
            ..Default::default()
        },
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    // With subrepo's .git gone, the anchored "/bar" pattern no longer
    // matches "subrepo/bar", so it becomes tracked.
    worktree.update(cx, |worktree, _cx| {
        check_worktree_entries(
            worktree,
            &[],
            &["bar"],
            &["foo", "sub/bar", "baz", "subrepo/bar"],
            &[],
        );
    });
}
2846
/// Tests that the per-repository excludes file (`.git/info/exclude`) is
/// applied, that `.gitignore` negations override it, and that ignore
/// statuses are recomputed when the excludes file changes.
#[gpui::test]
async fn test_repo_exclude(executor: BackgroundExecutor, cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    let project_dir = Path::new(path!("/project"));
    // The exclude file ignores ".env.*"; the .gitignore un-ignores
    // ".env.example" via negation.
    fs.insert_tree(
        project_dir,
        json!({
            ".git": {
                "info": {
                    "exclude": ".env.*"
                }
            },
            ".env.example": "secret=xxxx",
            ".env.local": "secret=1234",
            ".gitignore": "!.env.example",
            "README.md": "# Repo Exclude",
            "src": {
                "main.rs": "fn main() {}",
            },
        }),
    )
    .await;

    let worktree = Worktree::local(
        project_dir,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    // .gitignore overrides .git/info/exclude
    worktree.update(cx, |worktree, _cx| {
        let expected_excluded_paths = [];
        let expected_ignored_paths = [".env.local"];
        let expected_tracked_paths = [".env.example", "README.md", "src/main.rs"];
        let expected_included_paths = [];

        check_worktree_entries(
            worktree,
            &expected_excluded_paths,
            &expected_ignored_paths,
            &expected_tracked_paths,
            &expected_included_paths,
        );
    });

    // Ignore statuses are updated when .git/info/exclude file changes
    fs.write(
        &project_dir.join(DOT_GIT).join(REPO_EXCLUDE),
        ".env.example".as_bytes(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    // The new exclude pattern only names ".env.example", which the
    // .gitignore negation re-includes, so nothing is ignored any more.
    worktree.update(cx, |worktree, _cx| {
        let expected_excluded_paths = [];
        let expected_ignored_paths = [];
        let expected_tracked_paths = [".env.example", ".env.local", "README.md", "src/main.rs"];
        let expected_included_paths = [];

        check_worktree_entries(
            worktree,
            &expected_excluded_paths,
            &expected_ignored_paths,
            &expected_tracked_paths,
            &expected_included_paths,
        );
    });
}
2935
2936#[track_caller]
2937fn check_worktree_entries(
2938 tree: &Worktree,
2939 expected_excluded_paths: &[&str],
2940 expected_ignored_paths: &[&str],
2941 expected_tracked_paths: &[&str],
2942 expected_included_paths: &[&str],
2943) {
2944 for path in expected_excluded_paths {
2945 let entry = tree.entry_for_path(rel_path(path));
2946 assert!(
2947 entry.is_none(),
2948 "expected path '{path}' to be excluded, but got entry: {entry:?}",
2949 );
2950 }
2951 for path in expected_ignored_paths {
2952 let entry = tree
2953 .entry_for_path(rel_path(path))
2954 .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
2955 assert!(
2956 entry.is_ignored,
2957 "expected path '{path}' to be ignored, but got entry: {entry:?}",
2958 );
2959 }
2960 for path in expected_tracked_paths {
2961 let entry = tree
2962 .entry_for_path(rel_path(path))
2963 .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
2964 assert!(
2965 !entry.is_ignored || entry.is_always_included,
2966 "expected path '{path}' to be tracked, but got entry: {entry:?}",
2967 );
2968 }
2969 for path in expected_included_paths {
2970 let entry = tree
2971 .entry_for_path(rel_path(path))
2972 .unwrap_or_else(|| panic!("Missing entry for expected included path '{path}'"));
2973 assert!(
2974 entry.is_always_included,
2975 "expected path '{path}' to always be included, but got entry: {entry:?}",
2976 );
2977 }
2978}
2979
/// For a linked git worktree, `root_repo_common_dir` should point at the
/// main repository's `.git` directory, become `None` when the worktree's
/// gitfile is removed, and emit `UpdatedRootRepoCommonDir` exactly once on
/// that removal.
#[gpui::test]
async fn test_root_repo_common_dir(executor: BackgroundExecutor, cx: &mut TestAppContext) {
    init_test(cx);

    use git::repository::Worktree as GitWorktree;

    let fs = FakeFs::new(executor);

    // Set up a main repo and a linked worktree pointing back to it.
    fs.insert_tree(
        path!("/main_repo"),
        json!({
            ".git": {},
            "file.txt": "content",
        }),
    )
    .await;
    fs.add_linked_worktree_for_repo(
        Path::new(path!("/main_repo/.git")),
        false,
        GitWorktree {
            path: PathBuf::from(path!("/linked_worktree")),
            ref_name: Some("refs/heads/feature".into()),
            sha: "abc123".into(),
            is_main: false,
            is_bare: false,
        },
    )
    .await;
    fs.write(
        path!("/linked_worktree/file.txt").as_ref(),
        "content".as_bytes(),
    )
    .await
    .unwrap();

    let tree = Worktree::local(
        path!("/linked_worktree").as_ref(),
        true,
        fs.clone(),
        Arc::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    cx.run_until_parked();

    // For a linked worktree, root_repo_common_dir should point to the
    // main repo's .git, not the worktree-specific git directory.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.snapshot().root_repo_common_dir().map(|p| p.as_ref()),
            Some(Path::new(path!("/main_repo/.git"))),
        );
    });

    // Count UpdatedRootRepoCommonDir events emitted from here on.
    let event_count: Rc<Cell<usize>> = Rc::new(Cell::new(0));
    tree.update(cx, {
        let event_count = event_count.clone();
        |_, cx| {
            cx.subscribe(&cx.entity(), move |_, _, event, _| {
                if matches!(event, Event::UpdatedRootRepoCommonDir { .. }) {
                    event_count.set(event_count.get() + 1);
                }
            })
            .detach();
        }
    });

    // Remove .git — root_repo_common_dir should become None.
    fs.remove_file(
        &PathBuf::from(path!("/linked_worktree/.git")),
        Default::default(),
    )
    .await
    .unwrap();
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(tree.snapshot().root_repo_common_dir(), None);
    });
    assert_eq!(
        event_count.get(),
        1,
        "should have emitted UpdatedRootRepoCommonDir on removal"
    );
}
3071
#[gpui::test]
async fn test_linked_worktree_git_file_event_does_not_panic(
    executor: BackgroundExecutor,
    cx: &mut TestAppContext,
) {
    // Regression test: in a linked worktree, `.git` is a file (containing
    // "gitdir: ..."), not a directory. When the background scanner receives
    // a filesystem event for a path inside the main repo's `.git` directory
    // (which it watches via the commondir), the ancestor-walking code in
    // `process_events` calls `is_git_dir` on each ancestor. If `is_git_dir`
    // treats `.git` files the same as `.git` directories, it incorrectly
    // identifies the gitfile as a git dir, adds it to `dot_git_abs_paths`,
    // and `update_git_repositories` panics because the path is outside the
    // worktree root.
    init_test(cx);

    use git::repository::Worktree as GitWorktree;

    let fs = FakeFs::new(executor);

    // Set up a main repo plus a linked worktree whose `.git` is a gitfile
    // pointing back at /main_repo/.git.
    fs.insert_tree(
        path!("/main_repo"),
        json!({
            ".git": {},
            "file.txt": "content",
        }),
    )
    .await;
    fs.add_linked_worktree_for_repo(
        Path::new(path!("/main_repo/.git")),
        false,
        GitWorktree {
            path: PathBuf::from(path!("/linked_worktree")),
            ref_name: Some("refs/heads/feature".into()),
            sha: "abc123".into(),
            is_main: false,
            is_bare: false,
        },
    )
    .await;
    fs.write(
        path!("/linked_worktree/file.txt").as_ref(),
        "content".as_bytes(),
    )
    .await
    .unwrap();

    // Open the worktree on the linked checkout, not the main repo.
    let tree = Worktree::local(
        path!("/linked_worktree").as_ref(),
        true,
        fs.clone(),
        Arc::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    cx.run_until_parked();

    // Trigger a filesystem event inside the main repo's .git directory
    // (which the linked worktree scanner watches via the commondir). This
    // uses the sentinel-file helper to ensure the event goes through the
    // real watcher path, exactly as it would in production.
    tree.flush_fs_events_in_root_git_repository(cx).await;

    // The worktree should still be intact.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.snapshot().root_repo_common_dir().map(|p| p.as_ref()),
            Some(Path::new(path!("/main_repo/.git"))),
        );
    });
}
3148
#[gpui::test]
async fn test_linked_worktree_event_in_unregistered_common_git_dir_does_not_panic(
    executor: BackgroundExecutor,
    cx: &mut TestAppContext,
) {
    // Regression test: a rescan event on a linked worktree's commondir
    // must not panic when the worktree's repository has already been
    // unregistered from `git_repositories`.
    init_test(cx);

    use git::repository::Worktree as GitWorktree;

    let fs = FakeFs::new(executor);

    // Set up a main repo plus a linked worktree whose `.git` is a gitfile
    // pointing back at /main_repo/.git.
    fs.insert_tree(
        path!("/main_repo"),
        json!({
            ".git": {},
            "file.txt": "content",
        }),
    )
    .await;
    fs.add_linked_worktree_for_repo(
        Path::new(path!("/main_repo/.git")),
        false,
        GitWorktree {
            path: PathBuf::from(path!("/linked_worktree")),
            ref_name: Some("refs/heads/feature".into()),
            sha: "abc123".into(),
            is_main: false,
            is_bare: false,
        },
    )
    .await;
    fs.write(
        path!("/linked_worktree/file.txt").as_ref(),
        "content".as_bytes(),
    )
    .await
    .unwrap();

    let tree = Worktree::local(
        path!("/linked_worktree").as_ref(),
        true,
        fs.clone(),
        Arc::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    cx.run_until_parked();

    // Unregister the linked worktree's repository by removing its gitfile.
    fs.remove_file(
        Path::new(path!("/linked_worktree/.git")),
        Default::default(),
    )
    .await
    .unwrap();
    tree.flush_fs_events(cx).await;

    // Deliver the kind of Rescan event `FsWatcher` emits when the kernel
    // signals `need_rescan` for the commondir.
    fs.emit_fs_event(path!("/main_repo/.git"), Some(fs::PathEventKind::Rescan));
    cx.run_until_parked();
    // The test passes if neither flush panics.
    tree.flush_fs_events(cx).await;
}
3220
#[gpui::test]
async fn test_dot_git_dir_event_does_not_suppress_children(
    executor: BackgroundExecutor,
    cx: &mut TestAppContext,
) {
    // On Windows, modifying a file inside .git causes ReadDirectoryChangesW to also emit
    // a Modify event for the .git directory itself (because its last-write timestamp changes).
    // When these events arrive in the same batch, a naive ancestor-based dedup would collapse
    // all child events into the .git directory event, losing the information about which
    // specific files changed. This test verifies that the git-related event processing happens
    // before the dedup, so that meaningful .git child events still trigger UpdatedGitRepositories.
    init_test(cx);

    let fs = FakeFs::new(executor.clone());
    let project_dir = Path::new(path!("/project"));
    fs.insert_tree(
        project_dir,
        json!({
            ".git": {},
            "src": {
                "main.rs": "fn main() {}",
            },
        }),
    )
    .await;

    let worktree = Worktree::local(
        project_dir,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    let dot_git = project_dir.join(DOT_GIT);

    // Case 1: Events for .git AND .git/index.lock should NOT emit UpdatedGitRepositories
    // (index.lock is in the skipped files list)
    {
        let mut events = cx.events(&worktree);
        // Pause/unpause so both events are delivered in a single batch,
        // exercising the dedup path described above.
        fs.pause_events();
        fs.emit_fs_event(dot_git.clone(), Some(PathEventKind::Changed));
        fs.emit_fs_event(dot_git.join("index.lock"), Some(PathEventKind::Created));
        fs.unpause_events_and_flush();
        executor.run_until_parked();

        let got_git_update = drain_git_repo_updates(&mut events);
        assert!(
            !got_git_update,
            "should NOT emit UpdatedGitRepositories when .git batch only contains index.lock"
        );
    }

    // Case 2: Event for just .git (bare directory event) should NOT emit UpdatedGitRepositories
    {
        let mut events = cx.events(&worktree);
        fs.pause_events();
        fs.emit_fs_event(dot_git.clone(), Some(PathEventKind::Changed));
        fs.unpause_events_and_flush();
        executor.run_until_parked();

        let got_git_update = drain_git_repo_updates(&mut events);
        assert!(
            !got_git_update,
            "should NOT emit UpdatedGitRepositories for a bare .git directory event"
        );
    }

    // Case 3: Events for .git AND .git/index should emit UpdatedGitRepositories
    {
        let mut events = cx.events(&worktree);
        fs.pause_events();
        fs.emit_fs_event(dot_git.clone(), Some(PathEventKind::Changed));
        fs.emit_fs_event(dot_git.join("index"), Some(PathEventKind::Changed));
        fs.unpause_events_and_flush();
        executor.run_until_parked();

        let got_git_update = drain_git_repo_updates(&mut events);
        assert!(
            got_git_update,
            "should emit UpdatedGitRepositories when .git batch contains index"
        );
    }

    // Case 4: Event for .git/index only should emit UpdatedGitRepositories
    {
        let mut events = cx.events(&worktree);
        fs.pause_events();
        fs.emit_fs_event(dot_git.join("index"), Some(PathEventKind::Changed));
        fs.unpause_events_and_flush();
        executor.run_until_parked();

        let got_git_update = drain_git_repo_updates(&mut events);
        assert!(
            got_git_update,
            "should emit UpdatedGitRepositories for a .git/index event"
        );
    }
}
3330
3331fn drain_git_repo_updates(events: &mut futures::channel::mpsc::UnboundedReceiver<Event>) -> bool {
3332 let mut found = false;
3333 while let Ok(event) = events.try_recv() {
3334 if matches!(event, Event::UpdatedGitRepositories(_)) {
3335 found = true;
3336 }
3337 }
3338 found
3339}
3340
3341fn init_test(cx: &mut gpui::TestAppContext) {
3342 zlog::init_test();
3343
3344 cx.update(|cx| {
3345 let settings_store = SettingsStore::test(cx);
3346 cx.set_global(settings_store);
3347 });
3348}
3349
3350async fn wait_for_condition(
3351 cx: &mut TestAppContext,
3352 mut condition: impl FnMut(&mut TestAppContext) -> bool,
3353) {
3354 for _ in 0..50 {
3355 if condition(cx) {
3356 return;
3357 }
3358 cx.executor().run_until_parked();
3359 cx.background_executor
3360 .timer(std::time::Duration::from_millis(10))
3361 .await;
3362 }
3363 panic!("timed out waiting for test condition");
3364}
3365
/// Loads files written in a variety of encodings through the worktree and
/// checks the decoded text (success cases), and that binary content is
/// rejected with an error (failure cases).
#[gpui::test]
async fn test_load_file_encoding(cx: &mut TestAppContext) {
    init_test(cx);

    // One on-disk file per case: `bytes` is what gets written, and
    // `expected_text` is what `load_file` should decode it to.
    struct TestCase {
        name: &'static str,
        bytes: Vec<u8>,
        expected_text: &'static str,
    }

    // --- Success Cases ---
    let success_cases = vec![
        TestCase {
            name: "utf8.txt",
            bytes: "こんにちは".as_bytes().to_vec(),
            expected_text: "こんにちは",
        },
        TestCase {
            name: "sjis.txt",
            bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
            expected_text: "こんにちは",
        },
        TestCase {
            name: "eucjp.txt",
            bytes: vec![0xa4, 0xb3, 0xa4, 0xf3, 0xa4, 0xcb, 0xa4, 0xc1, 0xa4, 0xcf],
            expected_text: "こんにちは",
        },
        TestCase {
            name: "iso2022jp.txt",
            bytes: vec![
                0x1b, 0x24, 0x42, 0x24, 0x33, 0x24, 0x73, 0x24, 0x4b, 0x24, 0x41, 0x24, 0x4f, 0x1b,
                0x28, 0x42,
            ],
            expected_text: "こんにちは",
        },
        TestCase {
            name: "win1252.txt",
            bytes: vec![0x43, 0x61, 0x66, 0xe9],
            expected_text: "Café",
        },
        TestCase {
            name: "gbk.txt",
            bytes: vec![
                0xbd, 0xf1, 0xcc, 0xec, 0xcc, 0xec, 0xc6, 0xf8, 0xb2, 0xbb, 0xb4, 0xed,
            ],
            expected_text: "今天天气不错",
        },
        // UTF-16LE with BOM
        TestCase {
            name: "utf16le_bom.txt",
            bytes: vec![
                0xFF, 0xFE, // BOM
                0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F, 0x30,
            ],
            expected_text: "こんにちは",
        },
        // UTF-16BE with BOM
        TestCase {
            name: "utf16be_bom.txt",
            bytes: vec![
                0xFE, 0xFF, // BOM
                0x30, 0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F,
            ],
            expected_text: "こんにちは",
        },
        // UTF-16LE without BOM (ASCII only)
        // This relies on the "null byte heuristic" we implemented.
        // "ABC" -> 41 00 42 00 43 00
        TestCase {
            name: "utf16le_ascii_no_bom.txt",
            bytes: vec![0x41, 0x00, 0x42, 0x00, 0x43, 0x00],
            expected_text: "ABC",
        },
    ];

    // --- Failure Cases ---
    let failure_cases = vec![
        // Binary File (Should be detected by heuristic and return Error)
        // Contains random bytes and mixed nulls that don't match UTF-16 patterns
        TestCase {
            name: "binary.bin",
            bytes: vec![0x00, 0xFF, 0x12, 0x00, 0x99, 0x88, 0x77, 0x66, 0x00],
            expected_text: "", // Not used
        },
    ];

    let root_path = if cfg!(windows) {
        Path::new("C:\\root")
    } else {
        Path::new("/root")
    };

    // Write every case's raw bytes into the fake filesystem.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.create_dir(root_path).await.unwrap();

    for case in success_cases.iter().chain(failure_cases.iter()) {
        let path = root_path.join(case.name);
        fs.write(&path, &case.bytes).await.unwrap();
    }

    let tree = Worktree::local(
        root_path,
        true,
        fs,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Shadows the module-level `rel_path` helper with one that builds an
    // Arc'd RelPath using the local path style.
    let rel_path = |name: &str| {
        RelPath::new(&Path::new(name), PathStyle::local())
            .unwrap()
            .into_arc()
    };

    // Run Success Tests
    for case in success_cases {
        let loaded = tree
            .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx))
            .await;
        if let Err(e) = &loaded {
            panic!("Failed to load success case '{}': {:?}", case.name, e);
        }
        let loaded = loaded.unwrap();
        assert_eq!(
            loaded.text, case.expected_text,
            "Encoding mismatch for file: {}",
            case.name
        );
    }

    // Run Failure Tests
    for case in failure_cases {
        let loaded = tree
            .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx))
            .await;
        assert!(
            loaded.is_err(),
            "Failure case '{}' unexpectedly succeeded! It should have been detected as binary.",
            case.name
        );
        let err_msg = loaded.unwrap_err().to_string();
        println!("Got expected error for {}: {}", case.name, err_msg);
    }
}
3517
/// Writes text through `Worktree::write_file` with various target
/// encodings and BOM settings, then checks the exact bytes that land on
/// disk.
#[gpui::test]
async fn test_write_file_encoding(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    let root_path = if cfg!(windows) {
        Path::new("C:\\root")
    } else {
        Path::new("/root")
    };
    fs.create_dir(root_path).await.unwrap();

    let worktree = Worktree::local(
        root_path,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Define test case structure
    struct TestCase {
        name: &'static str,
        text: &'static str,
        encoding: &'static encoding_rs::Encoding,
        has_bom: bool,
        expected_bytes: Vec<u8>,
    }

    let cases = vec![
        // Shift_JIS with Japanese
        TestCase {
            name: "Shift_JIS with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::SHIFT_JIS,
            has_bom: false,
            expected_bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
        },
        // UTF-8 No BOM
        TestCase {
            name: "UTF-8 No BOM",
            text: "AB",
            encoding: encoding_rs::UTF_8,
            has_bom: false,
            expected_bytes: vec![0x41, 0x42],
        },
        // UTF-8 with BOM
        TestCase {
            name: "UTF-8 with BOM",
            text: "AB",
            encoding: encoding_rs::UTF_8,
            has_bom: true,
            expected_bytes: vec![0xEF, 0xBB, 0xBF, 0x41, 0x42],
        },
        // UTF-16LE No BOM with Japanese
        // NOTE: This passes thanks to the manual encoding fix implemented in `write_file`.
        TestCase {
            name: "UTF-16LE No BOM with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::UTF_16LE,
            has_bom: false,
            expected_bytes: vec![0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f, 0x30],
        },
        // UTF-16LE with BOM
        TestCase {
            name: "UTF-16LE with BOM",
            text: "A",
            encoding: encoding_rs::UTF_16LE,
            has_bom: true,
            expected_bytes: vec![0xFF, 0xFE, 0x41, 0x00],
        },
        // UTF-16BE No BOM with Japanese
        // NOTE: This passes thanks to the manual encoding fix.
        TestCase {
            name: "UTF-16BE No BOM with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::UTF_16BE,
            has_bom: false,
            expected_bytes: vec![0x30, 0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f],
        },
        // UTF-16BE with BOM
        TestCase {
            name: "UTF-16BE with BOM",
            text: "A",
            encoding: encoding_rs::UTF_16BE,
            has_bom: true,
            expected_bytes: vec![0xFE, 0xFF, 0x00, 0x41],
        },
    ];

    for (i, case) in cases.into_iter().enumerate() {
        // Each case writes to its own file so cases can't interfere.
        let file_name = format!("test_{}.txt", i);
        let path: Arc<Path> = Path::new(&file_name).into();
        let file_path = root_path.join(&file_name);

        fs.insert_file(&file_path, "".into()).await;

        let rel_path = RelPath::new(&path, PathStyle::local()).unwrap().into_arc();
        let text = text::Rope::from(case.text);

        let task = worktree.update(cx, |wt, cx| {
            wt.write_file(
                rel_path,
                text,
                text::LineEnding::Unix,
                case.encoding,
                case.has_bom,
                cx,
            )
        });

        if let Err(e) = task.await {
            panic!("Unexpected error in case '{}': {:?}", case.name, e);
        }

        // Compare the raw bytes on disk against the expected encoding.
        let bytes = fs.load_bytes(&file_path).await.unwrap();

        assert_eq!(
            bytes, case.expected_bytes,
            "case '{}' mismatch. Expected {:?}, but got {:?}",
            case.name, case.expected_bytes, bytes
        );
    }
}
3646
/// With scanning disabled, `refresh_entries_for_paths` for a deeply nested
/// path must create entries for all of its ancestors (and pick up their
/// immediate children) on demand.
#[gpui::test]
async fn test_refresh_entries_for_paths_creates_ancestors(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "a": {
                "b": {
                    "c": {
                        "deep_file.txt": "content",
                        "sibling.txt": "content"
                    },
                    "d": {
                        "under_sibling_dir.txt": "content"
                    }
                }
            }
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        false, // Disable scanning so the initial scan doesn't discover any entries
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Baseline: with scanning off, nothing below the root is known yet.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|e| e.path.as_ref())
                .collect::<Vec<_>>(),
            &[rel_path("")],
            "Only root entry should exist when scanning is disabled"
        );

        assert!(tree.entry_for_path(rel_path("a")).is_none());
        assert!(tree.entry_for_path(rel_path("a/b")).is_none());
        assert!(tree.entry_for_path(rel_path("a/b/c")).is_none());
        assert!(
            tree.entry_for_path(rel_path("a/b/c/deep_file.txt"))
                .is_none()
        );
    });

    // Refresh just the deepest path and wait for the refresh to finish.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("a/b/c/deep_file.txt").into()])
    })
    .recv()
    .await;

    // Every ancestor of the refreshed path — and the ancestors' immediate
    // children (sibling.txt, a/b/d) — should now have entries. Note that
    // a/b/d's own contents are not expected here.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|e| e.path.as_ref())
                .collect::<Vec<_>>(),
            &[
                rel_path(""),
                rel_path("a"),
                rel_path("a/b"),
                rel_path("a/b/c"),
                rel_path("a/b/c/deep_file.txt"),
                rel_path("a/b/c/sibling.txt"),
                rel_path("a/b/d"),
            ],
            "All ancestors should be created when refreshing a deeply nested path"
        );
    });
}
3728
3729#[gpui::test]
3730async fn test_single_file_worktree_deleted(cx: &mut TestAppContext) {
3731 init_test(cx);
3732 let fs = FakeFs::new(cx.background_executor.clone());
3733
3734 fs.insert_tree(
3735 "/root",
3736 json!({
3737 "test.txt": "content",
3738 }),
3739 )
3740 .await;
3741
3742 let tree = Worktree::local(
3743 Path::new("/root/test.txt"),
3744 true,
3745 fs.clone(),
3746 Default::default(),
3747 true,
3748 WorktreeId::from_proto(0),
3749 &mut cx.to_async(),
3750 )
3751 .await
3752 .unwrap();
3753
3754 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3755 .await;
3756
3757 tree.read_with(cx, |tree, _| {
3758 assert!(tree.is_single_file(), "Should be a single-file worktree");
3759 assert_eq!(tree.abs_path().as_ref(), Path::new("/root/test.txt"));
3760 });
3761
3762 // Delete the file
3763 fs.remove_file(Path::new("/root/test.txt"), Default::default())
3764 .await
3765 .unwrap();
3766
3767 // Subscribe to worktree events
3768 let deleted_event_received = Rc::new(Cell::new(false));
3769 let _subscription = cx.update({
3770 let deleted_event_received = deleted_event_received.clone();
3771 |cx| {
3772 cx.subscribe(&tree, move |_, event, _| {
3773 if matches!(event, Event::Deleted) {
3774 deleted_event_received.set(true);
3775 }
3776 })
3777 }
3778 });
3779
3780 // Trigger filesystem events - the scanner should detect the file is gone immediately
3781 // and emit a Deleted event
3782 cx.background_executor.run_until_parked();
3783 cx.background_executor
3784 .advance_clock(std::time::Duration::from_secs(1));
3785 cx.background_executor.run_until_parked();
3786
3787 assert!(
3788 deleted_event_received.get(),
3789 "Should receive Deleted event when single-file worktree root is deleted"
3790 );
3791}
3792
3793#[gpui::test]
3794async fn test_remote_worktree_without_git_emits_root_repo_event_after_first_update(
3795 cx: &mut TestAppContext,
3796) {
3797 cx.update(|cx| {
3798 let store = SettingsStore::test(cx);
3799 cx.set_global(store);
3800 });
3801
3802 let client = AnyProtoClient::new(NoopProtoClient::new());
3803
3804 let worktree = cx.update(|cx| {
3805 Worktree::remote(
3806 1,
3807 clock::ReplicaId::new(1),
3808 proto::WorktreeMetadata {
3809 id: 1,
3810 root_name: "project".to_string(),
3811 visible: true,
3812 abs_path: "/home/user/project".to_string(),
3813 root_repo_common_dir: None,
3814 },
3815 client,
3816 PathStyle::Posix,
3817 cx,
3818 )
3819 });
3820
3821 let events: Arc<std::sync::Mutex<Vec<&'static str>>> =
3822 Arc::new(std::sync::Mutex::new(Vec::new()));
3823 let events_clone = events.clone();
3824 cx.update(|cx| {
3825 cx.subscribe(&worktree, move |_, event, _cx| {
3826 if matches!(event, Event::UpdatedRootRepoCommonDir { .. }) {
3827 events_clone
3828 .lock()
3829 .unwrap()
3830 .push("UpdatedRootRepoCommonDir");
3831 }
3832 if matches!(event, Event::UpdatedEntries(_)) {
3833 events_clone.lock().unwrap().push("UpdatedEntries");
3834 }
3835 })
3836 .detach();
3837 });
3838
3839 // Send an update with entries but no repo info (plain directory).
3840 worktree.update(cx, |worktree, _cx| {
3841 worktree
3842 .as_remote()
3843 .unwrap()
3844 .update_from_remote(proto::UpdateWorktree {
3845 project_id: 1,
3846 worktree_id: 1,
3847 abs_path: "/home/user/project".to_string(),
3848 root_name: "project".to_string(),
3849 updated_entries: vec![proto::Entry {
3850 id: 1,
3851 is_dir: true,
3852 path: "".to_string(),
3853 inode: 1,
3854 mtime: Some(proto::Timestamp {
3855 seconds: 0,
3856 nanos: 0,
3857 }),
3858 is_ignored: false,
3859 is_hidden: false,
3860 is_external: false,
3861 is_fifo: false,
3862 size: None,
3863 canonical_path: None,
3864 }],
3865 removed_entries: vec![],
3866 scan_id: 1,
3867 is_last_update: true,
3868 updated_repositories: vec![],
3869 removed_repositories: vec![],
3870 root_repo_common_dir: None,
3871 });
3872 });
3873
3874 cx.run_until_parked();
3875
3876 let fired = events.lock().unwrap();
3877 assert!(
3878 fired.contains(&"UpdatedEntries"),
3879 "UpdatedEntries should fire after remote update"
3880 );
3881 assert!(
3882 fired.contains(&"UpdatedRootRepoCommonDir"),
3883 "UpdatedRootRepoCommonDir should fire after first remote update even when \
3884 root_repo_common_dir is None, to signal that repo state is now known"
3885 );
3886}
3887
3888#[gpui::test]
3889async fn test_remote_worktree_with_git_emits_root_repo_event_when_repo_info_arrives(
3890 cx: &mut TestAppContext,
3891) {
3892 cx.update(|cx| {
3893 let store = SettingsStore::test(cx);
3894 cx.set_global(store);
3895 });
3896
3897 let client = AnyProtoClient::new(NoopProtoClient::new());
3898
3899 let worktree = cx.update(|cx| {
3900 Worktree::remote(
3901 1,
3902 clock::ReplicaId::new(1),
3903 proto::WorktreeMetadata {
3904 id: 1,
3905 root_name: "project".to_string(),
3906 visible: true,
3907 abs_path: "/home/user/project".to_string(),
3908 root_repo_common_dir: None,
3909 },
3910 client,
3911 PathStyle::Posix,
3912 cx,
3913 )
3914 });
3915
3916 let events: Arc<std::sync::Mutex<Vec<&'static str>>> =
3917 Arc::new(std::sync::Mutex::new(Vec::new()));
3918 let events_clone = events.clone();
3919 cx.update(|cx| {
3920 cx.subscribe(&worktree, move |_, event, _cx| {
3921 if matches!(event, Event::UpdatedRootRepoCommonDir { .. }) {
3922 events_clone
3923 .lock()
3924 .unwrap()
3925 .push("UpdatedRootRepoCommonDir");
3926 }
3927 })
3928 .detach();
3929 });
3930
3931 // Send an update where repo info arrives (None -> Some).
3932 worktree.update(cx, |worktree, _cx| {
3933 worktree
3934 .as_remote()
3935 .unwrap()
3936 .update_from_remote(proto::UpdateWorktree {
3937 project_id: 1,
3938 worktree_id: 1,
3939 abs_path: "/home/user/project".to_string(),
3940 root_name: "project".to_string(),
3941 updated_entries: vec![proto::Entry {
3942 id: 1,
3943 is_dir: true,
3944 path: "".to_string(),
3945 inode: 1,
3946 mtime: Some(proto::Timestamp {
3947 seconds: 0,
3948 nanos: 0,
3949 }),
3950 is_ignored: false,
3951 is_hidden: false,
3952 is_external: false,
3953 is_fifo: false,
3954 size: None,
3955 canonical_path: None,
3956 }],
3957 removed_entries: vec![],
3958 scan_id: 1,
3959 is_last_update: true,
3960 updated_repositories: vec![],
3961 removed_repositories: vec![],
3962 root_repo_common_dir: Some("/home/user/project/.git".to_string()),
3963 });
3964 });
3965
3966 cx.run_until_parked();
3967
3968 let fired = events.lock().unwrap();
3969 assert!(
3970 fired.contains(&"UpdatedRootRepoCommonDir"),
3971 "UpdatedRootRepoCommonDir should fire when repo info arrives (None -> Some)"
3972 );
3973 assert_eq!(
3974 fired
3975 .iter()
3976 .filter(|e| **e == "UpdatedRootRepoCommonDir")
3977 .count(),
3978 1,
3979 "should fire exactly once, not duplicate"
3980 );
3981}