1mod worktree_settings;
2
3use anyhow::Result;
4use encoding_rs;
5use fs::{FakeFs, Fs, RealFs, RemoveOptions};
6use git::{DOT_GIT, GITIGNORE, REPO_EXCLUDE};
7use gpui::{AppContext as _, BackgroundExecutor, BorrowAppContext, Context, Task, TestAppContext};
8use parking_lot::Mutex;
9use postage::stream::Stream;
10use pretty_assertions::assert_eq;
11use rand::prelude::*;
12use worktree::{Entry, EntryKind, Event, PathChange, Worktree, WorktreeModelHandle};
13
14use serde_json::json;
15use settings::{SettingsStore, WorktreeId};
16use std::{
17 cell::Cell,
18 env,
19 fmt::Write,
20 mem,
21 path::{Path, PathBuf},
22 rc::Rc,
23 sync::Arc,
24};
25use util::{
26 ResultExt, path,
27 paths::PathStyle,
28 rel_path::{RelPath, rel_path},
29 test::TempTree,
30};
31
32#[gpui::test]
33async fn test_traversal(cx: &mut TestAppContext) {
34 init_test(cx);
35 let fs = FakeFs::new(cx.background_executor.clone());
36 fs.insert_tree(
37 "/root",
38 json!({
39 ".gitignore": "a/b\n",
40 "a": {
41 "b": "",
42 "c": "",
43 }
44 }),
45 )
46 .await;
47
48 let tree = Worktree::local(
49 Path::new("/root"),
50 true,
51 fs,
52 Default::default(),
53 true,
54 WorktreeId::from_proto(0),
55 &mut cx.to_async(),
56 )
57 .await
58 .unwrap();
59 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
60 .await;
61
62 tree.read_with(cx, |tree, _| {
63 assert_eq!(
64 tree.entries(false, 0)
65 .map(|entry| entry.path.as_ref())
66 .collect::<Vec<_>>(),
67 vec![
68 rel_path(""),
69 rel_path(".gitignore"),
70 rel_path("a"),
71 rel_path("a/c"),
72 ]
73 );
74 assert_eq!(
75 tree.entries(true, 0)
76 .map(|entry| entry.path.as_ref())
77 .collect::<Vec<_>>(),
78 vec![
79 rel_path(""),
80 rel_path(".gitignore"),
81 rel_path("a"),
82 rel_path("a/b"),
83 rel_path("a/c"),
84 ]
85 );
86 })
87}
88
// The scanner must terminate even when symlinks form cycles: both "lib"
// symlinks below resolve to an ancestor of the link itself. The links should
// appear as entries without being traversed into.
#[gpui::test(iterations = 10)]
async fn test_circular_symlinks(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "lib": {
                "a": {
                    "a.txt": ""
                },
                "b": {
                    "b.txt": ""
                }
            }
        }),
    )
    .await;
    // Each symlink points at "..", i.e. back up to "/root/lib", forming a cycle.
    fs.create_symlink("/root/lib/a/lib".as_ref(), "..".into())
        .await
        .unwrap();
    fs.create_symlink("/root/lib/b/lib".as_ref(), "..".into())
        .await
        .unwrap();

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // The symlinks show up as plain entries; their targets are not expanded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                rel_path(""),
                rel_path("lib"),
                rel_path("lib/a"),
                rel_path("lib/a/a.txt"),
                rel_path("lib/a/lib"),
                rel_path("lib/b"),
                rel_path("lib/b/b.txt"),
                rel_path("lib/b/lib"),
            ]
        );
    });

    // Renaming one of the cyclic symlinks must be picked up by the background
    // scanner once the executor runs to quiescence.
    fs.rename(
        Path::new("/root/lib/a/lib"),
        Path::new("/root/lib/a/lib-2"),
        Default::default(),
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                rel_path(""),
                rel_path("lib"),
                rel_path("lib/a"),
                rel_path("lib/a/a.txt"),
                rel_path("lib/a/lib-2"),
                rel_path("lib/b"),
                rel_path("lib/b/b.txt"),
                rel_path("lib/b/lib"),
            ]
        );
    });
}
173
// Symlinks whose targets lie outside the worktree root are surfaced as
// "external" entries that start out unloaded; their contents are only scanned
// one level at a time, when explicitly refreshed.
#[gpui::test]
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "dir1": {
                "deps": {
                    // symlinks here
                },
                "src": {
                    "a.rs": "",
                    "b.rs": "",
                },
            },
            "dir2": {
                "src": {
                    "c.rs": "",
                    "d.rs": "",
                }
            },
            "dir3": {
                "deps": {},
                "src": {
                    "e.rs": "",
                    "f.rs": "",
                },
            }
        }),
    )
    .await;

    // These symlinks point to directories outside of the worktree's root, dir1.
    fs.create_symlink("/root/dir1/deps/dep-dir2".as_ref(), "../../dir2".into())
        .await
        .unwrap();
    fs.create_symlink("/root/dir1/deps/dep-dir3".as_ref(), "../../dir3".into())
        .await
        .unwrap();

    let tree = Worktree::local(
        Path::new("/root/dir1"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Record entry-change events so we can assert exactly what gets loaded.
    let tree_updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |_, cx| {
        let tree_updates = tree_updates.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedEntries(update) = event {
                tree_updates.lock().extend(
                    update
                        .iter()
                        .map(|(path, _, change)| (path.clone(), *change)),
                );
            }
        })
        .detach();
    });

    // The symlinked directories are not scanned by default.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );

        assert_eq!(
            tree.entry_for_path(rel_path("deps/dep-dir2")).unwrap().kind,
            EntryKind::UnloadedDir
        );
    });

    // Expand one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3").into()])
    })
    .recv()
    .await;

    // The expanded directory's contents are loaded. Subdirectories are
    // not scanned yet.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("deps/dep-dir3/deps"), true),
                (rel_path("deps/dep-dir3/src"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );
    });
    // Only the refreshed directory and its immediate children were reported.
    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (rel_path("deps/dep-dir3").into(), PathChange::Loaded),
            (rel_path("deps/dep-dir3/deps").into(), PathChange::Loaded),
            (rel_path("deps/dep-dir3/src").into(), PathChange::Loaded)
        ]
    );

    // Expand a subdirectory of one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3/src").into()])
    })
    .recv()
    .await;

    // The expanded subdirectory's contents are loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("deps/dep-dir3/deps"), true),
                (rel_path("deps/dep-dir3/src"), true),
                (rel_path("deps/dep-dir3/src/e.rs"), true),
                (rel_path("deps/dep-dir3/src/f.rs"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );
    });

    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (rel_path("deps/dep-dir3/src").into(), PathChange::Loaded),
            (
                rel_path("deps/dep-dir3/src/e.rs").into(),
                PathChange::Loaded
            ),
            (
                rel_path("deps/dep-dir3/src/f.rs").into(),
                PathChange::Loaded
            )
        ]
    );
}
352
#[cfg(target_os = "macos")]
#[gpui::test]
async fn test_renaming_case_only(cx: &mut TestAppContext) {
    // Renaming a file so that only its letter case changes (aaa.rs -> AAA.rs)
    // must still be observed by the worktree on macOS's case-insensitive but
    // case-preserving default file system.
    cx.executor().allow_parking();
    init_test(cx);

    const OLD_NAME: &str = "aaa.rs";
    const NEW_NAME: &str = "AAA.rs";

    // Asserts that the worktree holds exactly the root plus one file entry
    // with the given name.
    fn assert_single_file(tree: &Worktree, name: &str) {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![rel_path(""), rel_path(name)]
        );
    }

    let real_fs = Arc::new(RealFs::new(None, cx.executor()));
    let temp_root = TempTree::new(json!({
        OLD_NAME: "",
    }));

    let worktree = Worktree::local(
        temp_root.path(),
        true,
        real_fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| worktree.read(cx).as_local().unwrap().scan_complete())
        .await;
    worktree.read_with(cx, |tree, _| assert_single_file(tree, OLD_NAME));

    // Perform the case-only rename on disk.
    real_fs
        .rename(
            &temp_root.path().join(OLD_NAME),
            &temp_root.path().join(NEW_NAME),
            fs::RenameOptions {
                overwrite: true,
                ignore_if_exists: true,
                create_parents: false,
            },
        )
        .await
        .unwrap();

    worktree.flush_fs_events(cx).await;

    worktree.read_with(cx, |tree, _| assert_single_file(tree, NEW_NAME));
}
413
// A "rescan" event on the worktree root must make the scanner re-read the
// directory from the file system and reconcile any state it missed while
// individual fs events were suppressed (here: one deletion and one creation).
#[gpui::test]
async fn test_root_rescan_reconciles_stale_state(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "old.txt": "",
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![rel_path(""), rel_path("old.txt")]
        );
    });

    // Suppress fs events so the worktree's view of the root goes stale,
    // then drop the two buffered events so they are never delivered.
    fs.pause_events();
    fs.remove_file(Path::new("/root/old.txt"), RemoveOptions::default())
        .await
        .unwrap();
    fs.insert_file(Path::new("/root/new.txt"), Vec::new()).await;
    assert_eq!(fs.buffered_event_count(), 2);
    fs.clear_buffered_events();

    // The worktree still reflects the old state, since it saw no events.
    tree.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path(rel_path("old.txt")).is_some());
        assert!(tree.entry_for_path(rel_path("new.txt")).is_none());
    });

    // A single Rescan event for the root should trigger a full re-read.
    fs.emit_fs_event("/root", Some(fs::PathEventKind::Rescan));
    fs.unpause_events_and_flush();
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path(rel_path("old.txt")).is_none());
        assert!(tree.entry_for_path(rel_path("new.txt")).is_some());
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![rel_path(""), rel_path("new.txt")]
        );
    });
}
478
// A Rescan event on a subdirectory must re-walk the whole subtree: entries
// that still exist are reported as Updated (even if unchanged), new ones as
// Added, and vanished ones as Removed. Paths outside the subtree are untouched.
#[gpui::test]
async fn test_subtree_rescan_reports_unchanged_descendants_as_updated(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "child.txt": "",
                "nested": {
                    "grandchild.txt": "",
                },
                "remove": {
                    "removed.txt": "",
                }
            },
            "other.txt": "",
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Record entry-change events so we can assert exactly what is reported.
    let tree_updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |_, cx| {
        let tree_updates = tree_updates.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedEntries(update) = event {
                tree_updates.lock().extend(
                    update
                        .iter()
                        // Skip the sentinel path, presumably emitted by the
                        // fake fs when flushing events — not part of the test.
                        .filter(|(path, _, _)| path.as_ref() != rel_path("fs-event-sentinel"))
                        .map(|(path, _, change)| (path.clone(), *change)),
                );
            }
        })
        .detach();
    });
    // Mutate "dir" while events are paused, and drop the buffered events so
    // the worktree only ever learns about the changes via the Rescan below.
    fs.pause_events();
    fs.insert_file("/root/dir/new.txt", b"new content".to_vec())
        .await;
    fs.remove_dir(
        "/root/dir/remove".as_ref(),
        RemoveOptions {
            recursive: true,
            ignore_if_not_exists: false,
        },
    )
    .await
    .unwrap();
    fs.clear_buffered_events();
    fs.unpause_events_and_flush();

    fs.emit_fs_event("/root/dir", Some(fs::PathEventKind::Rescan));
    tree.flush_fs_events(cx).await;

    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (rel_path("dir").into(), PathChange::Updated),
            (rel_path("dir/child.txt").into(), PathChange::Updated),
            (rel_path("dir/nested").into(), PathChange::Updated),
            (
                rel_path("dir/nested/grandchild.txt").into(),
                PathChange::Updated
            ),
            (rel_path("dir/new.txt").into(), PathChange::Added),
            (rel_path("dir/remove").into(), PathChange::Removed),
            (
                rel_path("dir/remove/removed.txt").into(),
                PathChange::Removed
            ),
        ]
    );

    // The sibling outside the rescanned subtree is unaffected.
    tree.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path(rel_path("other.txt")).is_some());
    });
}
571
// Gitignored directories are scanned lazily: loading a file deep inside one
// expands only the directories along that path, and the test verifies this by
// counting `read_dir` calls. Changes inside still-unloaded directories must
// cause no fs work at all.
#[gpui::test]
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "one": {
                "node_modules": {
                    "a": {
                        "a1.js": "a1",
                        "a2.js": "a2",
                    },
                    "b": {
                        "b1.js": "b1",
                        "b2.js": "b2",
                    },
                    "c": {
                        "c1.js": "c1",
                        "c2.js": "c2",
                    }
                },
            },
            "two": {
                "x.js": "",
                "y.js": "",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Initially, the ignored "one/node_modules" is listed but not descended into.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );
    });

    // Open a file that is nested inside of a gitignored directory that
    // has not yet been expanded.
    let prev_read_dir_count = fs.read_dir_call_count();
    let loaded = tree
        .update(cx, |tree, cx| {
            tree.load_file(rel_path("one/node_modules/b/b1.js"), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("one/node_modules/a"), true),
                (rel_path("one/node_modules/b"), true),
                (rel_path("one/node_modules/b/b1.js"), true),
                (rel_path("one/node_modules/b/b2.js"), true),
                (rel_path("one/node_modules/c"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );

        assert_eq!(
            loaded.file.path.as_ref(),
            rel_path("one/node_modules/b/b1.js")
        );

        // Only the newly-expanded directories are scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
    });

    // Open another file in a different subdirectory of the same
    // gitignored directory.
    let prev_read_dir_count = fs.read_dir_call_count();
    let loaded = tree
        .update(cx, |tree, cx| {
            tree.load_file(rel_path("one/node_modules/a/a2.js"), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("one/node_modules/a"), true),
                (rel_path("one/node_modules/a/a1.js"), true),
                (rel_path("one/node_modules/a/a2.js"), true),
                (rel_path("one/node_modules/b"), true),
                (rel_path("one/node_modules/b/b1.js"), true),
                (rel_path("one/node_modules/b/b2.js"), true),
                (rel_path("one/node_modules/c"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );

        assert_eq!(
            loaded.file.path.as_ref(),
            rel_path("one/node_modules/a/a2.js")
        );

        // Only the newly-expanded directory is scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
    });

    let path = PathBuf::from("/root/one/node_modules/c/lib");

    // No work happens when files and directories change within an unloaded directory.
    let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
    // When we open a directory, we check each ancestor whether it's a git
    // repository. That means we have an fs.metadata call per ancestor that we
    // need to subtract here.
    let ancestors = path.ancestors().count();

    fs.create_dir(path.as_ref()).await.unwrap();
    cx.executor().run_until_parked();

    assert_eq!(
        fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count - ancestors,
        0
    );
}
735
// When a `.gitignore` change un-ignores a directory, all of its
// not-yet-loaded subdirectories must be scanned — but each one exactly once,
// which the test verifies via `read_dir` call-count deltas.
#[gpui::test]
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "a": {
                "a.js": "",
            },
            "b": {
                "b.js": "",
            },
            "node_modules": {
                "c": {
                    "c.js": "",
                },
                "d": {
                    "d.js": "",
                    "e": {
                        "e1.js": "",
                        "e2.js": "",
                    },
                    "f": {
                        "f1.js": "",
                        "f2.js": "",
                    }
                },
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Open a file within the gitignored directory, forcing some of its
    // subdirectories to be read, but not all.
    let read_dir_count_1 = fs.read_dir_call_count();
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("node_modules/d/d.js").into()])
    })
    .recv()
    .await;

    // Those subdirectories are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("a"), false),
                (rel_path("a/a.js"), false),
                (rel_path("b"), false),
                (rel_path("b/b.js"), false),
                (rel_path("node_modules"), true),
                (rel_path("node_modules/c"), true),
                (rel_path("node_modules/d"), true),
                (rel_path("node_modules/d/d.js"), true),
                (rel_path("node_modules/d/e"), true),
                (rel_path("node_modules/d/f"), true),
            ]
        );
    });
    // Only "node_modules" and "node_modules/d" were read from disk.
    let read_dir_count_2 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);

    // Update the gitignore so that node_modules is no longer ignored,
    // but a subdirectory is ignored
    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // All of the directories that are no longer ignored are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("a"), false),
                (rel_path("a/a.js"), false),
                (rel_path("b"), false),
                (rel_path("b/b.js"), false),
                // This directory is no longer ignored
                (rel_path("node_modules"), false),
                (rel_path("node_modules/c"), false),
                (rel_path("node_modules/c/c.js"), false),
                (rel_path("node_modules/d"), false),
                (rel_path("node_modules/d/d.js"), false),
                // This subdirectory is now ignored
                (rel_path("node_modules/d/e"), true),
                (rel_path("node_modules/d/f"), false),
                (rel_path("node_modules/d/f/f1.js"), false),
                (rel_path("node_modules/d/f/f2.js"), false),
            ]
        );
    });

    // Each of the newly-loaded directories is scanned only once.
    let read_dir_count_3 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
}
860
861#[gpui::test]
862async fn test_write_file(cx: &mut TestAppContext) {
863 init_test(cx);
864 cx.executor().allow_parking();
865 let dir = TempTree::new(json!({
866 ".git": {},
867 ".gitignore": "ignored-dir\n",
868 "tracked-dir": {},
869 "ignored-dir": {}
870 }));
871
872 let worktree = Worktree::local(
873 dir.path(),
874 true,
875 Arc::new(RealFs::new(None, cx.executor())),
876 Default::default(),
877 true,
878 WorktreeId::from_proto(0),
879 &mut cx.to_async(),
880 )
881 .await
882 .unwrap();
883
884 #[cfg(not(target_os = "macos"))]
885 fs::fs_watcher::global(|_| {}).unwrap();
886
887 cx.read(|cx| worktree.read(cx).as_local().unwrap().scan_complete())
888 .await;
889 worktree.flush_fs_events(cx).await;
890
891 worktree
892 .update(cx, |tree, cx| {
893 tree.write_file(
894 rel_path("tracked-dir/file.txt").into(),
895 "hello".into(),
896 Default::default(),
897 encoding_rs::UTF_8,
898 false,
899 cx,
900 )
901 })
902 .await
903 .unwrap();
904 worktree
905 .update(cx, |tree, cx| {
906 tree.write_file(
907 rel_path("ignored-dir/file.txt").into(),
908 "world".into(),
909 Default::default(),
910 encoding_rs::UTF_8,
911 false,
912 cx,
913 )
914 })
915 .await
916 .unwrap();
917 worktree.read_with(cx, |tree, _| {
918 let tracked = tree
919 .entry_for_path(rel_path("tracked-dir/file.txt"))
920 .unwrap();
921 let ignored = tree
922 .entry_for_path(rel_path("ignored-dir/file.txt"))
923 .unwrap();
924 assert!(!tracked.is_ignored);
925 assert!(ignored.is_ignored);
926 });
927}
928
// Paths matching `file_scan_inclusions` must be indexed even when the
// project's `.gitignore` would hide them.
#[gpui::test]
async fn test_file_scan_inclusions(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules\ntop_level.txt\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
            "package.json": "//package.json"
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        "top_level.txt": "top level file",
        ".DS_Store": "",
    }));
    // No exclusions; include package.json files under node_modules and every
    // .DS_Store file, even though they are all gitignored.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions = Some(vec![
                    "node_modules/**/package.json".to_string(),
                    "**/.DS_Store".to_string(),
                ]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        // Assert that file_scan_inclusions overrides the gitignore: the
        // matched files (last argument) are always included.
        check_worktree_entries(
            tree,
            &[],
            &["target", "node_modules"],
            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
            &[
                "node_modules/prettier/package.json",
                ".DS_Store",
                "node_modules/.DS_Store",
                "src/.DS_Store",
            ],
        )
    });
}
1001
1002#[gpui::test]
1003async fn test_file_scan_exclusions_overrules_inclusions(cx: &mut TestAppContext) {
1004 init_test(cx);
1005 cx.executor().allow_parking();
1006 let dir = TempTree::new(json!({
1007 ".gitignore": "**/target\n/node_modules\n",
1008 "target": {
1009 "index": "blah2"
1010 },
1011 "node_modules": {
1012 ".DS_Store": "",
1013 "prettier": {
1014 "package.json": "{}",
1015 },
1016 },
1017 "src": {
1018 ".DS_Store": "",
1019 "foo": {
1020 "foo.rs": "mod another;\n",
1021 "another.rs": "// another",
1022 },
1023 },
1024 ".DS_Store": "",
1025 }));
1026
1027 cx.update(|cx| {
1028 cx.update_global::<SettingsStore, _>(|store, cx| {
1029 store.update_user_settings(cx, |settings| {
1030 settings.project.worktree.file_scan_exclusions =
1031 Some(vec!["**/.DS_Store".to_string()]);
1032 settings.project.worktree.file_scan_inclusions =
1033 Some(vec!["**/.DS_Store".to_string()]);
1034 });
1035 });
1036 });
1037
1038 let tree = Worktree::local(
1039 dir.path(),
1040 true,
1041 Arc::new(RealFs::new(None, cx.executor())),
1042 Default::default(),
1043 true,
1044 WorktreeId::from_proto(0),
1045 &mut cx.to_async(),
1046 )
1047 .await
1048 .unwrap();
1049 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1050 .await;
1051 tree.flush_fs_events(cx).await;
1052 tree.read_with(cx, |tree, _| {
1053 // Assert that file_scan_inclusions overrides file_scan_exclusions.
1054 check_worktree_entries(
1055 tree,
1056 &[".DS_Store, src/.DS_Store"],
1057 &["target", "node_modules"],
1058 &["src/foo/another.rs", "src/foo/foo.rs", ".gitignore"],
1059 &[],
1060 )
1061 });
1062}
1063
// Changing `file_scan_inclusions` at runtime must re-index the worktree:
// entries gain `is_always_included` while the glob matches them, and lose it
// again once the setting is cleared.
#[gpui::test]
async fn test_file_scan_inclusions_reindexes_on_setting_change(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules/\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
        },
        ".DS_Store": "",
    }));

    // Initially, everything under node_modules is always included.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions =
                    Some(vec!["node_modules/**".to_string()]);
            });
        });
    });
    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("node_modules"))
                .is_some_and(|f| f.is_always_included)
        );
        assert!(
            tree.entry_for_path(rel_path("node_modules/prettier/package.json"))
                .is_some_and(|f| f.is_always_included)
        );
    });

    // Clearing the inclusions should drop `is_always_included` after the
    // subsequent rescan completes.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions = Some(vec![]);
            });
        });
    });
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("node_modules"))
                .is_some_and(|f| !f.is_always_included)
        );
        assert!(
            tree.entry_for_path(rel_path("node_modules/prettier/package.json"))
                .is_some_and(|f| !f.is_always_included)
        );
    });
}
1147
// Paths matching `file_scan_exclusions` are absent from the worktree, and
// changing the setting at runtime re-scans: previously-excluded entries
// reappear while newly-excluded ones disappear.
#[gpui::test]
async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        ".DS_Store": "",
    }));
    // Exclude everything under any "foo" directory plus all .DS_Store files.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions =
                    Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "node_modules/.DS_Store",
                "src/.DS_Store",
                ".DS_Store",
            ],
            &["target", "node_modules"],
            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
            &[],
        )
    });

    // Swap the exclusions: node_modules becomes excluded, foo and .DS_Store
    // become visible again after the rescan triggered by the settings change.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions =
                    Some(vec!["**/node_modules/**".to_string()]);
            });
        });
    });
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                "node_modules/prettier/package.json",
                "node_modules/.DS_Store",
                "node_modules",
            ],
            &["target"],
            &[
                ".gitignore",
                "src/lib.rs",
                "src/bar/bar.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/.DS_Store",
                ".DS_Store",
            ],
            &[],
        )
    });
}
1247
// By default, dotfiles are marked hidden. Setting `hidden_files` replaces
// that default entirely: afterwards only the configured globs (*.log here)
// are hidden, and the dotfiles are not.
#[gpui::test]
async fn test_hidden_files(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n",
        ".hidden_file": "content",
        ".hidden_dir": {
            "nested.rs": "code",
        },
        "src": {
            "visible.rs": "code",
        },
        "logs": {
            "app.log": "logs",
            "debug.log": "logs",
        },
        "visible.txt": "content",
    }));

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    // With default settings, all dot-prefixed entries are hidden.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_hidden))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), true),
                (rel_path(".hidden_dir"), true),
                (rel_path(".hidden_dir/nested.rs"), true),
                (rel_path(".hidden_file"), true),
                (rel_path("logs"), false),
                (rel_path("logs/app.log"), false),
                (rel_path("logs/debug.log"), false),
                (rel_path("src"), false),
                (rel_path("src/visible.rs"), false),
                (rel_path("visible.txt"), false),
            ]
        );
    });

    // Override `hidden_files` so that only *.log files are hidden.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.hidden_files = Some(vec!["**/*.log".to_string()]);
            });
        });
    });
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // The dotfiles are no longer hidden; the log files now are.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_hidden))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path(".hidden_dir"), false),
                (rel_path(".hidden_dir/nested.rs"), false),
                (rel_path(".hidden_file"), false),
                (rel_path("logs"), false),
                (rel_path("logs/app.log"), true),
                (rel_path("logs/debug.log"), true),
                (rel_path("src"), false),
                (rel_path("src/visible.rs"), false),
                (rel_path("visible.txt"), false),
            ]
        );
    });
}
1335
#[gpui::test]
async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
    // Verifies that paths matching `file_scan_exclusions` never appear in the
    // worktree — both after the initial scan and when fs events later arrive
    // for newly created files inside excluded, ignored, and tracked dirs.
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".git": {
            "HEAD": "ref: refs/heads/main\n",
            "foo": "bar",
        },
        ".gitignore": "**/target\n/node_modules\ntest_output\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        ".DS_Store": "",
    }));
    // Exclude `.git` everywhere, `node_modules`, and `build_output` — a
    // directory that does not exist yet at scan time.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![
                    "**/.git".to_string(),
                    "node_modules/".to_string(),
                    "build_output".to_string(),
                ]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    // After the initial scan: excluded paths are absent, `target` is ignored
    // (via .gitignore), everything else is tracked.
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                ".git/HEAD",
                ".git/foo",
                "node_modules",
                "node_modules/.DS_Store",
                "node_modules/prettier",
                "node_modules/prettier/package.json",
            ],
            &["target"],
            &[
                ".DS_Store",
                "src/.DS_Store",
                "src/lib.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/bar/bar.rs",
                ".gitignore",
            ],
            &[],
        )
    });

    // Create the excluded (`build_output`) and ignored (`test_output`)
    // directories that were referenced in settings/.gitignore but missing.
    let new_excluded_dir = dir.path().join("build_output");
    let new_ignored_dir = dir.path().join("test_output");
    std::fs::create_dir_all(&new_excluded_dir)
        .unwrap_or_else(|e| panic!("Failed to create a {new_excluded_dir:?} directory: {e}"));
    std::fs::create_dir_all(&new_ignored_dir)
        .unwrap_or_else(|e| panic!("Failed to create a {new_ignored_dir:?} directory: {e}"));
    let node_modules_dir = dir.path().join("node_modules");
    let dot_git_dir = dir.path().join(".git");
    let src_dir = dir.path().join("src");
    for existing_dir in [&node_modules_dir, &dot_git_dir, &src_dir] {
        assert!(
            existing_dir.is_dir(),
            "Expect {existing_dir:?} to be present in the FS already"
        );
    }

    // Drop a new file into each kind of directory — excluded, ignored, and
    // tracked — to exercise the fs-event handling for every category.
    for directory_for_new_file in [
        new_excluded_dir,
        new_ignored_dir,
        node_modules_dir,
        dot_git_dir,
        src_dir,
    ] {
        std::fs::write(directory_for_new_file.join("new_file"), "new file contents")
            .unwrap_or_else(|e| {
                panic!("Failed to create in {directory_for_new_file:?} a new file: {e}")
            });
    }
    tree.flush_fs_events(cx).await;

    // Files in excluded directories stay invisible; ignored and tracked
    // directories pick up their new files.
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                ".git/HEAD",
                ".git/foo",
                ".git/new_file",
                "node_modules",
                "node_modules/.DS_Store",
                "node_modules/prettier",
                "node_modules/prettier/package.json",
                "node_modules/new_file",
                "build_output",
                "build_output/new_file",
                "test_output/new_file",
            ],
            &["target", "test_output"],
            &[
                ".DS_Store",
                "src/.DS_Store",
                "src/lib.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/bar/bar.rs",
                "src/new_file",
                ".gitignore",
            ],
            &[],
        )
    });
}
1480
1481#[gpui::test]
1482async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
1483 init_test(cx);
1484 cx.executor().allow_parking();
1485 let dir = TempTree::new(json!({
1486 ".git": {
1487 "HEAD": "ref: refs/heads/main\n",
1488 "foo": "foo contents",
1489 },
1490 }));
1491 let dot_git_worktree_dir = dir.path().join(".git");
1492
1493 let tree = Worktree::local(
1494 dot_git_worktree_dir.clone(),
1495 true,
1496 Arc::new(RealFs::new(None, cx.executor())),
1497 Default::default(),
1498 true,
1499 WorktreeId::from_proto(0),
1500 &mut cx.to_async(),
1501 )
1502 .await
1503 .unwrap();
1504 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1505 .await;
1506 tree.flush_fs_events(cx).await;
1507 tree.read_with(cx, |tree, _| {
1508 check_worktree_entries(tree, &[], &["HEAD", "foo"], &[], &[])
1509 });
1510
1511 std::fs::write(dot_git_worktree_dir.join("new_file"), "new file contents")
1512 .unwrap_or_else(|e| panic!("Failed to create in {dot_git_worktree_dir:?} a new file: {e}"));
1513 tree.flush_fs_events(cx).await;
1514 tree.read_with(cx, |tree, _| {
1515 check_worktree_entries(tree, &[], &["HEAD", "foo", "new_file"], &[], &[])
1516 });
1517}
1518
#[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
    // Creating an entry while the initial scan is still in flight must emit
    // updates that keep an observer-side snapshot identical to the
    // worktree's own snapshot once everything settles.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "b": {},
            "c": {},
            "d": {},
        }),
    )
    .await;

    let tree = Worktree::local(
        "/root".as_ref(),
        true,
        fs,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Mirror all worktree updates into `snapshot1`, exactly as a remote
    // collaborator would via `apply_remote_update`.
    let snapshot1 = tree.update(cx, |tree, cx| {
        let tree = tree.as_local_mut().unwrap();
        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
        tree.observe_updates(0, cx, {
            let snapshot = snapshot.clone();
            let settings = tree.settings();
            move |update| {
                snapshot
                    .lock()
                    .apply_remote_update(update, &settings.file_scan_inclusions);
                async { true }
            }
        });
        snapshot
    });

    // Create a directory in a not-yet-scanned parent while scanning runs.
    let entry = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry(rel_path("a/e").into(), true, None, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_dir());

    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entry_for_path(rel_path("a/e")).unwrap().kind,
            EntryKind::Dir
        );
    });

    // The mirrored snapshot must converge to the worktree's final state.
    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
    assert_eq!(
        snapshot1.lock().entries(true, 0).collect::<Vec<_>>(),
        snapshot2.entries(true, 0).collect::<Vec<_>>()
    );
}
1587
#[gpui::test]
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
    // `create_entry` must create any missing intermediate directories
    // (mkdir -p semantics). Exercised against both the fake and the real
    // filesystem, including the minimal (parent exists) and maximal
    // (entire chain missing) cases.
    init_test(cx);
    cx.executor().allow_parking();

    let fs_fake = FakeFs::new(cx.background_executor.clone());
    fs_fake
        .insert_tree(
            "/root",
            json!({
                "a": {},
            }),
        )
        .await;

    let tree_fake = Worktree::local(
        "/root".as_ref(),
        true,
        fs_fake,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // FakeFs: creating a/b/c/d.txt must implicitly create a/b and a/b/c.
    let entry = tree_fake
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/d.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_fake.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/d.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("a/b/c")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("a/b")).unwrap().is_dir());
    });

    // Repeat the same scenario against the real filesystem.
    let fs_real = Arc::new(RealFs::new(None, cx.executor()));
    let temp_root = TempTree::new(json!({
        "a": {}
    }));

    let tree_real = Worktree::local(
        temp_root.path(),
        true,
        fs_real,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/d.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/d.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("a/b/c")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("a/b")).unwrap().is_dir());
    });

    // Test smallest change: the full parent chain already exists.
    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/e.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/e.txt"))
                .unwrap()
                .is_file()
        );
    });

    // Test largest change: every ancestor directory is missing.
    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("d/e/f/g.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("d/e/f/g.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("d/e/f")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("d/e")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("d")).unwrap().is_dir());
    });
}
1737
#[gpui::test]
async fn test_create_file_in_expanded_gitignored_dir(cx: &mut TestAppContext) {
    // Tests the behavior of our worktree refresh when a file in a gitignored directory
    // is created: an already-expanded ignored directory must stay loaded and
    // keep all of its entries visible.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "ignored_dir\n",
            "ignored_dir": {
                "existing_file.txt": "existing content",
                "another_file.txt": "another content",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Ignored directories start out unloaded: the scanner skips their contents.
    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(ignored_dir.kind, EntryKind::UnloadedDir);
    });

    // Loading a file inside the ignored directory forces it to be expanded.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("ignored_dir/existing_file.txt"), cx)
    })
    .await
    .unwrap();

    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(ignored_dir.kind, EntryKind::Dir);

        assert!(
            tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
                .is_some()
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
                .is_some()
        );
    });

    // Creating a new file in the expanded dir must not collapse it back.
    let entry = tree
        .update(cx, |tree, cx| {
            tree.create_entry(rel_path("ignored_dir/new_file.txt").into(), false, None, cx)
        })
        .await
        .unwrap();
    assert!(entry.into_included().is_some());

    cx.executor().run_until_parked();

    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(
            ignored_dir.kind,
            EntryKind::Dir,
            "ignored_dir should still be loaded, not UnloadedDir"
        );

        assert!(
            tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
                .is_some(),
            "existing_file.txt should still be visible"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
                .is_some(),
            "another_file.txt should still be visible"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/new_file.txt"))
                .is_some(),
            "new_file.txt should be visible"
        );
    });
}
1834
1835#[gpui::test]
1836async fn test_fs_event_for_gitignored_dir_does_not_lose_contents(cx: &mut TestAppContext) {
1837 // Tests the behavior of our worktree refresh when a directory modification for a gitignored directory
1838 // is triggered.
1839 init_test(cx);
1840 let fs = FakeFs::new(cx.background_executor.clone());
1841 fs.insert_tree(
1842 "/root",
1843 json!({
1844 ".gitignore": "ignored_dir\n",
1845 "ignored_dir": {
1846 "file1.txt": "content1",
1847 "file2.txt": "content2",
1848 },
1849 }),
1850 )
1851 .await;
1852
1853 let tree = Worktree::local(
1854 Path::new("/root"),
1855 true,
1856 fs.clone(),
1857 Default::default(),
1858 true,
1859 WorktreeId::from_proto(0),
1860 &mut cx.to_async(),
1861 )
1862 .await
1863 .unwrap();
1864
1865 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1866 .await;
1867
1868 // Load a file to expand the ignored directory
1869 tree.update(cx, |tree, cx| {
1870 tree.load_file(rel_path("ignored_dir/file1.txt"), cx)
1871 })
1872 .await
1873 .unwrap();
1874
1875 tree.read_with(cx, |tree, _| {
1876 let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
1877 assert_eq!(ignored_dir.kind, EntryKind::Dir);
1878 assert!(
1879 tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
1880 .is_some()
1881 );
1882 assert!(
1883 tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
1884 .is_some()
1885 );
1886 });
1887
1888 fs.emit_fs_event("/root/ignored_dir", Some(fs::PathEventKind::Changed));
1889 tree.flush_fs_events(cx).await;
1890
1891 tree.read_with(cx, |tree, _| {
1892 let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
1893 assert_eq!(
1894 ignored_dir.kind,
1895 EntryKind::Dir,
1896 "ignored_dir should still be loaded (Dir), not UnloadedDir"
1897 );
1898 assert!(
1899 tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
1900 .is_some(),
1901 "file1.txt should still be visible after directory fs event"
1902 );
1903 assert!(
1904 tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
1905 .is_some(),
1906 "file2.txt should still be visible after directory fs event"
1907 );
1908 });
1909}
1910
#[gpui::test(iterations = 100)]
async fn test_random_worktree_operations_during_initial_scan(
    cx: &mut TestAppContext,
    mut rng: StdRng,
) {
    // Fuzz test: perform random worktree mutations *while the initial scan is
    // still running*, then verify that replaying the observed updates onto any
    // intermediate snapshot reproduces the final snapshot exactly.
    init_test(cx);
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(5);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    // Seed the fake fs with a random tree.
    let root_dir = Path::new(path!("/test"));
    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Capture every update the worktree emits, and cross-check the
    // `UpdatedEntries` event stream against the snapshot as we go.
    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    // Mutate the worktree concurrently with the initial scan, checking
    // snapshot invariants after each operation.
    for _ in 0..operations {
        worktree
            .update(cx, |worktree, cx| {
                randomly_mutate_worktree(worktree, &mut rng, cx)
            })
            .await
            .log_err();
        worktree.read_with(cx, |tree, _| {
            tree.as_local().unwrap().snapshot().check_invariants(true)
        });

        // Occasionally record an intermediate snapshot to replay updates onto.
        if rng.random_bool(0.6) {
            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
        }
    }

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    cx.executor().run_until_parked();

    let final_snapshot = worktree.read_with(cx, |tree, _| {
        let tree = tree.as_local().unwrap();
        let snapshot = tree.snapshot();
        snapshot.check_invariants(true);
        snapshot
    });

    let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());

    // Replaying the updates (from the snapshot's scan id onward) onto each
    // recorded snapshot must converge to the final state.
    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
        let mut updated_snapshot = snapshot.clone();
        for update in updates.lock().iter() {
            if update.scan_id >= updated_snapshot.scan_id() as u64 {
                updated_snapshot
                    .apply_remote_update(update.clone(), &settings.file_scan_inclusions);
            }
        }

        assert_eq!(
            updated_snapshot.entries(true, 0).collect::<Vec<_>>(),
            final_snapshot.entries(true, 0).collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }
}
2005
#[gpui::test(iterations = 100)]
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
    // Fuzz test for the scanner's incremental event handling: after the
    // initial scan, randomly mutate the fs and the worktree while fs events
    // are buffered and flushed in random batches, then verify that
    // (a) a freshly scanned worktree agrees with the incrementally updated
    //     one, and
    // (b) replaying the emitted updates onto intermediate snapshots converges
    //     to the final state.
    init_test(cx);
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(40);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new(path!("/test"));
    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    // Buffer fs events so they can be delivered in randomly sized batches.
    fs.as_fake().pause_events();
    let mut snapshots = Vec::new();
    let mut mutations_len = operations;
    while mutations_len > 1 {
        // Mutate either through the worktree API (20%) or directly on the fs.
        if rng.random_bool(0.2) {
            worktree
                .update(cx, |worktree, cx| {
                    randomly_mutate_worktree(worktree, &mut rng, cx)
                })
                .await
                .log_err();
        } else {
            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
        }

        // Either flush a random prefix of the buffered events, or keep
        // mutating so events pile up.
        let buffered_event_count = fs.as_fake().buffered_event_count();
        if buffered_event_count > 0 && rng.random_bool(0.3) {
            let len = rng.random_range(0..=buffered_event_count);
            log::info!("flushing {} events", len);
            fs.as_fake().flush_events(len);
        } else {
            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
            mutations_len -= 1;
        }

        cx.executor().run_until_parked();
        if rng.random_bool(0.2) {
            log::info!("storing snapshot {}", snapshots.len());
            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
            snapshots.push(snapshot);
        }
    }

    // Deliver all remaining events and let the scanner settle.
    log::info!("quiescing");
    fs.as_fake().flush_events(usize::MAX);
    cx.executor().run_until_parked();

    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    snapshot.check_invariants(true);
    let expanded_paths = snapshot
        .expanded_entries()
        .map(|e| e.path.clone())
        .collect::<Vec<_>>();

    // A brand-new worktree scanning the same fs (with the same directories
    // expanded) must produce an identical set of entries.
    {
        let new_worktree = Worktree::local(
            root_dir,
            true,
            fs.clone(),
            Default::default(),
            true,
            WorktreeId::from_proto(0),
            &mut cx.to_async(),
        )
        .await
        .unwrap();
        new_worktree
            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
            .await;
        new_worktree
            .update(cx, |tree, _| {
                tree.as_local_mut()
                    .unwrap()
                    .refresh_entries_for_paths(expanded_paths)
            })
            .recv()
            .await;
        let new_snapshot =
            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        assert_eq!(
            snapshot.entries_without_ids(true),
            new_snapshot.entries_without_ids(true)
        );
    }

    let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());

    // Replaying the recorded updates onto each intermediate snapshot must
    // converge to the final snapshot (modulo pending-vs-loaded dir kinds).
    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
        for update in updates.lock().iter() {
            if update.scan_id >= prev_snapshot.scan_id() as u64 {
                prev_snapshot.apply_remote_update(update.clone(), &settings.file_scan_inclusions);
            }
        }

        assert_eq!(
            prev_snapshot
                .entries(true, 0)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            snapshot
                .entries(true, 0)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }

    // Normalizes all directory kinds to `Dir` so pending directories compare
    // equal to fully loaded ones.
    fn ignore_pending_dir(entry: &Entry) -> Entry {
        let mut entry = entry.clone();
        if entry.kind.is_dir() {
            entry.kind = EntryKind::Dir
        }
        entry
    }
}
2158
// The worktree's `UpdatedEntries` event can be used to follow along with
// all changes to the worktree's snapshot. This subscribes to that event and
// maintains a shadow copy of the entry list, asserting after every batch of
// changes that the shadow copy matches the worktree's actual entries.
fn check_worktree_change_events(tree: &mut Worktree, cx: &mut Context<Worktree>) {
    // Shadow list of entries, kept sorted by path.
    let mut entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
    cx.subscribe(&cx.entity(), move |tree, _, event, _| {
        if let Event::UpdatedEntries(changes) = event {
            for (path, _, change_type) in changes.iter() {
                let entry = tree.entry_for_path(path).cloned();
                // Find the entry's position (or insertion point) in the
                // sorted shadow list; Ok and Err both yield a usable index.
                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
                    Ok(ix) | Err(ix) => ix,
                };
                match change_type {
                    PathChange::Added => entries.insert(ix, entry.unwrap()),
                    PathChange::Removed => drop(entries.remove(ix)),
                    PathChange::Updated => {
                        let entry = entry.unwrap();
                        let existing_entry = entries.get_mut(ix).unwrap();
                        assert_eq!(existing_entry.path, entry.path);
                        *existing_entry = entry;
                    }
                    PathChange::AddedOrUpdated | PathChange::Loaded => {
                        // Upsert: replace in place when the path already
                        // exists, otherwise insert at the sorted position.
                        let entry = entry.unwrap();
                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
                            *entries.get_mut(ix).unwrap() = entry;
                        } else {
                            entries.insert(ix, entry);
                        }
                    }
                }
            }

            // After applying the reported changes, the shadow copy must match
            // the worktree's current entries exactly.
            let new_entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
        }
    })
    .detach();
}
2196
/// Performs one random mutation through the worktree's own API: roughly a
/// third of the time it deletes a random entry (never the root); otherwise it
/// creates a child under a random directory, or overwrites a random file with
/// empty contents. Returns a task resolving when the mutation completes.
///
/// NOTE(review): the RNG draw order here is part of the fuzz tests'
/// seed-reproducibility — keep the sequence of `rng` calls stable.
fn randomly_mutate_worktree(
    worktree: &mut Worktree,
    rng: &mut impl Rng,
    cx: &mut Context<Worktree>,
) -> Task<Result<()>> {
    log::info!("mutating worktree");
    let worktree = worktree.as_local_mut().unwrap();
    let snapshot = worktree.snapshot();
    // Pick any entry (files and dirs alike) as the target of the mutation.
    let entry = snapshot.entries(false, 0).choose(rng).unwrap();

    match rng.random_range(0_u32..100) {
        // ~34%: delete the chosen entry, unless it's the worktree root.
        0..=33 if entry.path.as_ref() != RelPath::empty() => {
            log::info!("deleting entry {:?} ({})", entry.path, entry.id.to_usize());
            worktree.delete_entry(entry.id, false, cx).unwrap()
        }
        _ => {
            if entry.is_dir() {
                // Create a new child (dir with 30% probability, else file).
                let child_path = entry.path.join(rel_path(&random_filename(rng)));
                let is_dir = rng.random_bool(0.3);
                log::info!(
                    "creating {} at {:?}",
                    if is_dir { "dir" } else { "file" },
                    child_path,
                );
                let task = worktree.create_entry(child_path, is_dir, None, cx);
                cx.background_spawn(async move {
                    task.await?;
                    Ok(())
                })
            } else {
                // Overwrite the chosen file with empty contents.
                log::info!(
                    "overwriting file {:?} ({})",
                    &entry.path,
                    entry.id.to_usize()
                );
                let task = worktree.write_file(
                    entry.path.clone(),
                    "".into(),
                    Default::default(),
                    encoding_rs::UTF_8,
                    false,
                    cx,
                );
                cx.background_spawn(async move {
                    task.await?;
                    Ok(())
                })
            }
        }
    }
}
2248
/// Performs one random mutation directly on the fake filesystem beneath
/// `root_path`: with `insertion_probability` creates a new file or directory;
/// with 5% probability writes a random `.gitignore`; otherwise renames or
/// deletes a random existing path.
///
/// NOTE(review): the RNG draw order is part of the fuzz tests'
/// seed-reproducibility — keep the sequence of `rng` calls stable.
async fn randomly_mutate_fs(
    fs: &Arc<dyn Fs>,
    root_path: &Path,
    insertion_probability: f64,
    rng: &mut impl Rng,
) {
    log::info!("mutating fs");
    // Partition the existing paths under `root_path` into files and dirs
    // (dirs always includes `root_path` itself).
    let mut files = Vec::new();
    let mut dirs = Vec::new();
    for path in fs.as_fake().paths(false) {
        if path.starts_with(root_path) {
            if fs.is_file(&path).await {
                files.push(path);
            } else {
                dirs.push(path);
            }
        }
    }

    // Insertion branch — forced when the tree is effectively empty (only the
    // root dir exists), so mutation always has something to work with.
    if (files.is_empty() && dirs.len() == 1) || rng.random_bool(insertion_probability) {
        let path = dirs.choose(rng).unwrap();
        let new_path = path.join(random_filename(rng));

        if rng.random() {
            log::info!(
                "creating dir {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_dir(&new_path).await.unwrap();
        } else {
            log::info!(
                "creating file {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_file(&new_path, Default::default()).await.unwrap();
        }
    } else if rng.random_bool(0.05) {
        // Gitignore branch: write a `.gitignore` in a random directory that
        // ignores a random subset of the files/dirs beneath it.
        let ignore_dir_path = dirs.choose(rng).unwrap();
        let ignore_path = ignore_dir_path.join(GITIGNORE);

        let subdirs = dirs
            .iter()
            .filter(|d| d.starts_with(ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let subfiles = files
            .iter()
            .filter(|d| d.starts_with(ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let files_to_ignore = {
            let len = rng.random_range(0..=subfiles.len());
            subfiles.choose_multiple(rng, len)
        };
        let dirs_to_ignore = {
            let len = rng.random_range(0..subdirs.len());
            subdirs.choose_multiple(rng, len)
        };

        // Ignore patterns are paths relative to the gitignore's directory.
        let mut ignore_contents = String::new();
        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
            writeln!(
                ignore_contents,
                "{}",
                path_to_ignore
                    .strip_prefix(ignore_dir_path)
                    .unwrap()
                    .to_str()
                    .unwrap()
            )
            .unwrap();
        }
        log::info!(
            "creating gitignore {:?} with contents:\n{}",
            ignore_path.strip_prefix(root_path).unwrap(),
            ignore_contents
        );
        fs.save(
            &ignore_path,
            &ignore_contents.as_str().into(),
            Default::default(),
        )
        .await
        .unwrap();
    } else {
        // Rename/delete branch: pick a random file or non-root directory
        // (`dirs[1..]` skips the root, which must never be moved or removed).
        let old_path = {
            let file_path = files.choose(rng);
            let dir_path = dirs[1..].choose(rng);
            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
        };

        let is_rename = rng.random();
        if is_rename {
            // Move `old_path` into a directory that isn't inside it.
            let new_path_parent = dirs
                .iter()
                .filter(|d| !d.starts_with(old_path))
                .choose(rng)
                .unwrap();

            // Sometimes replace an existing directory wholesale (but never an
            // ancestor of `old_path`, which would delete the source).
            let overwrite_existing_dir =
                !old_path.starts_with(new_path_parent) && rng.random_bool(0.3);
            let new_path = if overwrite_existing_dir {
                fs.remove_dir(
                    new_path_parent,
                    RemoveOptions {
                        recursive: true,
                        ignore_if_not_exists: true,
                    },
                )
                .await
                .unwrap();
                new_path_parent.to_path_buf()
            } else {
                new_path_parent.join(random_filename(rng))
            };

            log::info!(
                "renaming {:?} to {}{:?}",
                old_path.strip_prefix(root_path).unwrap(),
                if overwrite_existing_dir {
                    "overwrite "
                } else {
                    ""
                },
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.rename(
                old_path,
                &new_path,
                fs::RenameOptions {
                    overwrite: true,
                    ignore_if_exists: true,
                    create_parents: false,
                },
            )
            .await
            .unwrap();
        } else if fs.is_file(old_path).await {
            log::info!(
                "deleting file {:?}",
                old_path.strip_prefix(root_path).unwrap()
            );
            fs.remove_file(old_path, Default::default()).await.unwrap();
        } else {
            log::info!(
                "deleting dir {:?}",
                old_path.strip_prefix(root_path).unwrap()
            );
            fs.remove_dir(
                old_path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await
            .unwrap();
        }
    }
}
2409
2410fn random_filename(rng: &mut impl Rng) -> String {
2411 (0..6)
2412 .map(|_| rng.sample(rand::distr::Alphanumeric))
2413 .map(char::from)
2414 .collect()
2415}
2416
2417#[gpui::test]
2418async fn test_private_single_file_worktree(cx: &mut TestAppContext) {
2419 init_test(cx);
2420 let fs = FakeFs::new(cx.background_executor.clone());
2421 fs.insert_tree("/", json!({".env": "PRIVATE=secret\n"}))
2422 .await;
2423 let tree = Worktree::local(
2424 Path::new("/.env"),
2425 true,
2426 fs.clone(),
2427 Default::default(),
2428 true,
2429 WorktreeId::from_proto(0),
2430 &mut cx.to_async(),
2431 )
2432 .await
2433 .unwrap();
2434 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2435 .await;
2436 tree.read_with(cx, |tree, _| {
2437 let entry = tree.entry_for_path(rel_path("")).unwrap();
2438 assert!(entry.is_private);
2439 });
2440}
2441
#[gpui::test]
async fn test_repository_above_root(cx: &mut TestAppContext, executor: BackgroundExecutor) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            "subproject": {
                "a.txt": "A"
            }
        }),
    )
    .await;
    // The worktree is rooted *below* the repository's `.git` directory.
    let worktree = Worktree::local(
        path!("/root/subproject").as_ref(),
        true,
        fs.clone(),
        Arc::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();
    // The repository located above the worktree root must still be detected.
    let repos = worktree.update(cx, |worktree, _| {
        worktree.as_local().unwrap().repositories()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root")).into()]);

    // Re-scanning after an fs event on the root must not lose the repository.
    fs.touch_path(path!("/root/subproject")).await;
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    let repos = worktree.update(cx, |worktree, _| {
        worktree.as_local().unwrap().repositories()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root")).into()]);
}
2492
#[gpui::test]
async fn test_global_gitignore(executor: BackgroundExecutor, cx: &mut TestAppContext) {
    init_test(cx);

    // Set up a fake home directory containing:
    // - a global git excludes file at ~/.config/git/ignore ("foo", anchored "/bar", "baz"),
    // - a repository ("project") whose own .gitignore re-includes "baz",
    // - a nested sub-repository ("project/subrepo") with its own .git.
    let home = paths::home_dir();
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        home,
        json!({
            ".config": {
                "git": {
                    "ignore": "foo\n/bar\nbaz\n"
                }
            },
            "project": {
                ".git": {},
                ".gitignore": "!baz",
                "foo": "",
                "bar": "",
                "sub": {
                    "bar": "",
                },
                "subrepo": {
                    ".git": {},
                    "bar": ""
                },
                "baz": ""
            }
        }),
    )
    .await;
    let worktree = Worktree::local(
        home.join("project"),
        true,
        fs.clone(),
        Arc::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    // .gitignore overrides excludesFile, and anchored paths in excludesFile are resolved
    // relative to the nearest containing repository
    // (so "/bar" matches "bar" and "subrepo/bar" but not "sub/bar", and
    // "!baz" in .gitignore wins over "baz" in the global excludes file).
    worktree.update(cx, |worktree, _cx| {
        check_worktree_entries(
            worktree,
            &[],
            &["foo", "bar", "subrepo/bar"],
            &["sub/bar", "baz"],
            &[],
        );
    });

    // Ignore statuses are updated when excludesFile changes
    // (dropping "foo" from the global ignore un-ignores it after a rescan).
    fs.write(
        &home.join(".config").join("git").join("ignore"),
        "/bar\nbaz\n".as_bytes(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    worktree.update(cx, |worktree, _cx| {
        check_worktree_entries(
            worktree,
            &[],
            &["bar", "subrepo/bar"],
            &["foo", "sub/bar", "baz"],
            &[],
        );
    });

    // Statuses are updated when .git added/removed
    // (once subrepo/.git is gone, "subrepo/bar" resolves against the outer
    // repository, where anchored "/bar" only matches at the repo root, so
    // it becomes tracked).
    fs.remove_dir(
        &home.join("project").join("subrepo").join(".git"),
        RemoveOptions {
            recursive: true,
            ..Default::default()
        },
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    worktree.update(cx, |worktree, _cx| {
        check_worktree_entries(
            worktree,
            &[],
            &["bar"],
            &["foo", "sub/bar", "baz", "subrepo/bar"],
            &[],
        );
    });
}
2605
2606#[gpui::test]
2607async fn test_repo_exclude(executor: BackgroundExecutor, cx: &mut TestAppContext) {
2608 init_test(cx);
2609
2610 let fs = FakeFs::new(executor);
2611 let project_dir = Path::new(path!("/project"));
2612 fs.insert_tree(
2613 project_dir,
2614 json!({
2615 ".git": {
2616 "info": {
2617 "exclude": ".env.*"
2618 }
2619 },
2620 ".env.example": "secret=xxxx",
2621 ".env.local": "secret=1234",
2622 ".gitignore": "!.env.example",
2623 "README.md": "# Repo Exclude",
2624 "src": {
2625 "main.rs": "fn main() {}",
2626 },
2627 }),
2628 )
2629 .await;
2630
2631 let worktree = Worktree::local(
2632 project_dir,
2633 true,
2634 fs.clone(),
2635 Default::default(),
2636 true,
2637 WorktreeId::from_proto(0),
2638 &mut cx.to_async(),
2639 )
2640 .await
2641 .unwrap();
2642 worktree
2643 .update(cx, |worktree, _| {
2644 worktree.as_local().unwrap().scan_complete()
2645 })
2646 .await;
2647 cx.run_until_parked();
2648
2649 // .gitignore overrides .git/info/exclude
2650 worktree.update(cx, |worktree, _cx| {
2651 let expected_excluded_paths = [];
2652 let expected_ignored_paths = [".env.local"];
2653 let expected_tracked_paths = [".env.example", "README.md", "src/main.rs"];
2654 let expected_included_paths = [];
2655
2656 check_worktree_entries(
2657 worktree,
2658 &expected_excluded_paths,
2659 &expected_ignored_paths,
2660 &expected_tracked_paths,
2661 &expected_included_paths,
2662 );
2663 });
2664
2665 // Ignore statuses are updated when .git/info/exclude file changes
2666 fs.write(
2667 &project_dir.join(DOT_GIT).join(REPO_EXCLUDE),
2668 ".env.example".as_bytes(),
2669 )
2670 .await
2671 .unwrap();
2672 worktree
2673 .update(cx, |worktree, _| {
2674 worktree.as_local().unwrap().scan_complete()
2675 })
2676 .await;
2677 cx.run_until_parked();
2678
2679 worktree.update(cx, |worktree, _cx| {
2680 let expected_excluded_paths = [];
2681 let expected_ignored_paths = [];
2682 let expected_tracked_paths = [".env.example", ".env.local", "README.md", "src/main.rs"];
2683 let expected_included_paths = [];
2684
2685 check_worktree_entries(
2686 worktree,
2687 &expected_excluded_paths,
2688 &expected_ignored_paths,
2689 &expected_tracked_paths,
2690 &expected_included_paths,
2691 );
2692 });
2693}
2694
2695#[track_caller]
2696fn check_worktree_entries(
2697 tree: &Worktree,
2698 expected_excluded_paths: &[&str],
2699 expected_ignored_paths: &[&str],
2700 expected_tracked_paths: &[&str],
2701 expected_included_paths: &[&str],
2702) {
2703 for path in expected_excluded_paths {
2704 let entry = tree.entry_for_path(rel_path(path));
2705 assert!(
2706 entry.is_none(),
2707 "expected path '{path}' to be excluded, but got entry: {entry:?}",
2708 );
2709 }
2710 for path in expected_ignored_paths {
2711 let entry = tree
2712 .entry_for_path(rel_path(path))
2713 .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
2714 assert!(
2715 entry.is_ignored,
2716 "expected path '{path}' to be ignored, but got entry: {entry:?}",
2717 );
2718 }
2719 for path in expected_tracked_paths {
2720 let entry = tree
2721 .entry_for_path(rel_path(path))
2722 .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
2723 assert!(
2724 !entry.is_ignored || entry.is_always_included,
2725 "expected path '{path}' to be tracked, but got entry: {entry:?}",
2726 );
2727 }
2728 for path in expected_included_paths {
2729 let entry = tree
2730 .entry_for_path(rel_path(path))
2731 .unwrap_or_else(|| panic!("Missing entry for expected included path '{path}'"));
2732 assert!(
2733 entry.is_always_included,
2734 "expected path '{path}' to always be included, but got entry: {entry:?}",
2735 );
2736 }
2737}
2738
2739fn init_test(cx: &mut gpui::TestAppContext) {
2740 zlog::init_test();
2741
2742 cx.update(|cx| {
2743 let settings_store = SettingsStore::test(cx);
2744 cx.set_global(settings_store);
2745 });
2746}
2747
/// Verifies that `Worktree::load_file` decodes files stored in a variety of
/// encodings (UTF-8, Shift_JIS, EUC-JP, ISO-2022-JP, Windows-1252, GBK, and
/// UTF-16 with/without BOM) and returns an error for binary content.
#[gpui::test]
async fn test_load_file_encoding(cx: &mut TestAppContext) {
    init_test(cx);

    // One on-disk file per case: raw bytes written to the fake fs, plus the
    // text expected after decoding.
    struct TestCase {
        name: &'static str,
        bytes: Vec<u8>,
        expected_text: &'static str,
    }

    // --- Success Cases ---
    let success_cases = vec![
        TestCase {
            name: "utf8.txt",
            bytes: "こんにちは".as_bytes().to_vec(),
            expected_text: "こんにちは",
        },
        TestCase {
            name: "sjis.txt",
            bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
            expected_text: "こんにちは",
        },
        TestCase {
            name: "eucjp.txt",
            bytes: vec![0xa4, 0xb3, 0xa4, 0xf3, 0xa4, 0xcb, 0xa4, 0xc1, 0xa4, 0xcf],
            expected_text: "こんにちは",
        },
        TestCase {
            name: "iso2022jp.txt",
            bytes: vec![
                0x1b, 0x24, 0x42, 0x24, 0x33, 0x24, 0x73, 0x24, 0x4b, 0x24, 0x41, 0x24, 0x4f, 0x1b,
                0x28, 0x42,
            ],
            expected_text: "こんにちは",
        },
        TestCase {
            name: "win1252.txt",
            bytes: vec![0x43, 0x61, 0x66, 0xe9],
            expected_text: "Café",
        },
        TestCase {
            name: "gbk.txt",
            bytes: vec![
                0xbd, 0xf1, 0xcc, 0xec, 0xcc, 0xec, 0xc6, 0xf8, 0xb2, 0xbb, 0xb4, 0xed,
            ],
            expected_text: "今天天气不错",
        },
        // UTF-16LE with BOM
        TestCase {
            name: "utf16le_bom.txt",
            bytes: vec![
                0xFF, 0xFE, // BOM
                0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F, 0x30,
            ],
            expected_text: "こんにちは",
        },
        // UTF-16BE with BOM
        TestCase {
            name: "utf16be_bom.txt",
            bytes: vec![
                0xFE, 0xFF, // BOM
                0x30, 0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F,
            ],
            expected_text: "こんにちは",
        },
        // UTF-16LE without BOM (ASCII only)
        // This relies on the "null byte heuristic" we implemented.
        // "ABC" -> 41 00 42 00 43 00
        TestCase {
            name: "utf16le_ascii_no_bom.txt",
            bytes: vec![0x41, 0x00, 0x42, 0x00, 0x43, 0x00],
            expected_text: "ABC",
        },
    ];

    // --- Failure Cases ---
    let failure_cases = vec![
        // Binary File (Should be detected by heuristic and return Error)
        // Contains random bytes and mixed nulls that don't match UTF-16 patterns
        TestCase {
            name: "binary.bin",
            bytes: vec![0x00, 0xFF, 0x12, 0x00, 0x99, 0x88, 0x77, 0x66, 0x00],
            expected_text: "", // Not used
        },
    ];

    // Platform-appropriate absolute root for the fake filesystem.
    let root_path = if cfg!(windows) {
        Path::new("C:\\root")
    } else {
        Path::new("/root")
    };

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.create_dir(root_path).await.unwrap();

    // Write every case's raw bytes to disk before opening the worktree.
    for case in success_cases.iter().chain(failure_cases.iter()) {
        let path = root_path.join(case.name);
        fs.write(&path, &case.bytes).await.unwrap();
    }

    let tree = Worktree::local(
        root_path,
        true,
        fs,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Builds an Arc<RelPath> from a file name using the local path style.
    // NOTE: this closure shadows the `rel_path` helper imported at the top
    // of the file.
    let rel_path = |name: &str| {
        RelPath::new(&Path::new(name), PathStyle::local())
            .unwrap()
            .into_arc()
    };

    // Run Success Tests
    for case in success_cases {
        let loaded = tree
            .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx))
            .await;
        if let Err(e) = &loaded {
            panic!("Failed to load success case '{}': {:?}", case.name, e);
        }
        let loaded = loaded.unwrap();
        assert_eq!(
            loaded.text, case.expected_text,
            "Encoding mismatch for file: {}",
            case.name
        );
    }

    // Run Failure Tests
    for case in failure_cases {
        let loaded = tree
            .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx))
            .await;
        assert!(
            loaded.is_err(),
            "Failure case '{}' unexpectedly succeeded! It should have been detected as binary.",
            case.name
        );
        let err_msg = loaded.unwrap_err().to_string();
        println!("Got expected error for {}: {}", case.name, err_msg);
    }
}
2899
/// Verifies that `Worktree::write_file` encodes text into the requested
/// encoding, emitting a BOM only when `has_bom` is set, by checking the
/// exact bytes written to the fake filesystem.
#[gpui::test]
async fn test_write_file_encoding(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Platform-appropriate absolute root for the fake filesystem.
    let root_path = if cfg!(windows) {
        Path::new("C:\\root")
    } else {
        Path::new("/root")
    };
    fs.create_dir(root_path).await.unwrap();

    let worktree = Worktree::local(
        root_path,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Define test case structure
    struct TestCase {
        name: &'static str,
        text: &'static str,
        encoding: &'static encoding_rs::Encoding,
        has_bom: bool,
        expected_bytes: Vec<u8>,
    }

    let cases = vec![
        // Shift_JIS with Japanese
        TestCase {
            name: "Shift_JIS with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::SHIFT_JIS,
            has_bom: false,
            expected_bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
        },
        // UTF-8 No BOM
        TestCase {
            name: "UTF-8 No BOM",
            text: "AB",
            encoding: encoding_rs::UTF_8,
            has_bom: false,
            expected_bytes: vec![0x41, 0x42],
        },
        // UTF-8 with BOM
        TestCase {
            name: "UTF-8 with BOM",
            text: "AB",
            encoding: encoding_rs::UTF_8,
            has_bom: true,
            expected_bytes: vec![0xEF, 0xBB, 0xBF, 0x41, 0x42],
        },
        // UTF-16LE No BOM with Japanese
        // NOTE: This passes thanks to the manual encoding fix implemented in `write_file`.
        TestCase {
            name: "UTF-16LE No BOM with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::UTF_16LE,
            has_bom: false,
            expected_bytes: vec![0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f, 0x30],
        },
        // UTF-16LE with BOM
        TestCase {
            name: "UTF-16LE with BOM",
            text: "A",
            encoding: encoding_rs::UTF_16LE,
            has_bom: true,
            expected_bytes: vec![0xFF, 0xFE, 0x41, 0x00],
        },
        // UTF-16BE No BOM with Japanese
        // NOTE: This passes thanks to the manual encoding fix.
        TestCase {
            name: "UTF-16BE No BOM with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::UTF_16BE,
            has_bom: false,
            expected_bytes: vec![0x30, 0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f],
        },
        // UTF-16BE with BOM
        TestCase {
            name: "UTF-16BE with BOM",
            text: "A",
            encoding: encoding_rs::UTF_16BE,
            has_bom: true,
            expected_bytes: vec![0xFE, 0xFF, 0x00, 0x41],
        },
    ];

    for (i, case) in cases.into_iter().enumerate() {
        // Each case writes to its own file so results can't bleed together.
        let file_name = format!("test_{}.txt", i);
        let path: Arc<Path> = Path::new(&file_name).into();
        let file_path = root_path.join(&file_name);

        // Create the (empty) file first so write_file targets an existing entry.
        fs.insert_file(&file_path, "".into()).await;

        let rel_path = RelPath::new(&path, PathStyle::local()).unwrap().into_arc();
        let text = text::Rope::from(case.text);

        let task = worktree.update(cx, |wt, cx| {
            wt.write_file(
                rel_path,
                text,
                text::LineEnding::Unix,
                case.encoding,
                case.has_bom,
                cx,
            )
        });

        if let Err(e) = task.await {
            panic!("Unexpected error in case '{}': {:?}", case.name, e);
        }

        // Compare the raw bytes on disk against the expected encoded form.
        let bytes = fs.load_bytes(&file_path).await.unwrap();

        assert_eq!(
            bytes, case.expected_bytes,
            "case '{}' mismatch. Expected {:?}, but got {:?}",
            case.name, case.expected_bytes, bytes
        );
    }
}
3028
/// Verifies that `refresh_entries_for_paths` on an unscanned worktree creates
/// entries for a requested path AND all of its ancestor directories, including
/// the immediate children of those ancestors.
#[gpui::test]
async fn test_refresh_entries_for_paths_creates_ancestors(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "a": {
                "b": {
                    "c": {
                        "deep_file.txt": "content",
                        "sibling.txt": "content"
                    },
                    "d": {
                        "under_sibling_dir.txt": "content"
                    }
                }
            }
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        false, // Disable scanning so the initial scan doesn't discover any entries
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // With scanning disabled, nothing below the root should be known yet.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|e| e.path.as_ref())
                .collect::<Vec<_>>(),
            &[rel_path("")],
            "Only root entry should exist when scanning is disabled"
        );

        assert!(tree.entry_for_path(rel_path("a")).is_none());
        assert!(tree.entry_for_path(rel_path("a/b")).is_none());
        assert!(tree.entry_for_path(rel_path("a/b/c")).is_none());
        assert!(
            tree.entry_for_path(rel_path("a/b/c/deep_file.txt"))
                .is_none()
        );
    });

    // Request a refresh of a single deeply nested file and wait for it to land.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("a/b/c/deep_file.txt").into()])
    })
    .recv()
    .await;

    // The refresh should have materialized every ancestor of the requested
    // path and the ancestors' direct children (a/b/c/sibling.txt, a/b/d) —
    // but not deeper contents such as a/b/d/under_sibling_dir.txt.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|e| e.path.as_ref())
                .collect::<Vec<_>>(),
            &[
                rel_path(""),
                rel_path("a"),
                rel_path("a/b"),
                rel_path("a/b/c"),
                rel_path("a/b/c/deep_file.txt"),
                rel_path("a/b/c/sibling.txt"),
                rel_path("a/b/d"),
            ],
            "All ancestors should be created when refreshing a deeply nested path"
        );
    });
}
3110
/// Verifies that deleting the root file of a single-file worktree causes the
/// worktree to emit `Event::Deleted`.
#[gpui::test]
async fn test_single_file_worktree_deleted(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());

    fs.insert_tree(
        "/root",
        json!({
            "test.txt": "content",
        }),
    )
    .await;

    // Open the worktree directly on the file (not its parent directory).
    let tree = Worktree::local(
        Path::new("/root/test.txt"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert!(tree.is_single_file(), "Should be a single-file worktree");
        assert_eq!(tree.abs_path().as_ref(), Path::new("/root/test.txt"));
    });

    // Delete the file
    fs.remove_file(Path::new("/root/test.txt"), Default::default())
        .await
        .unwrap();

    // Subscribe to worktree events
    // (the flag is a single-threaded Rc<Cell> shared with the subscription).
    let deleted_event_received = Rc::new(Cell::new(false));
    let _subscription = cx.update({
        let deleted_event_received = deleted_event_received.clone();
        |cx| {
            cx.subscribe(&tree, move |_, event, _| {
                if matches!(event, Event::Deleted) {
                    deleted_event_received.set(true);
                }
            })
        }
    });

    // Trigger filesystem events - the scanner should detect the file is gone immediately
    // and emit a Deleted event
    // (advancing the clock flushes any debounced/delayed scanner work).
    cx.background_executor.run_until_parked();
    cx.background_executor
        .advance_clock(std::time::Duration::from_secs(1));
    cx.background_executor.run_until_parked();

    assert!(
        deleted_event_received.get(),
        "Should receive Deleted event when single-file worktree root is deleted"
    );
}