1use crate::{
2 Entry, EntryKind, Event, PathChange, Worktree, WorktreeModelHandle,
3 worktree_settings::WorktreeSettings,
4};
5use anyhow::Result;
6use encoding_rs::UTF_8;
7use fs::{FakeFs, Fs, RealFs, RemoveOptions, encodings::EncodingWrapper};
8use git::GITIGNORE;
9use gpui::{AppContext as _, BackgroundExecutor, BorrowAppContext, Context, Task, TestAppContext};
10use parking_lot::Mutex;
11use postage::stream::Stream;
12use pretty_assertions::assert_eq;
13use rand::prelude::*;
14
15use serde_json::json;
16use settings::{Settings, SettingsStore};
17use std::{
18 env,
19 fmt::Write,
20 mem,
21 path::{Path, PathBuf},
22 sync::Arc,
23};
24use text::Rope;
25use util::{
26 ResultExt, path,
27 rel_path::{RelPath, rel_path},
28 test::TempTree,
29};
30
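// Verifies basic traversal: the gitignored entry `a/b` is omitted from
// `entries(false, ..)` but included in `entries(true, ..)`.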
31#[gpui::test]
32async fn test_traversal(cx: &mut TestAppContext) {
33 init_test(cx);
34 let fs = FakeFs::new(cx.background_executor.clone());
35 fs.insert_tree(
36 "/root",
37 json!({
38 ".gitignore": "a/b\n",
39 "a": {
40 "b": "",
41 "c": "",
42 }
43 }),
44 )
45 .await;
46
47 let tree = Worktree::local(
48 Path::new("/root"),
49 true,
50 fs,
51 Default::default(),
52 &mut cx.to_async(),
53 )
54 .await
55 .unwrap();
56 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
57 .await;
58
59 tree.read_with(cx, |tree, _| {
60 assert_eq!(
61 tree.entries(false, 0)
62 .map(|entry| entry.path.as_ref())
63 .collect::<Vec<_>>(),
64 vec![
65 rel_path(""),
66 rel_path(".gitignore"),
67 rel_path("a"),
68 rel_path("a/c"),
69 ]
70 );
71 assert_eq!(
72 tree.entries(true, 0)
73 .map(|entry| entry.path.as_ref())
74 .collect::<Vec<_>>(),
75 vec![
76 rel_path(""),
77 rel_path(".gitignore"),
78 rel_path("a"),
79 rel_path("a/b"),
80 rel_path("a/c"),
81 ]
82 );
83 })
84}
85
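// Symlinks that point back into the worktree (forming a cycle) are listed as
// entries but not traversed into, and renaming one is picked up by the scanner.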
86#[gpui::test(iterations = 10)]
87async fn test_circular_symlinks(cx: &mut TestAppContext) {
88 init_test(cx);
89 let fs = FakeFs::new(cx.background_executor.clone());
90 fs.insert_tree(
91 "/root",
92 json!({
93 "lib": {
94 "a": {
95 "a.txt": ""
96 },
97 "b": {
98 "b.txt": ""
99 }
100 }
101 }),
102 )
103 .await;
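    // Each symlink resolves to `/root/lib`, creating a cycle:
    // lib/a/lib -> lib and lib/b/lib -> lib.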
104 fs.create_symlink("/root/lib/a/lib".as_ref(), "..".into())
105 .await
106 .unwrap();
107 fs.create_symlink("/root/lib/b/lib".as_ref(), "..".into())
108 .await
109 .unwrap();
110
111 let tree = Worktree::local(
112 Path::new("/root"),
113 true,
114 fs.clone(),
115 Default::default(),
116 &mut cx.to_async(),
117 )
118 .await
119 .unwrap();
120
121 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
122 .await;
123
124 tree.read_with(cx, |tree, _| {
125 assert_eq!(
126 tree.entries(false, 0)
127 .map(|entry| entry.path.as_ref())
128 .collect::<Vec<_>>(),
129 vec![
130 rel_path(""),
131 rel_path("lib"),
132 rel_path("lib/a"),
133 rel_path("lib/a/a.txt"),
134 rel_path("lib/a/lib"),
135 rel_path("lib/b"),
136 rel_path("lib/b/b.txt"),
137 rel_path("lib/b/lib"),
138 ]
139 );
140 });
141
142 fs.rename(
143 Path::new("/root/lib/a/lib"),
144 Path::new("/root/lib/a/lib-2"),
145 Default::default(),
146 )
147 .await
148 .unwrap();
149 cx.executor().run_until_parked();
150 tree.read_with(cx, |tree, _| {
151 assert_eq!(
152 tree.entries(false, 0)
153 .map(|entry| entry.path.as_ref())
154 .collect::<Vec<_>>(),
155 vec![
156 rel_path(""),
157 rel_path("lib"),
158 rel_path("lib/a"),
159 rel_path("lib/a/a.txt"),
160 rel_path("lib/a/lib-2"),
161 rel_path("lib/b"),
162 rel_path("lib/b/b.txt"),
163 rel_path("lib/b/lib"),
164 ]
165 );
166 });
167}
168
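// Symlinks that lead outside the worktree root are surfaced as external,
// unloaded entries and are only expanded on demand.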
169#[gpui::test]
170async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
171 init_test(cx);
172 let fs = FakeFs::new(cx.background_executor.clone());
173 fs.insert_tree(
174 "/root",
175 json!({
176 "dir1": {
177 "deps": {
                    // symlinks to dir2 and dir3 are created below this tree
179 },
180 "src": {
181 "a.rs": "",
182 "b.rs": "",
183 },
184 },
185 "dir2": {
186 "src": {
187 "c.rs": "",
188 "d.rs": "",
189 }
190 },
191 "dir3": {
192 "deps": {},
193 "src": {
194 "e.rs": "",
195 "f.rs": "",
196 },
197 }
198 }),
199 )
200 .await;
201
202 // These symlinks point to directories outside of the worktree's root, dir1.
203 fs.create_symlink("/root/dir1/deps/dep-dir2".as_ref(), "../../dir2".into())
204 .await
205 .unwrap();
206 fs.create_symlink("/root/dir1/deps/dep-dir3".as_ref(), "../../dir3".into())
207 .await
208 .unwrap();
209
210 let tree = Worktree::local(
211 Path::new("/root/dir1"),
212 true,
213 fs.clone(),
214 Default::default(),
215 &mut cx.to_async(),
216 )
217 .await
218 .unwrap();
219
220 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
221 .await;
222
223 let tree_updates = Arc::new(Mutex::new(Vec::new()));
224 tree.update(cx, |_, cx| {
225 let tree_updates = tree_updates.clone();
226 cx.subscribe(&tree, move |_, _, event, _| {
227 if let Event::UpdatedEntries(update) = event {
228 tree_updates.lock().extend(
229 update
230 .iter()
231 .map(|(path, _, change)| (path.clone(), *change)),
232 );
233 }
234 })
235 .detach();
236 });
237
238 // The symlinked directories are not scanned by default.
239 tree.read_with(cx, |tree, _| {
240 assert_eq!(
241 tree.entries(true, 0)
242 .map(|entry| (entry.path.as_ref(), entry.is_external))
243 .collect::<Vec<_>>(),
244 vec![
245 (rel_path(""), false),
246 (rel_path("deps"), false),
247 (rel_path("deps/dep-dir2"), true),
248 (rel_path("deps/dep-dir3"), true),
249 (rel_path("src"), false),
250 (rel_path("src/a.rs"), false),
251 (rel_path("src/b.rs"), false),
252 ]
253 );
254
255 assert_eq!(
256 tree.entry_for_path(rel_path("deps/dep-dir2")).unwrap().kind,
257 EntryKind::UnloadedDir
258 );
259 });
260
261 // Expand one of the symlinked directories.
262 tree.read_with(cx, |tree, _| {
263 tree.as_local()
264 .unwrap()
265 .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3").into()])
266 })
267 .recv()
268 .await;
269
270 // The expanded directory's contents are loaded. Subdirectories are
271 // not scanned yet.
272 tree.read_with(cx, |tree, _| {
273 assert_eq!(
274 tree.entries(true, 0)
275 .map(|entry| (entry.path.as_ref(), entry.is_external))
276 .collect::<Vec<_>>(),
277 vec![
278 (rel_path(""), false),
279 (rel_path("deps"), false),
280 (rel_path("deps/dep-dir2"), true),
281 (rel_path("deps/dep-dir3"), true),
282 (rel_path("deps/dep-dir3/deps"), true),
283 (rel_path("deps/dep-dir3/src"), true),
284 (rel_path("src"), false),
285 (rel_path("src/a.rs"), false),
286 (rel_path("src/b.rs"), false),
287 ]
288 );
289 });
290 assert_eq!(
291 mem::take(&mut *tree_updates.lock()),
292 &[
293 (rel_path("deps/dep-dir3").into(), PathChange::Loaded),
294 (rel_path("deps/dep-dir3/deps").into(), PathChange::Loaded),
295 (rel_path("deps/dep-dir3/src").into(), PathChange::Loaded)
296 ]
297 );
298
299 // Expand a subdirectory of one of the symlinked directories.
300 tree.read_with(cx, |tree, _| {
301 tree.as_local()
302 .unwrap()
303 .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3/src").into()])
304 })
305 .recv()
306 .await;
307
308 // The expanded subdirectory's contents are loaded.
309 tree.read_with(cx, |tree, _| {
310 assert_eq!(
311 tree.entries(true, 0)
312 .map(|entry| (entry.path.as_ref(), entry.is_external))
313 .collect::<Vec<_>>(),
314 vec![
315 (rel_path(""), false),
316 (rel_path("deps"), false),
317 (rel_path("deps/dep-dir2"), true),
318 (rel_path("deps/dep-dir3"), true),
319 (rel_path("deps/dep-dir3/deps"), true),
320 (rel_path("deps/dep-dir3/src"), true),
321 (rel_path("deps/dep-dir3/src/e.rs"), true),
322 (rel_path("deps/dep-dir3/src/f.rs"), true),
323 (rel_path("src"), false),
324 (rel_path("src/a.rs"), false),
325 (rel_path("src/b.rs"), false),
326 ]
327 );
328 });
329
330 assert_eq!(
331 mem::take(&mut *tree_updates.lock()),
332 &[
333 (rel_path("deps/dep-dir3/src").into(), PathChange::Loaded),
334 (
335 rel_path("deps/dep-dir3/src/e.rs").into(),
336 PathChange::Loaded
337 ),
338 (
339 rel_path("deps/dep-dir3/src/f.rs").into(),
340 PathChange::Loaded
341 )
342 ]
343 );
344}
345
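// macOS file systems are typically case-insensitive, so a rename that only
// changes the case of a file name must still be detected as a change.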
346#[cfg(target_os = "macos")]
347#[gpui::test]
348async fn test_renaming_case_only(cx: &mut TestAppContext) {
349 cx.executor().allow_parking();
350 init_test(cx);
351
352 const OLD_NAME: &str = "aaa.rs";
353 const NEW_NAME: &str = "AAA.rs";
354
355 let fs = Arc::new(RealFs::new(None, cx.executor()));
356 let temp_root = TempTree::new(json!({
357 OLD_NAME: "",
358 }));
359
360 let tree = Worktree::local(
361 temp_root.path(),
362 true,
363 fs.clone(),
364 Default::default(),
365 &mut cx.to_async(),
366 )
367 .await
368 .unwrap();
369
370 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
371 .await;
372 tree.read_with(cx, |tree, _| {
373 assert_eq!(
374 tree.entries(true, 0)
375 .map(|entry| entry.path.as_ref())
376 .collect::<Vec<_>>(),
377 vec![rel_path(""), rel_path(OLD_NAME)]
378 );
379 });
380
381 fs.rename(
382 &temp_root.path().join(OLD_NAME),
383 &temp_root.path().join(NEW_NAME),
384 fs::RenameOptions {
385 overwrite: true,
386 ignore_if_exists: true,
387 },
388 )
389 .await
390 .unwrap();
391
392 tree.flush_fs_events(cx).await;
393
394 tree.read_with(cx, |tree, _| {
395 assert_eq!(
396 tree.entries(true, 0)
397 .map(|entry| entry.path.as_ref())
398 .collect::<Vec<_>>(),
399 vec![rel_path(""), rel_path(NEW_NAME)]
400 );
401 });
402}
403
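// Gitignored directories are left unscanned until a file inside them is
// opened, and only the directories along the opened path are then loaded.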
404#[gpui::test]
405async fn test_open_gitignored_files(cx: &mut TestAppContext) {
406 init_test(cx);
407 let fs = FakeFs::new(cx.background_executor.clone());
408 fs.insert_tree(
409 "/root",
410 json!({
411 ".gitignore": "node_modules\n",
412 "one": {
413 "node_modules": {
414 "a": {
415 "a1.js": "a1",
416 "a2.js": "a2",
417 },
418 "b": {
419 "b1.js": "b1",
420 "b2.js": "b2",
421 },
422 "c": {
423 "c1.js": "c1",
424 "c2.js": "c2",
425 }
426 },
427 },
428 "two": {
429 "x.js": "",
430 "y.js": "",
431 },
432 }),
433 )
434 .await;
435
436 let tree = Worktree::local(
437 Path::new("/root"),
438 true,
439 fs.clone(),
440 Default::default(),
441 &mut cx.to_async(),
442 )
443 .await
444 .unwrap();
445
446 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
447 .await;
448
449 tree.read_with(cx, |tree, _| {
450 assert_eq!(
451 tree.entries(true, 0)
452 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
453 .collect::<Vec<_>>(),
454 vec![
455 (rel_path(""), false),
456 (rel_path(".gitignore"), false),
457 (rel_path("one"), false),
458 (rel_path("one/node_modules"), true),
459 (rel_path("two"), false),
460 (rel_path("two/x.js"), false),
461 (rel_path("two/y.js"), false),
462 ]
463 );
464 });
465
466 // Open a file that is nested inside of a gitignored directory that
467 // has not yet been expanded.
468 let prev_read_dir_count = fs.read_dir_call_count();
469 let loaded = tree
470 .update(cx, |tree, cx| {
471 tree.load_file(
472 "one/node_modules/b/b1.js".as_ref(),
473 None,
474 false,
475 false,
476 None,
477 cx,
478 )
479 })
480 .await
481 .unwrap();
482
483 tree.read_with(cx, |tree, _| {
484 assert_eq!(
485 tree.entries(true, 0)
486 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
487 .collect::<Vec<_>>(),
488 vec![
489 (rel_path(""), false),
490 (rel_path(".gitignore"), false),
491 (rel_path("one"), false),
492 (rel_path("one/node_modules"), true),
493 (rel_path("one/node_modules/a"), true),
494 (rel_path("one/node_modules/b"), true),
495 (rel_path("one/node_modules/b/b1.js"), true),
496 (rel_path("one/node_modules/b/b2.js"), true),
497 (rel_path("one/node_modules/c"), true),
498 (rel_path("two"), false),
499 (rel_path("two/x.js"), false),
500 (rel_path("two/y.js"), false),
501 ]
502 );
503
504 assert_eq!(
505 loaded.file.path.as_ref(),
506 rel_path("one/node_modules/b/b1.js")
507 );
508
509 // Only the newly-expanded directories are scanned.
510 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
511 });
512
513 // Open another file in a different subdirectory of the same
514 // gitignored directory.
515 let prev_read_dir_count = fs.read_dir_call_count();
516 let loaded = tree
517 .update(cx, |tree, cx| {
518 tree.load_file(
519 "one/node_modules/a/a2.js".as_ref(),
520 None,
521 false,
522 false,
523 None,
524 cx,
525 )
526 })
527 .await
528 .unwrap();
529
530 tree.read_with(cx, |tree, _| {
531 assert_eq!(
532 tree.entries(true, 0)
533 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
534 .collect::<Vec<_>>(),
535 vec![
536 (rel_path(""), false),
537 (rel_path(".gitignore"), false),
538 (rel_path("one"), false),
539 (rel_path("one/node_modules"), true),
540 (rel_path("one/node_modules/a"), true),
541 (rel_path("one/node_modules/a/a1.js"), true),
542 (rel_path("one/node_modules/a/a2.js"), true),
543 (rel_path("one/node_modules/b"), true),
544 (rel_path("one/node_modules/b/b1.js"), true),
545 (rel_path("one/node_modules/b/b2.js"), true),
546 (rel_path("one/node_modules/c"), true),
547 (rel_path("two"), false),
548 (rel_path("two/x.js"), false),
549 (rel_path("two/y.js"), false),
550 ]
551 );
552
553 assert_eq!(
554 loaded.file.path.as_ref(),
555 rel_path("one/node_modules/a/a2.js")
556 );
557
558 // Only the newly-expanded directory is scanned.
559 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
560 });
561
562 let path = PathBuf::from("/root/one/node_modules/c/lib");
563
564 // No work happens when files and directories change within an unloaded directory.
565 let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
566 // When we open a directory, we check each ancestor whether it's a git
567 // repository. That means we have an fs.metadata call per ancestor that we
568 // need to subtract here.
569 let ancestors = path.ancestors().count();
570
571 fs.create_dir(path.as_ref()).await.unwrap();
572 cx.executor().run_until_parked();
573
574 assert_eq!(
575 fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count - ancestors,
576 0
577 );
578}
579
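// When a .gitignore change un-ignores a directory, its contents are scanned,
// while any newly ignored subdirectories stay unloaded.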
580#[gpui::test]
581async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
582 init_test(cx);
583 let fs = FakeFs::new(cx.background_executor.clone());
584 fs.insert_tree(
585 "/root",
586 json!({
587 ".gitignore": "node_modules\n",
588 "a": {
589 "a.js": "",
590 },
591 "b": {
592 "b.js": "",
593 },
594 "node_modules": {
595 "c": {
596 "c.js": "",
597 },
598 "d": {
599 "d.js": "",
600 "e": {
601 "e1.js": "",
602 "e2.js": "",
603 },
604 "f": {
605 "f1.js": "",
606 "f2.js": "",
607 }
608 },
609 },
610 }),
611 )
612 .await;
613
614 let tree = Worktree::local(
615 Path::new("/root"),
616 true,
617 fs.clone(),
618 Default::default(),
619 &mut cx.to_async(),
620 )
621 .await
622 .unwrap();
623
624 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
625 .await;
626
627 // Open a file within the gitignored directory, forcing some of its
628 // subdirectories to be read, but not all.
629 let read_dir_count_1 = fs.read_dir_call_count();
630 tree.read_with(cx, |tree, _| {
631 tree.as_local()
632 .unwrap()
633 .refresh_entries_for_paths(vec![rel_path("node_modules/d/d.js").into()])
634 })
635 .recv()
636 .await;
637
638 // Those subdirectories are now loaded.
639 tree.read_with(cx, |tree, _| {
640 assert_eq!(
641 tree.entries(true, 0)
642 .map(|e| (e.path.as_ref(), e.is_ignored))
643 .collect::<Vec<_>>(),
644 &[
645 (rel_path(""), false),
646 (rel_path(".gitignore"), false),
647 (rel_path("a"), false),
648 (rel_path("a/a.js"), false),
649 (rel_path("b"), false),
650 (rel_path("b/b.js"), false),
651 (rel_path("node_modules"), true),
652 (rel_path("node_modules/c"), true),
653 (rel_path("node_modules/d"), true),
654 (rel_path("node_modules/d/d.js"), true),
655 (rel_path("node_modules/d/e"), true),
656 (rel_path("node_modules/d/f"), true),
657 ]
658 );
659 });
660 let read_dir_count_2 = fs.read_dir_call_count();
661 assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
662
663 // Update the gitignore so that node_modules is no longer ignored,
664 // but a subdirectory is ignored
    fs.save(
        "/root/.gitignore".as_ref(),
        &Rope::from_str("e", cx.background_executor()),
        Default::default(),
        EncodingWrapper::new(UTF_8),
    )
    .await
    .unwrap();
673 cx.executor().run_until_parked();
674
675 // All of the directories that are no longer ignored are now loaded.
676 tree.read_with(cx, |tree, _| {
677 assert_eq!(
678 tree.entries(true, 0)
679 .map(|e| (e.path.as_ref(), e.is_ignored))
680 .collect::<Vec<_>>(),
681 &[
682 (rel_path(""), false),
683 (rel_path(".gitignore"), false),
684 (rel_path("a"), false),
685 (rel_path("a/a.js"), false),
686 (rel_path("b"), false),
687 (rel_path("b/b.js"), false),
688 // This directory is no longer ignored
689 (rel_path("node_modules"), false),
690 (rel_path("node_modules/c"), false),
691 (rel_path("node_modules/c/c.js"), false),
692 (rel_path("node_modules/d"), false),
693 (rel_path("node_modules/d/d.js"), false),
694 // This subdirectory is now ignored
695 (rel_path("node_modules/d/e"), true),
696 (rel_path("node_modules/d/f"), false),
697 (rel_path("node_modules/d/f/f1.js"), false),
698 (rel_path("node_modules/d/f/f2.js"), false),
699 ]
700 );
701 });
702
703 // Each of the newly-loaded directories is scanned only once.
704 let read_dir_count_3 = fs.read_dir_call_count();
705 assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
706}
707
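// Writing files through the worktree creates entries for them and preserves
// their ignored status.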
708#[gpui::test]
709async fn test_write_file(cx: &mut TestAppContext) {
710 init_test(cx);
711 cx.executor().allow_parking();
712 let dir = TempTree::new(json!({
713 ".git": {},
714 ".gitignore": "ignored-dir\n",
715 "tracked-dir": {},
716 "ignored-dir": {}
717 }));
718
719 let worktree = Worktree::local(
720 dir.path(),
721 true,
722 Arc::new(RealFs::new(None, cx.executor())),
723 Default::default(),
724 &mut cx.to_async(),
725 )
726 .await
727 .unwrap();
728
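    // Ensure the global fs watcher used on non-macOS platforms is initialized
    // before files are written below.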
729 #[cfg(not(target_os = "macos"))]
730 fs::fs_watcher::global(|_| {}).unwrap();
731
732 cx.read(|cx| worktree.read(cx).as_local().unwrap().scan_complete())
733 .await;
734 worktree.flush_fs_events(cx).await;
735
736 worktree
737 .update(cx, |tree, cx| {
738 tree.write_file(
739 rel_path("tracked-dir/file.txt").into(),
740 Rope::from_str("hello", cx.background_executor()),
741 Default::default(),
742 cx,
743 UTF_8,
744 )
745 })
746 .await
747 .unwrap();
748 worktree
749 .update(cx, |tree, cx| {
750 tree.write_file(
751 rel_path("ignored-dir/file.txt").into(),
752 Rope::from_str("world", cx.background_executor()),
753 Default::default(),
754 cx,
755 UTF_8,
756 )
757 })
758 .await
759 .unwrap();
760 worktree.read_with(cx, |tree, _| {
761 let tracked = tree
762 .entry_for_path(rel_path("tracked-dir/file.txt"))
763 .unwrap();
764 let ignored = tree
765 .entry_for_path(rel_path("ignored-dir/file.txt"))
766 .unwrap();
767 assert!(!tracked.is_ignored);
768 assert!(ignored.is_ignored);
769 });
770}
771
772#[gpui::test]
773async fn test_file_scan_inclusions(cx: &mut TestAppContext) {
774 init_test(cx);
775 cx.executor().allow_parking();
776 let dir = TempTree::new(json!({
777 ".gitignore": "**/target\n/node_modules\ntop_level.txt\n",
778 "target": {
779 "index": "blah2"
780 },
781 "node_modules": {
782 ".DS_Store": "",
783 "prettier": {
784 "package.json": "{}",
785 },
786 },
787 "src": {
788 ".DS_Store": "",
789 "foo": {
790 "foo.rs": "mod another;\n",
791 "another.rs": "// another",
792 },
793 "bar": {
794 "bar.rs": "// bar",
795 },
796 "lib.rs": "mod foo;\nmod bar;\n",
797 },
798 "top_level.txt": "top level file",
799 ".DS_Store": "",
800 }));
801 cx.update(|cx| {
802 cx.update_global::<SettingsStore, _>(|store, cx| {
803 store.update_user_settings(cx, |settings| {
804 settings.project.worktree.file_scan_exclusions = Some(vec![]);
805 settings.project.worktree.file_scan_inclusions = Some(vec![
806 "node_modules/**/package.json".to_string(),
807 "**/.DS_Store".to_string(),
808 ]);
809 });
810 });
811 });
812
813 let tree = Worktree::local(
814 dir.path(),
815 true,
816 Arc::new(RealFs::new(None, cx.executor())),
817 Default::default(),
818 &mut cx.to_async(),
819 )
820 .await
821 .unwrap();
822 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
823 .await;
824 tree.flush_fs_events(cx).await;
825 tree.read_with(cx, |tree, _| {
        // Assert that file_scan_inclusions keeps matching paths loaded even
        // inside gitignored directories such as node_modules.
827 check_worktree_entries(
828 tree,
829 &[],
830 &["target", "node_modules"],
831 &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
832 &[
833 "node_modules/prettier/package.json",
834 ".DS_Store",
835 "node_modules/.DS_Store",
836 "src/.DS_Store",
837 ],
838 )
839 });
840}
841
842#[gpui::test]
843async fn test_file_scan_exclusions_overrules_inclusions(cx: &mut TestAppContext) {
844 init_test(cx);
845 cx.executor().allow_parking();
846 let dir = TempTree::new(json!({
847 ".gitignore": "**/target\n/node_modules\n",
848 "target": {
849 "index": "blah2"
850 },
851 "node_modules": {
852 ".DS_Store": "",
853 "prettier": {
854 "package.json": "{}",
855 },
856 },
857 "src": {
858 ".DS_Store": "",
859 "foo": {
860 "foo.rs": "mod another;\n",
861 "another.rs": "// another",
862 },
863 },
864 ".DS_Store": "",
865 }));
866
867 cx.update(|cx| {
868 cx.update_global::<SettingsStore, _>(|store, cx| {
869 store.update_user_settings(cx, |settings| {
870 settings.project.worktree.file_scan_exclusions =
871 Some(vec!["**/.DS_Store".to_string()]);
872 settings.project.worktree.file_scan_inclusions =
873 Some(vec!["**/.DS_Store".to_string()]);
874 });
875 });
876 });
877
878 let tree = Worktree::local(
879 dir.path(),
880 true,
881 Arc::new(RealFs::new(None, cx.executor())),
882 Default::default(),
883 &mut cx.to_async(),
884 )
885 .await
886 .unwrap();
887 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
888 .await;
889 tree.flush_fs_events(cx).await;
890 tree.read_with(cx, |tree, _| {
        // Assert that file_scan_exclusions overrides file_scan_inclusions.
        check_worktree_entries(
            tree,
            &[".DS_Store", "src/.DS_Store"],
895 &["target", "node_modules"],
896 &["src/foo/another.rs", "src/foo/foo.rs", ".gitignore"],
897 &[],
898 )
899 });
900}
901
902#[gpui::test]
903async fn test_file_scan_inclusions_reindexes_on_setting_change(cx: &mut TestAppContext) {
904 init_test(cx);
905 cx.executor().allow_parking();
906 let dir = TempTree::new(json!({
907 ".gitignore": "**/target\n/node_modules/\n",
908 "target": {
909 "index": "blah2"
910 },
911 "node_modules": {
912 ".DS_Store": "",
913 "prettier": {
914 "package.json": "{}",
915 },
916 },
917 "src": {
918 ".DS_Store": "",
919 "foo": {
920 "foo.rs": "mod another;\n",
921 "another.rs": "// another",
922 },
923 },
924 ".DS_Store": "",
925 }));
926
927 cx.update(|cx| {
928 cx.update_global::<SettingsStore, _>(|store, cx| {
929 store.update_user_settings(cx, |settings| {
930 settings.project.worktree.file_scan_exclusions = Some(vec![]);
931 settings.project.worktree.file_scan_inclusions =
932 Some(vec!["node_modules/**".to_string()]);
933 });
934 });
935 });
936 let tree = Worktree::local(
937 dir.path(),
938 true,
939 Arc::new(RealFs::new(None, cx.executor())),
940 Default::default(),
941 &mut cx.to_async(),
942 )
943 .await
944 .unwrap();
945 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
946 .await;
947 tree.flush_fs_events(cx).await;
948
949 tree.read_with(cx, |tree, _| {
950 assert!(
951 tree.entry_for_path(rel_path("node_modules"))
952 .is_some_and(|f| f.is_always_included)
953 );
954 assert!(
955 tree.entry_for_path(rel_path("node_modules/prettier/package.json"))
956 .is_some_and(|f| f.is_always_included)
957 );
958 });
959
960 cx.update(|cx| {
961 cx.update_global::<SettingsStore, _>(|store, cx| {
962 store.update_user_settings(cx, |settings| {
963 settings.project.worktree.file_scan_exclusions = Some(vec![]);
964 settings.project.worktree.file_scan_inclusions = Some(vec![]);
965 });
966 });
967 });
968 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
969 .await;
970 tree.flush_fs_events(cx).await;
971
972 tree.read_with(cx, |tree, _| {
973 assert!(
974 tree.entry_for_path(rel_path("node_modules"))
975 .is_some_and(|f| !f.is_always_included)
976 );
977 assert!(
978 tree.entry_for_path(rel_path("node_modules/prettier/package.json"))
979 .is_some_and(|f| !f.is_always_included)
980 );
981 });
982}
983
984#[gpui::test]
985async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
986 init_test(cx);
987 cx.executor().allow_parking();
988 let dir = TempTree::new(json!({
989 ".gitignore": "**/target\n/node_modules\n",
990 "target": {
991 "index": "blah2"
992 },
993 "node_modules": {
994 ".DS_Store": "",
995 "prettier": {
996 "package.json": "{}",
997 },
998 },
999 "src": {
1000 ".DS_Store": "",
1001 "foo": {
1002 "foo.rs": "mod another;\n",
1003 "another.rs": "// another",
1004 },
1005 "bar": {
1006 "bar.rs": "// bar",
1007 },
1008 "lib.rs": "mod foo;\nmod bar;\n",
1009 },
1010 ".DS_Store": "",
1011 }));
1012 cx.update(|cx| {
1013 cx.update_global::<SettingsStore, _>(|store, cx| {
1014 store.update_user_settings(cx, |settings| {
1015 settings.project.worktree.file_scan_exclusions =
1016 Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
1017 });
1018 });
1019 });
1020
1021 let tree = Worktree::local(
1022 dir.path(),
1023 true,
1024 Arc::new(RealFs::new(None, cx.executor())),
1025 Default::default(),
1026 &mut cx.to_async(),
1027 )
1028 .await
1029 .unwrap();
1030 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1031 .await;
1032 tree.flush_fs_events(cx).await;
1033 tree.read_with(cx, |tree, _| {
1034 check_worktree_entries(
1035 tree,
1036 &[
1037 "src/foo/foo.rs",
1038 "src/foo/another.rs",
1039 "node_modules/.DS_Store",
1040 "src/.DS_Store",
1041 ".DS_Store",
1042 ],
1043 &["target", "node_modules"],
1044 &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
1045 &[],
1046 )
1047 });
1048
1049 cx.update(|cx| {
1050 cx.update_global::<SettingsStore, _>(|store, cx| {
1051 store.update_user_settings(cx, |settings| {
1052 settings.project.worktree.file_scan_exclusions =
1053 Some(vec!["**/node_modules/**".to_string()]);
1054 });
1055 });
1056 });
1057 tree.flush_fs_events(cx).await;
1058 cx.executor().run_until_parked();
1059 tree.read_with(cx, |tree, _| {
1060 check_worktree_entries(
1061 tree,
1062 &[
1063 "node_modules/prettier/package.json",
1064 "node_modules/.DS_Store",
1065 "node_modules",
1066 ],
1067 &["target"],
1068 &[
1069 ".gitignore",
1070 "src/lib.rs",
1071 "src/bar/bar.rs",
1072 "src/foo/foo.rs",
1073 "src/foo/another.rs",
1074 "src/.DS_Store",
1075 ".DS_Store",
1076 ],
1077 &[],
1078 )
1079 });
1080}
1081
1082#[gpui::test]
1083async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
1084 init_test(cx);
1085 cx.executor().allow_parking();
1086 let dir = TempTree::new(json!({
1087 ".git": {
1088 "HEAD": "ref: refs/heads/main\n",
1089 "foo": "bar",
1090 },
1091 ".gitignore": "**/target\n/node_modules\ntest_output\n",
1092 "target": {
1093 "index": "blah2"
1094 },
1095 "node_modules": {
1096 ".DS_Store": "",
1097 "prettier": {
1098 "package.json": "{}",
1099 },
1100 },
1101 "src": {
1102 ".DS_Store": "",
1103 "foo": {
1104 "foo.rs": "mod another;\n",
1105 "another.rs": "// another",
1106 },
1107 "bar": {
1108 "bar.rs": "// bar",
1109 },
1110 "lib.rs": "mod foo;\nmod bar;\n",
1111 },
1112 ".DS_Store": "",
1113 }));
1114 cx.update(|cx| {
1115 cx.update_global::<SettingsStore, _>(|store, cx| {
1116 store.update_user_settings(cx, |settings| {
1117 settings.project.worktree.file_scan_exclusions = Some(vec![
1118 "**/.git".to_string(),
1119 "node_modules/".to_string(),
1120 "build_output".to_string(),
1121 ]);
1122 });
1123 });
1124 });
1125
1126 let tree = Worktree::local(
1127 dir.path(),
1128 true,
1129 Arc::new(RealFs::new(None, cx.executor())),
1130 Default::default(),
1131 &mut cx.to_async(),
1132 )
1133 .await
1134 .unwrap();
1135 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1136 .await;
1137 tree.flush_fs_events(cx).await;
1138 tree.read_with(cx, |tree, _| {
1139 check_worktree_entries(
1140 tree,
1141 &[
1142 ".git/HEAD",
1143 ".git/foo",
1144 "node_modules",
1145 "node_modules/.DS_Store",
1146 "node_modules/prettier",
1147 "node_modules/prettier/package.json",
1148 ],
1149 &["target"],
1150 &[
1151 ".DS_Store",
1152 "src/.DS_Store",
1153 "src/lib.rs",
1154 "src/foo/foo.rs",
1155 "src/foo/another.rs",
1156 "src/bar/bar.rs",
1157 ".gitignore",
1158 ],
1159 &[],
1160 )
1161 });
1162
1163 let new_excluded_dir = dir.path().join("build_output");
1164 let new_ignored_dir = dir.path().join("test_output");
1165 std::fs::create_dir_all(&new_excluded_dir)
1166 .unwrap_or_else(|e| panic!("Failed to create a {new_excluded_dir:?} directory: {e}"));
1167 std::fs::create_dir_all(&new_ignored_dir)
1168 .unwrap_or_else(|e| panic!("Failed to create a {new_ignored_dir:?} directory: {e}"));
1169 let node_modules_dir = dir.path().join("node_modules");
1170 let dot_git_dir = dir.path().join(".git");
1171 let src_dir = dir.path().join("src");
1172 for existing_dir in [&node_modules_dir, &dot_git_dir, &src_dir] {
1173 assert!(
1174 existing_dir.is_dir(),
1175 "Expect {existing_dir:?} to be present in the FS already"
1176 );
1177 }
1178
1179 for directory_for_new_file in [
1180 new_excluded_dir,
1181 new_ignored_dir,
1182 node_modules_dir,
1183 dot_git_dir,
1184 src_dir,
1185 ] {
1186 std::fs::write(directory_for_new_file.join("new_file"), "new file contents")
1187 .unwrap_or_else(|e| {
1188 panic!("Failed to create in {directory_for_new_file:?} a new file: {e}")
1189 });
1190 }
1191 tree.flush_fs_events(cx).await;
1192
1193 tree.read_with(cx, |tree, _| {
1194 check_worktree_entries(
1195 tree,
1196 &[
1197 ".git/HEAD",
1198 ".git/foo",
1199 ".git/new_file",
1200 "node_modules",
1201 "node_modules/.DS_Store",
1202 "node_modules/prettier",
1203 "node_modules/prettier/package.json",
1204 "node_modules/new_file",
1205 "build_output",
1206 "build_output/new_file",
1207 "test_output/new_file",
1208 ],
1209 &["target", "test_output"],
1210 &[
1211 ".DS_Store",
1212 "src/.DS_Store",
1213 "src/lib.rs",
1214 "src/foo/foo.rs",
1215 "src/foo/another.rs",
1216 "src/bar/bar.rs",
1217 "src/new_file",
1218 ".gitignore",
1219 ],
1220 &[],
1221 )
1222 });
1223}
1224
1225#[gpui::test]
1226async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
1227 init_test(cx);
1228 cx.executor().allow_parking();
1229 let dir = TempTree::new(json!({
1230 ".git": {
1231 "HEAD": "ref: refs/heads/main\n",
1232 "foo": "foo contents",
1233 },
1234 }));
1235 let dot_git_worktree_dir = dir.path().join(".git");
1236
1237 let tree = Worktree::local(
1238 dot_git_worktree_dir.clone(),
1239 true,
1240 Arc::new(RealFs::new(None, cx.executor())),
1241 Default::default(),
1242 &mut cx.to_async(),
1243 )
1244 .await
1245 .unwrap();
1246 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1247 .await;
1248 tree.flush_fs_events(cx).await;
1249 tree.read_with(cx, |tree, _| {
1250 check_worktree_entries(tree, &[], &["HEAD", "foo"], &[], &[])
1251 });
1252
1253 std::fs::write(dot_git_worktree_dir.join("new_file"), "new file contents")
1254 .unwrap_or_else(|e| panic!("Failed to create in {dot_git_worktree_dir:?} a new file: {e}"));
1255 tree.flush_fs_events(cx).await;
1256 tree.read_with(cx, |tree, _| {
1257 check_worktree_entries(tree, &[], &["HEAD", "foo", "new_file"], &[], &[])
1258 });
1259}
1260
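// Creating an entry while the initial scan is still running must leave the
// observed (remotely updated) snapshot identical to the scanned one.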
1261#[gpui::test(iterations = 30)]
1262async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
1263 init_test(cx);
1264 let fs = FakeFs::new(cx.background_executor.clone());
1265 fs.insert_tree(
1266 "/root",
1267 json!({
1268 "b": {},
1269 "c": {},
1270 "d": {},
1271 }),
1272 )
1273 .await;
1274
1275 let tree = Worktree::local(
1276 "/root".as_ref(),
1277 true,
1278 fs,
1279 Default::default(),
1280 &mut cx.to_async(),
1281 )
1282 .await
1283 .unwrap();
1284
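    // Mirror the worktree into a second snapshot by applying every update it
    // broadcasts; the mirror is compared against the live snapshot below.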
1285 let snapshot1 = tree.update(cx, |tree, cx| {
1286 let tree = tree.as_local_mut().unwrap();
1287 let snapshot = Arc::new(Mutex::new(tree.snapshot()));
1288 tree.observe_updates(0, cx, {
1289 let snapshot = snapshot.clone();
1290 let settings = tree.settings();
1291 move |update| {
1292 snapshot
1293 .lock()
1294 .apply_remote_update(update, &settings.file_scan_inclusions);
1295 async { true }
1296 }
1297 });
1298 snapshot
1299 });
1300
1301 let entry = tree
1302 .update(cx, |tree, cx| {
1303 tree.as_local_mut()
1304 .unwrap()
1305 .create_entry(rel_path("a/e").into(), true, None, cx)
1306 })
1307 .await
1308 .unwrap()
1309 .into_included()
1310 .unwrap();
1311 assert!(entry.is_dir());
1312
1313 cx.executor().run_until_parked();
1314 tree.read_with(cx, |tree, _| {
1315 assert_eq!(
1316 tree.entry_for_path(rel_path("a/e")).unwrap().kind,
1317 EntryKind::Dir
1318 );
1319 });
1320
1321 let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
1322 assert_eq!(
1323 snapshot1.lock().entries(true, 0).collect::<Vec<_>>(),
1324 snapshot2.entries(true, 0).collect::<Vec<_>>()
1325 );
1326}
1327
1328#[gpui::test]
1329async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
1330 init_test(cx);
1331 cx.executor().allow_parking();
1332
1333 let fs_fake = FakeFs::new(cx.background_executor.clone());
1334 fs_fake
1335 .insert_tree(
1336 "/root",
1337 json!({
1338 "a": {},
1339 }),
1340 )
1341 .await;
1342
1343 let tree_fake = Worktree::local(
1344 "/root".as_ref(),
1345 true,
1346 fs_fake,
1347 Default::default(),
1348 &mut cx.to_async(),
1349 )
1350 .await
1351 .unwrap();
1352
1353 let entry = tree_fake
1354 .update(cx, |tree, cx| {
1355 tree.as_local_mut().unwrap().create_entry(
1356 rel_path("a/b/c/d.txt").into(),
1357 false,
1358 None,
1359 cx,
1360 )
1361 })
1362 .await
1363 .unwrap()
1364 .into_included()
1365 .unwrap();
1366 assert!(entry.is_file());
1367
1368 cx.executor().run_until_parked();
1369 tree_fake.read_with(cx, |tree, _| {
1370 assert!(
1371 tree.entry_for_path(rel_path("a/b/c/d.txt"))
1372 .unwrap()
1373 .is_file()
1374 );
1375 assert!(tree.entry_for_path(rel_path("a/b/c")).unwrap().is_dir());
1376 assert!(tree.entry_for_path(rel_path("a/b")).unwrap().is_dir());
1377 });
1378
1379 let fs_real = Arc::new(RealFs::new(None, cx.executor()));
1380 let temp_root = TempTree::new(json!({
1381 "a": {}
1382 }));
1383
1384 let tree_real = Worktree::local(
1385 temp_root.path(),
1386 true,
1387 fs_real,
1388 Default::default(),
1389 &mut cx.to_async(),
1390 )
1391 .await
1392 .unwrap();
1393
1394 let entry = tree_real
1395 .update(cx, |tree, cx| {
1396 tree.as_local_mut().unwrap().create_entry(
1397 rel_path("a/b/c/d.txt").into(),
1398 false,
1399 None,
1400 cx,
1401 )
1402 })
1403 .await
1404 .unwrap()
1405 .into_included()
1406 .unwrap();
1407 assert!(entry.is_file());
1408
1409 cx.executor().run_until_parked();
1410 tree_real.read_with(cx, |tree, _| {
1411 assert!(
1412 tree.entry_for_path(rel_path("a/b/c/d.txt"))
1413 .unwrap()
1414 .is_file()
1415 );
1416 assert!(tree.entry_for_path(rel_path("a/b/c")).unwrap().is_dir());
1417 assert!(tree.entry_for_path(rel_path("a/b")).unwrap().is_dir());
1418 });
1419
    // Smallest change: create a file whose parent directories already exist.
1421 let entry = tree_real
1422 .update(cx, |tree, cx| {
1423 tree.as_local_mut().unwrap().create_entry(
1424 rel_path("a/b/c/e.txt").into(),
1425 false,
1426 None,
1427 cx,
1428 )
1429 })
1430 .await
1431 .unwrap()
1432 .into_included()
1433 .unwrap();
1434 assert!(entry.is_file());
1435
1436 cx.executor().run_until_parked();
1437 tree_real.read_with(cx, |tree, _| {
1438 assert!(
1439 tree.entry_for_path(rel_path("a/b/c/e.txt"))
1440 .unwrap()
1441 .is_file()
1442 );
1443 });
1444
    // Largest change: create a file whose entire parent directory chain must be created.
1446 let entry = tree_real
1447 .update(cx, |tree, cx| {
1448 tree.as_local_mut().unwrap().create_entry(
1449 rel_path("d/e/f/g.txt").into(),
1450 false,
1451 None,
1452 cx,
1453 )
1454 })
1455 .await
1456 .unwrap()
1457 .into_included()
1458 .unwrap();
1459 assert!(entry.is_file());
1460
1461 cx.executor().run_until_parked();
1462 tree_real.read_with(cx, |tree, _| {
1463 assert!(
1464 tree.entry_for_path(rel_path("d/e/f/g.txt"))
1465 .unwrap()
1466 .is_file()
1467 );
1468 assert!(tree.entry_for_path(rel_path("d/e/f")).unwrap().is_dir());
1469 assert!(tree.entry_for_path(rel_path("d/e")).unwrap().is_dir());
1470 assert!(tree.entry_for_path(rel_path("d")).unwrap().is_dir());
1471 });
1472}
1473
1474#[gpui::test(iterations = 100)]
1475async fn test_random_worktree_operations_during_initial_scan(
1476 cx: &mut TestAppContext,
1477 mut rng: StdRng,
1478) {
1479 init_test(cx);
1480 let operations = env::var("OPERATIONS")
1481 .map(|o| o.parse().unwrap())
1482 .unwrap_or(5);
1483 let initial_entries = env::var("INITIAL_ENTRIES")
1484 .map(|o| o.parse().unwrap())
1485 .unwrap_or(20);
1486
1487 let root_dir = Path::new(path!("/test"));
1488 let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
1489 fs.as_fake().insert_tree(root_dir, json!({})).await;
1490 for _ in 0..initial_entries {
1491 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await;
1492 }
1493 log::info!("generated initial tree");
1494
1495 let worktree = Worktree::local(
1496 root_dir,
1497 true,
1498 fs.clone(),
1499 Default::default(),
1500 &mut cx.to_async(),
1501 )
1502 .await
1503 .unwrap();
1504
1505 let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
1506 let updates = Arc::new(Mutex::new(Vec::new()));
1507 worktree.update(cx, |tree, cx| {
1508 check_worktree_change_events(tree, cx);
1509
1510 tree.as_local_mut().unwrap().observe_updates(0, cx, {
1511 let updates = updates.clone();
1512 move |update| {
1513 updates.lock().push(update);
1514 async { true }
1515 }
1516 });
1517 });
1518
1519 for _ in 0..operations {
1520 worktree
1521 .update(cx, |worktree, cx| {
1522 randomly_mutate_worktree(worktree, &mut rng, cx)
1523 })
1524 .await
1525 .log_err();
1526 worktree.read_with(cx, |tree, _| {
1527 tree.as_local().unwrap().snapshot().check_invariants(true)
1528 });
1529
1530 if rng.random_bool(0.6) {
1531 snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
1532 }
1533 }
1534
1535 worktree
1536 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1537 .await;
1538
1539 cx.executor().run_until_parked();
1540
1541 let final_snapshot = worktree.read_with(cx, |tree, _| {
1542 let tree = tree.as_local().unwrap();
1543 let snapshot = tree.snapshot();
1544 snapshot.check_invariants(true);
1545 snapshot
1546 });
1547
1548 let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());
1549
1550 for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
1551 let mut updated_snapshot = snapshot.clone();
1552 for update in updates.lock().iter() {
1553 if update.scan_id >= updated_snapshot.scan_id() as u64 {
1554 updated_snapshot
1555 .apply_remote_update(update.clone(), &settings.file_scan_inclusions);
1556 }
1557 }
1558
1559 assert_eq!(
1560 updated_snapshot.entries(true, 0).collect::<Vec<_>>(),
1561 final_snapshot.entries(true, 0).collect::<Vec<_>>(),
1562 "wrong updates after snapshot {i}: {updates:#?}",
1563 );
1564 }
1565}
1566
1567#[gpui::test(iterations = 100)]
1568async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
1569 init_test(cx);
1570 let operations = env::var("OPERATIONS")
1571 .map(|o| o.parse().unwrap())
1572 .unwrap_or(40);
1573 let initial_entries = env::var("INITIAL_ENTRIES")
1574 .map(|o| o.parse().unwrap())
1575 .unwrap_or(20);
1576
1577 let root_dir = Path::new(path!("/test"));
1578 let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
1579 fs.as_fake().insert_tree(root_dir, json!({})).await;
1580 for _ in 0..initial_entries {
1581 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await;
1582 }
1583 log::info!("generated initial tree");
1584
1585 let worktree = Worktree::local(
1586 root_dir,
1587 true,
1588 fs.clone(),
1589 Default::default(),
1590 &mut cx.to_async(),
1591 )
1592 .await
1593 .unwrap();
1594
1595 let updates = Arc::new(Mutex::new(Vec::new()));
1596 worktree.update(cx, |tree, cx| {
1597 check_worktree_change_events(tree, cx);
1598
1599 tree.as_local_mut().unwrap().observe_updates(0, cx, {
1600 let updates = updates.clone();
1601 move |update| {
1602 updates.lock().push(update);
1603 async { true }
1604 }
1605 });
1606 });
1607
1608 worktree
1609 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1610 .await;
1611
1612 fs.as_fake().pause_events();
1613 let mut snapshots = Vec::new();
1614 let mut mutations_len = operations;
1615 while mutations_len > 1 {
1616 if rng.random_bool(0.2) {
1617 worktree
1618 .update(cx, |worktree, cx| {
1619 randomly_mutate_worktree(worktree, &mut rng, cx)
1620 })
1621 .await
1622 .log_err();
1623 } else {
1624 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await;
1625 }
1626
1627 let buffered_event_count = fs.as_fake().buffered_event_count();
1628 if buffered_event_count > 0 && rng.random_bool(0.3) {
1629 let len = rng.random_range(0..=buffered_event_count);
1630 log::info!("flushing {} events", len);
1631 fs.as_fake().flush_events(len);
1632 } else {
1633 randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng, cx.background_executor()).await;
1634 mutations_len -= 1;
1635 }
1636
1637 cx.executor().run_until_parked();
1638 if rng.random_bool(0.2) {
1639 log::info!("storing snapshot {}", snapshots.len());
1640 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1641 snapshots.push(snapshot);
1642 }
1643 }
1644
1645 log::info!("quiescing");
1646 fs.as_fake().flush_events(usize::MAX);
1647 cx.executor().run_until_parked();
1648
1649 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1650 snapshot.check_invariants(true);
1651 let expanded_paths = snapshot
1652 .expanded_entries()
1653 .map(|e| e.path.clone())
1654 .collect::<Vec<_>>();
1655
1656 {
1657 let new_worktree = Worktree::local(
1658 root_dir,
1659 true,
1660 fs.clone(),
1661 Default::default(),
1662 &mut cx.to_async(),
1663 )
1664 .await
1665 .unwrap();
1666 new_worktree
1667 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1668 .await;
1669 new_worktree
1670 .update(cx, |tree, _| {
1671 tree.as_local_mut()
1672 .unwrap()
1673 .refresh_entries_for_paths(expanded_paths)
1674 })
1675 .recv()
1676 .await;
1677 let new_snapshot =
1678 new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1679 assert_eq!(
1680 snapshot.entries_without_ids(true),
1681 new_snapshot.entries_without_ids(true)
1682 );
1683 }
1684
1685 let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());
1686
1687 for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
1688 for update in updates.lock().iter() {
1689 if update.scan_id >= prev_snapshot.scan_id() as u64 {
1690 prev_snapshot.apply_remote_update(update.clone(), &settings.file_scan_inclusions);
1691 }
1692 }
1693
1694 assert_eq!(
1695 prev_snapshot
1696 .entries(true, 0)
1697 .map(ignore_pending_dir)
1698 .collect::<Vec<_>>(),
1699 snapshot
1700 .entries(true, 0)
1701 .map(ignore_pending_dir)
1702 .collect::<Vec<_>>(),
1703 "wrong updates after snapshot {i}: {updates:#?}",
1704 );
1705 }
1706
1707 fn ignore_pending_dir(entry: &Entry) -> Entry {
1708 let mut entry = entry.clone();
1709 if entry.kind.is_dir() {
1710 entry.kind = EntryKind::Dir
1711 }
1712 entry
1713 }
1714}
1715
1716// The worktree's `UpdatedEntries` event can be used to follow along with
1717// all changes to the worktree's snapshot.
1718fn check_worktree_change_events(tree: &mut Worktree, cx: &mut Context<Worktree>) {
1719 let mut entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
1720 cx.subscribe(&cx.entity(), move |tree, _, event, _| {
1721 if let Event::UpdatedEntries(changes) = event {
1722 for (path, _, change_type) in changes.iter() {
1723 let entry = tree.entry_for_path(path).cloned();
1724 let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
1725 Ok(ix) | Err(ix) => ix,
1726 };
1727 match change_type {
1728 PathChange::Added => entries.insert(ix, entry.unwrap()),
1729 PathChange::Removed => drop(entries.remove(ix)),
1730 PathChange::Updated => {
1731 let entry = entry.unwrap();
1732 let existing_entry = entries.get_mut(ix).unwrap();
1733 assert_eq!(existing_entry.path, entry.path);
1734 *existing_entry = entry;
1735 }
1736 PathChange::AddedOrUpdated | PathChange::Loaded => {
1737 let entry = entry.unwrap();
1738 if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
1739 *entries.get_mut(ix).unwrap() = entry;
1740 } else {
1741 entries.insert(ix, entry);
1742 }
1743 }
1744 }
1745 }
1746
1747 let new_entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
1748 assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
1749 }
1750 })
1751 .detach();
1752}
1753
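// Applies one random worktree-level mutation: delete a random entry, create a
// file or directory under a random directory, or overwrite a random file.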
1754fn randomly_mutate_worktree(
1755 worktree: &mut Worktree,
1756 rng: &mut impl Rng,
1757 cx: &mut Context<Worktree>,
1758) -> Task<Result<()>> {
1759 log::info!("mutating worktree");
1760 let worktree = worktree.as_local_mut().unwrap();
1761 let snapshot = worktree.snapshot();
1762 let entry = snapshot.entries(false, 0).choose(rng).unwrap();
1763
1764 match rng.random_range(0_u32..100) {
1765 0..=33 if entry.path.as_ref() != RelPath::empty() => {
1766 log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
1767 worktree.delete_entry(entry.id, false, cx).unwrap()
1768 }
1769 _ => {
1770 if entry.is_dir() {
1771 let child_path = entry.path.join(rel_path(&random_filename(rng)));
1772 let is_dir = rng.random_bool(0.3);
1773 log::info!(
1774 "creating {} at {:?}",
1775 if is_dir { "dir" } else { "file" },
1776 child_path,
1777 );
1778 let task = worktree.create_entry(child_path, is_dir, None, cx);
1779 cx.background_spawn(async move {
1780 task.await?;
1781 Ok(())
1782 })
1783 } else {
1784 log::info!("overwriting file {:?} ({})", &entry.path, entry.id.0);
1785 let task = worktree.write_file(
1786 entry.path.clone(),
1787 Rope::default(),
1788 Default::default(),
1789 cx,
1790 UTF_8,
1791 );
1792 cx.background_spawn(async move {
1793 task.await?;
1794 Ok(())
1795 })
1796 }
1797 }
1798 }
1799}
1800
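// Applies one random mutation directly to the fake filesystem: create a file
// or directory, write a random .gitignore, or rename/delete an existing path.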
1801async fn randomly_mutate_fs(
1802 fs: &Arc<dyn Fs>,
1803 root_path: &Path,
1804 insertion_probability: f64,
1805 rng: &mut impl Rng,
1806 executor: &BackgroundExecutor,
1807) {
1808 log::info!("mutating fs");
1809 let mut files = Vec::new();
1810 let mut dirs = Vec::new();
1811 for path in fs.as_fake().paths(false) {
1812 if path.starts_with(root_path) {
1813 if fs.is_file(&path).await {
1814 files.push(path);
1815 } else {
1816 dirs.push(path);
1817 }
1818 }
1819 }
1820
1821 if (files.is_empty() && dirs.len() == 1) || rng.random_bool(insertion_probability) {
1822 let path = dirs.choose(rng).unwrap();
1823 let new_path = path.join(random_filename(rng));
1824
1825 if rng.random() {
1826 log::info!(
1827 "creating dir {:?}",
1828 new_path.strip_prefix(root_path).unwrap()
1829 );
1830 fs.create_dir(&new_path).await.unwrap();
1831 } else {
1832 log::info!(
1833 "creating file {:?}",
1834 new_path.strip_prefix(root_path).unwrap()
1835 );
1836 fs.create_file(&new_path, Default::default()).await.unwrap();
1837 }
1838 } else if rng.random_bool(0.05) {
1839 let ignore_dir_path = dirs.choose(rng).unwrap();
1840 let ignore_path = ignore_dir_path.join(GITIGNORE);
1841
1842 let subdirs = dirs
1843 .iter()
1844 .filter(|d| d.starts_with(ignore_dir_path))
1845 .cloned()
1846 .collect::<Vec<_>>();
1847 let subfiles = files
1848 .iter()
1849 .filter(|d| d.starts_with(ignore_dir_path))
1850 .cloned()
1851 .collect::<Vec<_>>();
1852 let files_to_ignore = {
1853 let len = rng.random_range(0..=subfiles.len());
1854 subfiles.choose_multiple(rng, len)
1855 };
1856 let dirs_to_ignore = {
1857 let len = rng.random_range(0..subdirs.len());
1858 subdirs.choose_multiple(rng, len)
1859 };
1860
1861 let mut ignore_contents = String::new();
1862 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
1863 writeln!(
1864 ignore_contents,
1865 "{}",
1866 path_to_ignore
1867 .strip_prefix(ignore_dir_path)
1868 .unwrap()
1869 .to_str()
1870 .unwrap()
1871 )
1872 .unwrap();
1873 }
1874 log::info!(
1875 "creating gitignore {:?} with contents:\n{}",
1876 ignore_path.strip_prefix(root_path).unwrap(),
1877 ignore_contents
1878 );
1879 let encoding_wrapper = EncodingWrapper::new(UTF_8);
1880 fs.save(
1881 &ignore_path,
1882 &Rope::from_str(ignore_contents.as_str(), executor),
1883 Default::default(),
1884 encoding_wrapper,
1885 )
1886 .await
1887 .unwrap();
1888 } else {
1889 let old_path = {
1890 let file_path = files.choose(rng);
1891 let dir_path = dirs[1..].choose(rng);
1892 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
1893 };
1894
1895 let is_rename = rng.random();
1896 if is_rename {
1897 let new_path_parent = dirs
1898 .iter()
1899 .filter(|d| !d.starts_with(old_path))
1900 .choose(rng)
1901 .unwrap();
1902
1903 let overwrite_existing_dir =
1904 !old_path.starts_with(new_path_parent) && rng.random_bool(0.3);
1905 let new_path = if overwrite_existing_dir {
1906 fs.remove_dir(
1907 new_path_parent,
1908 RemoveOptions {
1909 recursive: true,
1910 ignore_if_not_exists: true,
1911 },
1912 )
1913 .await
1914 .unwrap();
1915 new_path_parent.to_path_buf()
1916 } else {
1917 new_path_parent.join(random_filename(rng))
1918 };
1919
1920 log::info!(
1921 "renaming {:?} to {}{:?}",
1922 old_path.strip_prefix(root_path).unwrap(),
1923 if overwrite_existing_dir {
1924 "overwrite "
1925 } else {
1926 ""
1927 },
1928 new_path.strip_prefix(root_path).unwrap()
1929 );
1930 fs.rename(
1931 old_path,
1932 &new_path,
1933 fs::RenameOptions {
1934 overwrite: true,
1935 ignore_if_exists: true,
1936 },
1937 )
1938 .await
1939 .unwrap();
1940 } else if fs.is_file(old_path).await {
1941 log::info!(
1942 "deleting file {:?}",
1943 old_path.strip_prefix(root_path).unwrap()
1944 );
1945 fs.remove_file(old_path, Default::default()).await.unwrap();
1946 } else {
1947 log::info!(
1948 "deleting dir {:?}",
1949 old_path.strip_prefix(root_path).unwrap()
1950 );
1951 fs.remove_dir(
1952 old_path,
1953 RemoveOptions {
1954 recursive: true,
1955 ignore_if_not_exists: true,
1956 },
1957 )
1958 .await
1959 .unwrap();
1960 }
1961 }
1962}
1963
1964fn random_filename(rng: &mut impl Rng) -> String {
1965 (0..6)
1966 .map(|_| rng.sample(rand::distr::Alphanumeric))
1967 .map(char::from)
1968 .collect()
1969}
1970
1971#[gpui::test]
1972async fn test_rename_file_to_new_directory(cx: &mut TestAppContext) {
1973 init_test(cx);
1974 let fs = FakeFs::new(cx.background_executor.clone());
1975 let expected_contents = "content";
1976 fs.as_fake()
1977 .insert_tree(
1978 "/root",
1979 json!({
1980 "test.txt": expected_contents
1981 }),
1982 )
1983 .await;
1984 let worktree = Worktree::local(
1985 Path::new("/root"),
1986 true,
1987 fs.clone(),
1988 Arc::default(),
1989 &mut cx.to_async(),
1990 )
1991 .await
1992 .unwrap();
1993 cx.read(|cx| worktree.read(cx).as_local().unwrap().scan_complete())
1994 .await;
1995
1996 let entry_id = worktree.read_with(cx, |worktree, _| {
1997 worktree.entry_for_path("test.txt").unwrap().id
1998 });
1999 let _result = worktree
2000 .update(cx, |worktree, cx| {
2001 worktree.rename_entry(entry_id, Path::new("dir1/dir2/dir3/test.txt"), cx)
2002 })
2003 .await
2004 .unwrap();
2005 worktree.read_with(cx, |worktree, _| {
2006 assert!(
2007 worktree.entry_for_path("test.txt").is_none(),
2008 "Old file should have been removed"
2009 );
2010 assert!(
2011 worktree.entry_for_path("dir1/dir2/dir3/test.txt").is_some(),
2012 "Whole directory hierarchy and the new file should have been created"
2013 );
2014 });
2015 assert_eq!(
2016 worktree
2017 .update(cx, |worktree, cx| {
2018 worktree.load_file("dir1/dir2/dir3/test.txt".as_ref(), None, cx)
2019 })
2020 .await
2021 .unwrap()
2022 .text,
2023 expected_contents,
2024 "Moved file's contents should be preserved"
2025 );
2026
2027 let entry_id = worktree.read_with(cx, |worktree, _| {
2028 worktree
2029 .entry_for_path("dir1/dir2/dir3/test.txt")
2030 .unwrap()
2031 .id
2032 });
2033 let _result = worktree
2034 .update(cx, |worktree, cx| {
2035 worktree.rename_entry(entry_id, Path::new("dir1/dir2/test.txt"), cx)
2036 })
2037 .await
2038 .unwrap();
2039 worktree.read_with(cx, |worktree, _| {
2040 assert!(
2041 worktree.entry_for_path("test.txt").is_none(),
2042 "First file should not reappear"
2043 );
2044 assert!(
2045 worktree.entry_for_path("dir1/dir2/dir3/test.txt").is_none(),
2046 "Old file should have been removed"
2047 );
2048 assert!(
2049 worktree.entry_for_path("dir1/dir2/test.txt").is_some(),
2050 "No error should have occurred after moving into existing directory"
2051 );
2052 });
2053 assert_eq!(
2054 worktree
2055 .update(cx, |worktree, cx| {
2056 worktree.load_file("dir1/dir2/test.txt".as_ref(), None, cx)
2057 })
2058 .await
2059 .unwrap()
2060 .text,
2061 expected_contents,
2062 "Moved file's contents should be preserved"
2063 );
2064}
2065
2066#[gpui::test]
2067async fn test_private_single_file_worktree(cx: &mut TestAppContext) {
2068 init_test(cx);
2069 let fs = FakeFs::new(cx.background_executor.clone());
2070 fs.insert_tree("/", json!({".env": "PRIVATE=secret\n"}))
2071 .await;
2072 let tree = Worktree::local(
2073 Path::new("/.env"),
2074 true,
2075 fs.clone(),
2076 Default::default(),
2077 &mut cx.to_async(),
2078 )
2079 .await
2080 .unwrap();
2081 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2082 .await;
2083 tree.read_with(cx, |tree, _| {
2084 let entry = tree.entry_for_path(rel_path("")).unwrap();
2085 assert!(entry.is_private);
2086 });
2087}
2088
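// A worktree rooted inside a repository reports the repository whose .git
// directory lives above the worktree root.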
2089#[gpui::test]
2090async fn test_repository_above_root(executor: BackgroundExecutor, cx: &mut TestAppContext) {
2091 init_test(cx);
2092
2093 let fs = FakeFs::new(executor);
2094 fs.insert_tree(
2095 path!("/root"),
2096 json!({
2097 ".git": {},
2098 "subproject": {
2099 "a.txt": "A"
2100 }
2101 }),
2102 )
2103 .await;
2104 let worktree = Worktree::local(
2105 path!("/root/subproject").as_ref(),
2106 true,
2107 fs.clone(),
2108 Arc::default(),
2109 &mut cx.to_async(),
2110 )
2111 .await
2112 .unwrap();
2113 worktree
2114 .update(cx, |worktree, _| {
2115 worktree.as_local().unwrap().scan_complete()
2116 })
2117 .await;
2118 cx.run_until_parked();
2119 let repos = worktree.update(cx, |worktree, _| {
2120 worktree
2121 .as_local()
2122 .unwrap()
2123 .git_repositories
2124 .values()
2125 .map(|entry| entry.work_directory_abs_path.clone())
2126 .collect::<Vec<_>>()
2127 });
2128 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root")).into()]);
2129
2130 fs.touch_path(path!("/root/subproject")).await;
2131 worktree
2132 .update(cx, |worktree, _| {
2133 worktree.as_local().unwrap().scan_complete()
2134 })
2135 .await;
2136 cx.run_until_parked();
2137
2138 let repos = worktree.update(cx, |worktree, _| {
2139 worktree
2140 .as_local()
2141 .unwrap()
2142 .git_repositories
2143 .values()
2144 .map(|entry| entry.work_directory_abs_path.clone())
2145 .collect::<Vec<_>>()
2146 });
2147 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root")).into()]);
2148}
2149
2150#[gpui::test]
2151async fn test_global_gitignore(executor: BackgroundExecutor, cx: &mut TestAppContext) {
2152 init_test(cx);
2153
2154 let home = paths::home_dir();
2155 let fs = FakeFs::new(executor);
2156 fs.insert_tree(
2157 home,
2158 json!({
2159 ".config": {
2160 "git": {
2161 "ignore": "foo\n/bar\nbaz\n"
2162 }
2163 },
2164 "project": {
2165 ".git": {},
2166 ".gitignore": "!baz",
2167 "foo": "",
2168 "bar": "",
2169 "sub": {
2170 "bar": "",
2171 },
2172 "subrepo": {
2173 ".git": {},
2174 "bar": ""
2175 },
2176 "baz": ""
2177 }
2178 }),
2179 )
2180 .await;
2181 let worktree = Worktree::local(
2182 home.join("project"),
2183 true,
2184 fs.clone(),
2185 Arc::default(),
2186 &mut cx.to_async(),
2187 )
2188 .await
2189 .unwrap();
2190 worktree
2191 .update(cx, |worktree, _| {
2192 worktree.as_local().unwrap().scan_complete()
2193 })
2194 .await;
2195 cx.run_until_parked();
2196
2197 // .gitignore overrides excludesFile, and anchored paths in excludesFile are resolved
2198 // relative to the nearest containing repository
2199 worktree.update(cx, |worktree, _cx| {
2200 check_worktree_entries(
2201 worktree,
2202 &[],
2203 &["foo", "bar", "subrepo/bar"],
2204 &["sub/bar", "baz"],
2205 &[],
2206 );
2207 });
2208
2209 // Ignore statuses are updated when excludesFile changes
2210 fs.write(
2211 &home.join(".config").join("git").join("ignore"),
2212 "/bar\nbaz\n".as_bytes(),
2213 )
2214 .await
2215 .unwrap();
2216 worktree
2217 .update(cx, |worktree, _| {
2218 worktree.as_local().unwrap().scan_complete()
2219 })
2220 .await;
2221 cx.run_until_parked();
2222
2223 worktree.update(cx, |worktree, _cx| {
2224 check_worktree_entries(
2225 worktree,
2226 &[],
2227 &["bar", "subrepo/bar"],
2228 &["foo", "sub/bar", "baz"],
2229 &[],
2230 );
2231 });
2232
2233 // Statuses are updated when .git added/removed
2234 fs.remove_dir(
2235 &home.join("project").join("subrepo").join(".git"),
2236 RemoveOptions {
2237 recursive: true,
2238 ..Default::default()
2239 },
2240 )
2241 .await
2242 .unwrap();
2243 worktree
2244 .update(cx, |worktree, _| {
2245 worktree.as_local().unwrap().scan_complete()
2246 })
2247 .await;
2248 cx.run_until_parked();
2249
2250 worktree.update(cx, |worktree, _cx| {
2251 check_worktree_entries(
2252 worktree,
2253 &[],
2254 &["bar"],
2255 &["foo", "sub/bar", "baz", "subrepo/bar"],
2256 &[],
2257 );
2258 });
2259}
2260
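// Asserts, in order: excluded paths have no entries, ignored paths have
// entries marked ignored, tracked paths have entries that are not ignored (or
// are always included), and included paths have entries marked always included.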
2261#[track_caller]
2262fn check_worktree_entries(
2263 tree: &Worktree,
2264 expected_excluded_paths: &[&str],
2265 expected_ignored_paths: &[&str],
2266 expected_tracked_paths: &[&str],
2267 expected_included_paths: &[&str],
2268) {
2269 for path in expected_excluded_paths {
2270 let entry = tree.entry_for_path(rel_path(path));
2271 assert!(
2272 entry.is_none(),
2273 "expected path '{path}' to be excluded, but got entry: {entry:?}",
2274 );
2275 }
2276 for path in expected_ignored_paths {
2277 let entry = tree
2278 .entry_for_path(rel_path(path))
2279 .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
2280 assert!(
2281 entry.is_ignored,
2282 "expected path '{path}' to be ignored, but got entry: {entry:?}",
2283 );
2284 }
2285 for path in expected_tracked_paths {
2286 let entry = tree
2287 .entry_for_path(rel_path(path))
2288 .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
2289 assert!(
2290 !entry.is_ignored || entry.is_always_included,
2291 "expected path '{path}' to be tracked, but got entry: {entry:?}",
2292 );
2293 }
2294 for path in expected_included_paths {
2295 let entry = tree
2296 .entry_for_path(rel_path(path))
2297 .unwrap_or_else(|| panic!("Missing entry for expected included path '{path}'"));
2298 assert!(
2299 entry.is_always_included,
2300 "expected path '{path}' to always be included, but got entry: {entry:?}",
2301 );
2302 }
2303}
2304
2305fn init_test(cx: &mut gpui::TestAppContext) {
2306 zlog::init_test();
2307
2308 cx.update(|cx| {
2309 let settings_store = SettingsStore::test(cx);
2310 cx.set_global(settings_store);
2311 WorktreeSettings::register(cx);
2312 });
2313}