1mod worktree_settings;
2
3use anyhow::Result;
4use encoding_rs;
5use fs::{FakeFs, Fs, RealFs, RemoveOptions};
6use git::{DOT_GIT, GITIGNORE, REPO_EXCLUDE};
7use gpui::{AppContext as _, BackgroundExecutor, BorrowAppContext, Context, Task, TestAppContext};
8use parking_lot::Mutex;
9use postage::stream::Stream;
10use pretty_assertions::assert_eq;
11use rand::prelude::*;
12use worktree::{Entry, EntryKind, Event, PathChange, Worktree, WorktreeModelHandle};
13
14use serde_json::json;
15use settings::SettingsStore;
16use std::{
17 env,
18 fmt::Write,
19 mem,
20 path::{Path, PathBuf},
21 sync::Arc,
22};
23use util::{
24 ResultExt, path,
25 paths::PathStyle,
26 rel_path::{RelPath, rel_path},
27 test::TempTree,
28};
29
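// Verifies that worktree traversal respects `.gitignore`: ignored entries are
// omitted unless `entries` is called with `include_ignored` set to true.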
30#[gpui::test]
31async fn test_traversal(cx: &mut TestAppContext) {
32 init_test(cx);
33 let fs = FakeFs::new(cx.background_executor.clone());
34 fs.insert_tree(
35 "/root",
36 json!({
37 ".gitignore": "a/b\n",
38 "a": {
39 "b": "",
40 "c": "",
41 }
42 }),
43 )
44 .await;
45
46 let tree = Worktree::local(
47 Path::new("/root"),
48 true,
49 fs,
50 Default::default(),
51 true,
52 &mut cx.to_async(),
53 )
54 .await
55 .unwrap();
56 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
57 .await;
58
59 tree.read_with(cx, |tree, _| {
60 assert_eq!(
61 tree.entries(false, 0)
62 .map(|entry| entry.path.as_ref())
63 .collect::<Vec<_>>(),
64 vec![
65 rel_path(""),
66 rel_path(".gitignore"),
67 rel_path("a"),
68 rel_path("a/c"),
69 ]
70 );
71 assert_eq!(
72 tree.entries(true, 0)
73 .map(|entry| entry.path.as_ref())
74 .collect::<Vec<_>>(),
75 vec![
76 rel_path(""),
77 rel_path(".gitignore"),
78 rel_path("a"),
79 rel_path("a/b"),
80 rel_path("a/c"),
81 ]
82 );
83 })
84}
85
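// Symlinks that point back at an ancestor directory appear as entries but are
// not followed, so the scanner does not loop on the cycle.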
86#[gpui::test(iterations = 10)]
87async fn test_circular_symlinks(cx: &mut TestAppContext) {
88 init_test(cx);
89 let fs = FakeFs::new(cx.background_executor.clone());
90 fs.insert_tree(
91 "/root",
92 json!({
93 "lib": {
94 "a": {
95 "a.txt": ""
96 },
97 "b": {
98 "b.txt": ""
99 }
100 }
101 }),
102 )
103 .await;
104 fs.create_symlink("/root/lib/a/lib".as_ref(), "..".into())
105 .await
106 .unwrap();
107 fs.create_symlink("/root/lib/b/lib".as_ref(), "..".into())
108 .await
109 .unwrap();
110
111 let tree = Worktree::local(
112 Path::new("/root"),
113 true,
114 fs.clone(),
115 Default::default(),
116 true,
117 &mut cx.to_async(),
118 )
119 .await
120 .unwrap();
121
122 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
123 .await;
124
125 tree.read_with(cx, |tree, _| {
126 assert_eq!(
127 tree.entries(false, 0)
128 .map(|entry| entry.path.as_ref())
129 .collect::<Vec<_>>(),
130 vec![
131 rel_path(""),
132 rel_path("lib"),
133 rel_path("lib/a"),
134 rel_path("lib/a/a.txt"),
135 rel_path("lib/a/lib"),
136 rel_path("lib/b"),
137 rel_path("lib/b/b.txt"),
138 rel_path("lib/b/lib"),
139 ]
140 );
141 });
142
143 fs.rename(
144 Path::new("/root/lib/a/lib"),
145 Path::new("/root/lib/a/lib-2"),
146 Default::default(),
147 )
148 .await
149 .unwrap();
150 cx.executor().run_until_parked();
151 tree.read_with(cx, |tree, _| {
152 assert_eq!(
153 tree.entries(false, 0)
154 .map(|entry| entry.path.as_ref())
155 .collect::<Vec<_>>(),
156 vec![
157 rel_path(""),
158 rel_path("lib"),
159 rel_path("lib/a"),
160 rel_path("lib/a/a.txt"),
161 rel_path("lib/a/lib-2"),
162 rel_path("lib/b"),
163 rel_path("lib/b/b.txt"),
164 rel_path("lib/b/lib"),
165 ]
166 );
167 });
168}
169
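// Symlinks that lead outside the worktree root are marked as external and start
// out unloaded; their contents are only scanned when explicitly expanded.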
170#[gpui::test]
171async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
172 init_test(cx);
173 let fs = FakeFs::new(cx.background_executor.clone());
174 fs.insert_tree(
175 "/root",
176 json!({
177 "dir1": {
178 "deps": {
179 // symlinks here
180 },
181 "src": {
182 "a.rs": "",
183 "b.rs": "",
184 },
185 },
186 "dir2": {
187 "src": {
188 "c.rs": "",
189 "d.rs": "",
190 }
191 },
192 "dir3": {
193 "deps": {},
194 "src": {
195 "e.rs": "",
196 "f.rs": "",
197 },
198 }
199 }),
200 )
201 .await;
202
203 // These symlinks point to directories outside of the worktree's root, dir1.
204 fs.create_symlink("/root/dir1/deps/dep-dir2".as_ref(), "../../dir2".into())
205 .await
206 .unwrap();
207 fs.create_symlink("/root/dir1/deps/dep-dir3".as_ref(), "../../dir3".into())
208 .await
209 .unwrap();
210
211 let tree = Worktree::local(
212 Path::new("/root/dir1"),
213 true,
214 fs.clone(),
215 Default::default(),
216 true,
217 &mut cx.to_async(),
218 )
219 .await
220 .unwrap();
221
222 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
223 .await;
224
225 let tree_updates = Arc::new(Mutex::new(Vec::new()));
226 tree.update(cx, |_, cx| {
227 let tree_updates = tree_updates.clone();
228 cx.subscribe(&tree, move |_, _, event, _| {
229 if let Event::UpdatedEntries(update) = event {
230 tree_updates.lock().extend(
231 update
232 .iter()
233 .map(|(path, _, change)| (path.clone(), *change)),
234 );
235 }
236 })
237 .detach();
238 });
239
240 // The symlinked directories are not scanned by default.
241 tree.read_with(cx, |tree, _| {
242 assert_eq!(
243 tree.entries(true, 0)
244 .map(|entry| (entry.path.as_ref(), entry.is_external))
245 .collect::<Vec<_>>(),
246 vec![
247 (rel_path(""), false),
248 (rel_path("deps"), false),
249 (rel_path("deps/dep-dir2"), true),
250 (rel_path("deps/dep-dir3"), true),
251 (rel_path("src"), false),
252 (rel_path("src/a.rs"), false),
253 (rel_path("src/b.rs"), false),
254 ]
255 );
256
257 assert_eq!(
258 tree.entry_for_path(rel_path("deps/dep-dir2")).unwrap().kind,
259 EntryKind::UnloadedDir
260 );
261 });
262
263 // Expand one of the symlinked directories.
264 tree.read_with(cx, |tree, _| {
265 tree.as_local()
266 .unwrap()
267 .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3").into()])
268 })
269 .recv()
270 .await;
271
272 // The expanded directory's contents are loaded. Subdirectories are
273 // not scanned yet.
274 tree.read_with(cx, |tree, _| {
275 assert_eq!(
276 tree.entries(true, 0)
277 .map(|entry| (entry.path.as_ref(), entry.is_external))
278 .collect::<Vec<_>>(),
279 vec![
280 (rel_path(""), false),
281 (rel_path("deps"), false),
282 (rel_path("deps/dep-dir2"), true),
283 (rel_path("deps/dep-dir3"), true),
284 (rel_path("deps/dep-dir3/deps"), true),
285 (rel_path("deps/dep-dir3/src"), true),
286 (rel_path("src"), false),
287 (rel_path("src/a.rs"), false),
288 (rel_path("src/b.rs"), false),
289 ]
290 );
291 });
292 assert_eq!(
293 mem::take(&mut *tree_updates.lock()),
294 &[
295 (rel_path("deps/dep-dir3").into(), PathChange::Loaded),
296 (rel_path("deps/dep-dir3/deps").into(), PathChange::Loaded),
297 (rel_path("deps/dep-dir3/src").into(), PathChange::Loaded)
298 ]
299 );
300
301 // Expand a subdirectory of one of the symlinked directories.
302 tree.read_with(cx, |tree, _| {
303 tree.as_local()
304 .unwrap()
305 .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3/src").into()])
306 })
307 .recv()
308 .await;
309
310 // The expanded subdirectory's contents are loaded.
311 tree.read_with(cx, |tree, _| {
312 assert_eq!(
313 tree.entries(true, 0)
314 .map(|entry| (entry.path.as_ref(), entry.is_external))
315 .collect::<Vec<_>>(),
316 vec![
317 (rel_path(""), false),
318 (rel_path("deps"), false),
319 (rel_path("deps/dep-dir2"), true),
320 (rel_path("deps/dep-dir3"), true),
321 (rel_path("deps/dep-dir3/deps"), true),
322 (rel_path("deps/dep-dir3/src"), true),
323 (rel_path("deps/dep-dir3/src/e.rs"), true),
324 (rel_path("deps/dep-dir3/src/f.rs"), true),
325 (rel_path("src"), false),
326 (rel_path("src/a.rs"), false),
327 (rel_path("src/b.rs"), false),
328 ]
329 );
330 });
331
332 assert_eq!(
333 mem::take(&mut *tree_updates.lock()),
334 &[
335 (rel_path("deps/dep-dir3/src").into(), PathChange::Loaded),
336 (
337 rel_path("deps/dep-dir3/src/e.rs").into(),
338 PathChange::Loaded
339 ),
340 (
341 rel_path("deps/dep-dir3/src/f.rs").into(),
342 PathChange::Loaded
343 )
344 ]
345 );
346}
347
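// On macOS, where the default file system is case-insensitive, a rename that
// only changes a file name's casing should still be reflected in the worktree.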
348#[cfg(target_os = "macos")]
349#[gpui::test]
350async fn test_renaming_case_only(cx: &mut TestAppContext) {
351 cx.executor().allow_parking();
352 init_test(cx);
353
354 const OLD_NAME: &str = "aaa.rs";
355 const NEW_NAME: &str = "AAA.rs";
356
357 let fs = Arc::new(RealFs::new(None, cx.executor()));
358 let temp_root = TempTree::new(json!({
359 OLD_NAME: "",
360 }));
361
362 let tree = Worktree::local(
363 temp_root.path(),
364 true,
365 fs.clone(),
366 Default::default(),
367 true,
368 &mut cx.to_async(),
369 )
370 .await
371 .unwrap();
372
373 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
374 .await;
375 tree.read_with(cx, |tree, _| {
376 assert_eq!(
377 tree.entries(true, 0)
378 .map(|entry| entry.path.as_ref())
379 .collect::<Vec<_>>(),
380 vec![rel_path(""), rel_path(OLD_NAME)]
381 );
382 });
383
384 fs.rename(
385 &temp_root.path().join(OLD_NAME),
386 &temp_root.path().join(NEW_NAME),
387 fs::RenameOptions {
388 overwrite: true,
389 ignore_if_exists: true,
390 create_parents: false,
391 },
392 )
393 .await
394 .unwrap();
395
396 tree.flush_fs_events(cx).await;
397
398 tree.read_with(cx, |tree, _| {
399 assert_eq!(
400 tree.entries(true, 0)
401 .map(|entry| entry.path.as_ref())
402 .collect::<Vec<_>>(),
403 vec![rel_path(""), rel_path(NEW_NAME)]
404 );
405 });
406}
407
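// Gitignored directories are left unscanned until a file inside them is loaded,
// and each load only reads the directories that were newly expanded.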
408#[gpui::test]
409async fn test_open_gitignored_files(cx: &mut TestAppContext) {
410 init_test(cx);
411 let fs = FakeFs::new(cx.background_executor.clone());
412 fs.insert_tree(
413 "/root",
414 json!({
415 ".gitignore": "node_modules\n",
416 "one": {
417 "node_modules": {
418 "a": {
419 "a1.js": "a1",
420 "a2.js": "a2",
421 },
422 "b": {
423 "b1.js": "b1",
424 "b2.js": "b2",
425 },
426 "c": {
427 "c1.js": "c1",
428 "c2.js": "c2",
429 }
430 },
431 },
432 "two": {
433 "x.js": "",
434 "y.js": "",
435 },
436 }),
437 )
438 .await;
439
440 let tree = Worktree::local(
441 Path::new("/root"),
442 true,
443 fs.clone(),
444 Default::default(),
445 true,
446 &mut cx.to_async(),
447 )
448 .await
449 .unwrap();
450
451 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
452 .await;
453
454 tree.read_with(cx, |tree, _| {
455 assert_eq!(
456 tree.entries(true, 0)
457 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
458 .collect::<Vec<_>>(),
459 vec![
460 (rel_path(""), false),
461 (rel_path(".gitignore"), false),
462 (rel_path("one"), false),
463 (rel_path("one/node_modules"), true),
464 (rel_path("two"), false),
465 (rel_path("two/x.js"), false),
466 (rel_path("two/y.js"), false),
467 ]
468 );
469 });
470
471 // Open a file that is nested inside of a gitignored directory that
472 // has not yet been expanded.
473 let prev_read_dir_count = fs.read_dir_call_count();
474 let loaded = tree
475 .update(cx, |tree, cx| {
476 tree.load_file(rel_path("one/node_modules/b/b1.js"), cx)
477 })
478 .await
479 .unwrap();
480
481 tree.read_with(cx, |tree, _| {
482 assert_eq!(
483 tree.entries(true, 0)
484 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
485 .collect::<Vec<_>>(),
486 vec![
487 (rel_path(""), false),
488 (rel_path(".gitignore"), false),
489 (rel_path("one"), false),
490 (rel_path("one/node_modules"), true),
491 (rel_path("one/node_modules/a"), true),
492 (rel_path("one/node_modules/b"), true),
493 (rel_path("one/node_modules/b/b1.js"), true),
494 (rel_path("one/node_modules/b/b2.js"), true),
495 (rel_path("one/node_modules/c"), true),
496 (rel_path("two"), false),
497 (rel_path("two/x.js"), false),
498 (rel_path("two/y.js"), false),
499 ]
500 );
501
502 assert_eq!(
503 loaded.file.path.as_ref(),
504 rel_path("one/node_modules/b/b1.js")
505 );
506
507 // Only the newly-expanded directories are scanned.
508 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
509 });
510
511 // Open another file in a different subdirectory of the same
512 // gitignored directory.
513 let prev_read_dir_count = fs.read_dir_call_count();
514 let loaded = tree
515 .update(cx, |tree, cx| {
516 tree.load_file(rel_path("one/node_modules/a/a2.js"), cx)
517 })
518 .await
519 .unwrap();
520
521 tree.read_with(cx, |tree, _| {
522 assert_eq!(
523 tree.entries(true, 0)
524 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
525 .collect::<Vec<_>>(),
526 vec![
527 (rel_path(""), false),
528 (rel_path(".gitignore"), false),
529 (rel_path("one"), false),
530 (rel_path("one/node_modules"), true),
531 (rel_path("one/node_modules/a"), true),
532 (rel_path("one/node_modules/a/a1.js"), true),
533 (rel_path("one/node_modules/a/a2.js"), true),
534 (rel_path("one/node_modules/b"), true),
535 (rel_path("one/node_modules/b/b1.js"), true),
536 (rel_path("one/node_modules/b/b2.js"), true),
537 (rel_path("one/node_modules/c"), true),
538 (rel_path("two"), false),
539 (rel_path("two/x.js"), false),
540 (rel_path("two/y.js"), false),
541 ]
542 );
543
544 assert_eq!(
545 loaded.file.path.as_ref(),
546 rel_path("one/node_modules/a/a2.js")
547 );
548
549 // Only the newly-expanded directory is scanned.
550 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
551 });
552
553 let path = PathBuf::from("/root/one/node_modules/c/lib");
554
555 // No work happens when files and directories change within an unloaded directory.
556 let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
    // When we open a directory, we check whether each ancestor is a git
    // repository. That means there is one fs.metadata call per ancestor, which we
    // need to subtract here.
560 let ancestors = path.ancestors().count();
561
562 fs.create_dir(path.as_ref()).await.unwrap();
563 cx.executor().run_until_parked();
564
565 assert_eq!(
566 fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count - ancestors,
567 0
568 );
569}
570
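// When a `.gitignore` change un-ignores a directory, its previously unloaded
// contents are scanned, and each newly-loaded directory is read only once.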
571#[gpui::test]
572async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
573 init_test(cx);
574 let fs = FakeFs::new(cx.background_executor.clone());
575 fs.insert_tree(
576 "/root",
577 json!({
578 ".gitignore": "node_modules\n",
579 "a": {
580 "a.js": "",
581 },
582 "b": {
583 "b.js": "",
584 },
585 "node_modules": {
586 "c": {
587 "c.js": "",
588 },
589 "d": {
590 "d.js": "",
591 "e": {
592 "e1.js": "",
593 "e2.js": "",
594 },
595 "f": {
596 "f1.js": "",
597 "f2.js": "",
598 }
599 },
600 },
601 }),
602 )
603 .await;
604
605 let tree = Worktree::local(
606 Path::new("/root"),
607 true,
608 fs.clone(),
609 Default::default(),
610 true,
611 &mut cx.to_async(),
612 )
613 .await
614 .unwrap();
615
616 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
617 .await;
618
619 // Open a file within the gitignored directory, forcing some of its
620 // subdirectories to be read, but not all.
621 let read_dir_count_1 = fs.read_dir_call_count();
622 tree.read_with(cx, |tree, _| {
623 tree.as_local()
624 .unwrap()
625 .refresh_entries_for_paths(vec![rel_path("node_modules/d/d.js").into()])
626 })
627 .recv()
628 .await;
629
630 // Those subdirectories are now loaded.
631 tree.read_with(cx, |tree, _| {
632 assert_eq!(
633 tree.entries(true, 0)
634 .map(|e| (e.path.as_ref(), e.is_ignored))
635 .collect::<Vec<_>>(),
636 &[
637 (rel_path(""), false),
638 (rel_path(".gitignore"), false),
639 (rel_path("a"), false),
640 (rel_path("a/a.js"), false),
641 (rel_path("b"), false),
642 (rel_path("b/b.js"), false),
643 (rel_path("node_modules"), true),
644 (rel_path("node_modules/c"), true),
645 (rel_path("node_modules/d"), true),
646 (rel_path("node_modules/d/d.js"), true),
647 (rel_path("node_modules/d/e"), true),
648 (rel_path("node_modules/d/f"), true),
649 ]
650 );
651 });
652 let read_dir_count_2 = fs.read_dir_call_count();
653 assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
654
    // Update the gitignore so that node_modules is no longer ignored, but one of
    // its subdirectories still is.
657 fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
658 .await
659 .unwrap();
660 cx.executor().run_until_parked();
661
662 // All of the directories that are no longer ignored are now loaded.
663 tree.read_with(cx, |tree, _| {
664 assert_eq!(
665 tree.entries(true, 0)
666 .map(|e| (e.path.as_ref(), e.is_ignored))
667 .collect::<Vec<_>>(),
668 &[
669 (rel_path(""), false),
670 (rel_path(".gitignore"), false),
671 (rel_path("a"), false),
672 (rel_path("a/a.js"), false),
673 (rel_path("b"), false),
674 (rel_path("b/b.js"), false),
675 // This directory is no longer ignored
676 (rel_path("node_modules"), false),
677 (rel_path("node_modules/c"), false),
678 (rel_path("node_modules/c/c.js"), false),
679 (rel_path("node_modules/d"), false),
680 (rel_path("node_modules/d/d.js"), false),
681 // This subdirectory is now ignored
682 (rel_path("node_modules/d/e"), true),
683 (rel_path("node_modules/d/f"), false),
684 (rel_path("node_modules/d/f/f1.js"), false),
685 (rel_path("node_modules/d/f/f2.js"), false),
686 ]
687 );
688 });
689
690 // Each of the newly-loaded directories is scanned only once.
691 let read_dir_count_3 = fs.read_dir_call_count();
692 assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
693}
694
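// Writing files through the worktree creates entries for them, with the
// `is_ignored` flag set according to the containing directory.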
695#[gpui::test]
696async fn test_write_file(cx: &mut TestAppContext) {
697 init_test(cx);
698 cx.executor().allow_parking();
699 let dir = TempTree::new(json!({
700 ".git": {},
701 ".gitignore": "ignored-dir\n",
702 "tracked-dir": {},
703 "ignored-dir": {}
704 }));
705
706 let worktree = Worktree::local(
707 dir.path(),
708 true,
709 Arc::new(RealFs::new(None, cx.executor())),
710 Default::default(),
711 true,
712 &mut cx.to_async(),
713 )
714 .await
715 .unwrap();
716
717 #[cfg(not(target_os = "macos"))]
718 fs::fs_watcher::global(|_| {}).unwrap();
719
720 cx.read(|cx| worktree.read(cx).as_local().unwrap().scan_complete())
721 .await;
722 worktree.flush_fs_events(cx).await;
723
724 worktree
725 .update(cx, |tree, cx| {
726 tree.write_file(
727 rel_path("tracked-dir/file.txt").into(),
728 "hello".into(),
729 Default::default(),
730 encoding_rs::UTF_8,
731 false,
732 cx,
733 )
734 })
735 .await
736 .unwrap();
737 worktree
738 .update(cx, |tree, cx| {
739 tree.write_file(
740 rel_path("ignored-dir/file.txt").into(),
741 "world".into(),
742 Default::default(),
743 encoding_rs::UTF_8,
744 false,
745 cx,
746 )
747 })
748 .await
749 .unwrap();
750 worktree.read_with(cx, |tree, _| {
751 let tracked = tree
752 .entry_for_path(rel_path("tracked-dir/file.txt"))
753 .unwrap();
754 let ignored = tree
755 .entry_for_path(rel_path("ignored-dir/file.txt"))
756 .unwrap();
757 assert!(!tracked.is_ignored);
758 assert!(ignored.is_ignored);
759 });
760}
761
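// Entries matching `file_scan_inclusions` are always included in the worktree,
// even when they are gitignored.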
762#[gpui::test]
763async fn test_file_scan_inclusions(cx: &mut TestAppContext) {
764 init_test(cx);
765 cx.executor().allow_parking();
766 let dir = TempTree::new(json!({
767 ".gitignore": "**/target\n/node_modules\ntop_level.txt\n",
768 "target": {
769 "index": "blah2"
770 },
771 "node_modules": {
772 ".DS_Store": "",
773 "prettier": {
774 "package.json": "{}",
775 },
776 "package.json": "//package.json"
777 },
778 "src": {
779 ".DS_Store": "",
780 "foo": {
781 "foo.rs": "mod another;\n",
782 "another.rs": "// another",
783 },
784 "bar": {
785 "bar.rs": "// bar",
786 },
787 "lib.rs": "mod foo;\nmod bar;\n",
788 },
789 "top_level.txt": "top level file",
790 ".DS_Store": "",
791 }));
792 cx.update(|cx| {
793 cx.update_global::<SettingsStore, _>(|store, cx| {
794 store.update_user_settings(cx, |settings| {
795 settings.project.worktree.file_scan_exclusions = Some(vec![]);
796 settings.project.worktree.file_scan_inclusions = Some(vec![
797 "node_modules/**/package.json".to_string(),
798 "**/.DS_Store".to_string(),
799 ]);
800 });
801 });
802 });
803
804 let tree = Worktree::local(
805 dir.path(),
806 true,
807 Arc::new(RealFs::new(None, cx.executor())),
808 Default::default(),
809 true,
810 &mut cx.to_async(),
811 )
812 .await
813 .unwrap();
814 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
815 .await;
816 tree.flush_fs_events(cx).await;
817 tree.read_with(cx, |tree, _| {
818 // Assert that file_scan_inclusions overrides file_scan_exclusions.
819 check_worktree_entries(
820 tree,
821 &[],
822 &["target", "node_modules"],
823 &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
824 &[
825 "node_modules/prettier/package.json",
826 ".DS_Store",
827 "node_modules/.DS_Store",
828 "src/.DS_Store",
829 ],
830 )
831 });
832}
833
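// When a path matches both `file_scan_exclusions` and `file_scan_inclusions`,
// the exclusion wins and the path is not tracked.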
834#[gpui::test]
835async fn test_file_scan_exclusions_overrules_inclusions(cx: &mut TestAppContext) {
836 init_test(cx);
837 cx.executor().allow_parking();
838 let dir = TempTree::new(json!({
839 ".gitignore": "**/target\n/node_modules\n",
840 "target": {
841 "index": "blah2"
842 },
843 "node_modules": {
844 ".DS_Store": "",
845 "prettier": {
846 "package.json": "{}",
847 },
848 },
849 "src": {
850 ".DS_Store": "",
851 "foo": {
852 "foo.rs": "mod another;\n",
853 "another.rs": "// another",
854 },
855 },
856 ".DS_Store": "",
857 }));
858
859 cx.update(|cx| {
860 cx.update_global::<SettingsStore, _>(|store, cx| {
861 store.update_user_settings(cx, |settings| {
862 settings.project.worktree.file_scan_exclusions =
863 Some(vec!["**/.DS_Store".to_string()]);
864 settings.project.worktree.file_scan_inclusions =
865 Some(vec!["**/.DS_Store".to_string()]);
866 });
867 });
868 });
869
870 let tree = Worktree::local(
871 dir.path(),
872 true,
873 Arc::new(RealFs::new(None, cx.executor())),
874 Default::default(),
875 true,
876 &mut cx.to_async(),
877 )
878 .await
879 .unwrap();
880 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
881 .await;
882 tree.flush_fs_events(cx).await;
883 tree.read_with(cx, |tree, _| {
        // Assert that file_scan_exclusions overrides file_scan_inclusions.
        check_worktree_entries(
            tree,
            &[".DS_Store", "src/.DS_Store"],
888 &["target", "node_modules"],
889 &["src/foo/another.rs", "src/foo/foo.rs", ".gitignore"],
890 &[],
891 )
892 });
893}
894
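// Changing `file_scan_inclusions` rescans the worktree and updates the
// `is_always_included` flag on existing entries.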
895#[gpui::test]
896async fn test_file_scan_inclusions_reindexes_on_setting_change(cx: &mut TestAppContext) {
897 init_test(cx);
898 cx.executor().allow_parking();
899 let dir = TempTree::new(json!({
900 ".gitignore": "**/target\n/node_modules/\n",
901 "target": {
902 "index": "blah2"
903 },
904 "node_modules": {
905 ".DS_Store": "",
906 "prettier": {
907 "package.json": "{}",
908 },
909 },
910 "src": {
911 ".DS_Store": "",
912 "foo": {
913 "foo.rs": "mod another;\n",
914 "another.rs": "// another",
915 },
916 },
917 ".DS_Store": "",
918 }));
919
920 cx.update(|cx| {
921 cx.update_global::<SettingsStore, _>(|store, cx| {
922 store.update_user_settings(cx, |settings| {
923 settings.project.worktree.file_scan_exclusions = Some(vec![]);
924 settings.project.worktree.file_scan_inclusions =
925 Some(vec!["node_modules/**".to_string()]);
926 });
927 });
928 });
929 let tree = Worktree::local(
930 dir.path(),
931 true,
932 Arc::new(RealFs::new(None, cx.executor())),
933 Default::default(),
934 true,
935 &mut cx.to_async(),
936 )
937 .await
938 .unwrap();
939 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
940 .await;
941 tree.flush_fs_events(cx).await;
942
943 tree.read_with(cx, |tree, _| {
944 assert!(
945 tree.entry_for_path(rel_path("node_modules"))
946 .is_some_and(|f| f.is_always_included)
947 );
948 assert!(
949 tree.entry_for_path(rel_path("node_modules/prettier/package.json"))
950 .is_some_and(|f| f.is_always_included)
951 );
952 });
953
954 cx.update(|cx| {
955 cx.update_global::<SettingsStore, _>(|store, cx| {
956 store.update_user_settings(cx, |settings| {
957 settings.project.worktree.file_scan_exclusions = Some(vec![]);
958 settings.project.worktree.file_scan_inclusions = Some(vec![]);
959 });
960 });
961 });
962 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
963 .await;
964 tree.flush_fs_events(cx).await;
965
966 tree.read_with(cx, |tree, _| {
967 assert!(
968 tree.entry_for_path(rel_path("node_modules"))
969 .is_some_and(|f| !f.is_always_included)
970 );
971 assert!(
972 tree.entry_for_path(rel_path("node_modules/prettier/package.json"))
973 .is_some_and(|f| !f.is_always_included)
974 );
975 });
976}
977
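// Entries matching `file_scan_exclusions` are omitted from the worktree, and the
// excluded set is recomputed when the setting changes.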
978#[gpui::test]
979async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
980 init_test(cx);
981 cx.executor().allow_parking();
982 let dir = TempTree::new(json!({
983 ".gitignore": "**/target\n/node_modules\n",
984 "target": {
985 "index": "blah2"
986 },
987 "node_modules": {
988 ".DS_Store": "",
989 "prettier": {
990 "package.json": "{}",
991 },
992 },
993 "src": {
994 ".DS_Store": "",
995 "foo": {
996 "foo.rs": "mod another;\n",
997 "another.rs": "// another",
998 },
999 "bar": {
1000 "bar.rs": "// bar",
1001 },
1002 "lib.rs": "mod foo;\nmod bar;\n",
1003 },
1004 ".DS_Store": "",
1005 }));
1006 cx.update(|cx| {
1007 cx.update_global::<SettingsStore, _>(|store, cx| {
1008 store.update_user_settings(cx, |settings| {
1009 settings.project.worktree.file_scan_exclusions =
1010 Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
1011 });
1012 });
1013 });
1014
1015 let tree = Worktree::local(
1016 dir.path(),
1017 true,
1018 Arc::new(RealFs::new(None, cx.executor())),
1019 Default::default(),
1020 true,
1021 &mut cx.to_async(),
1022 )
1023 .await
1024 .unwrap();
1025 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1026 .await;
1027 tree.flush_fs_events(cx).await;
1028 tree.read_with(cx, |tree, _| {
1029 check_worktree_entries(
1030 tree,
1031 &[
1032 "src/foo/foo.rs",
1033 "src/foo/another.rs",
1034 "node_modules/.DS_Store",
1035 "src/.DS_Store",
1036 ".DS_Store",
1037 ],
1038 &["target", "node_modules"],
1039 &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
1040 &[],
1041 )
1042 });
1043
1044 cx.update(|cx| {
1045 cx.update_global::<SettingsStore, _>(|store, cx| {
1046 store.update_user_settings(cx, |settings| {
1047 settings.project.worktree.file_scan_exclusions =
1048 Some(vec!["**/node_modules/**".to_string()]);
1049 });
1050 });
1051 });
1052 tree.flush_fs_events(cx).await;
1053 cx.executor().run_until_parked();
1054 tree.read_with(cx, |tree, _| {
1055 check_worktree_entries(
1056 tree,
1057 &[
1058 "node_modules/prettier/package.json",
1059 "node_modules/.DS_Store",
1060 "node_modules",
1061 ],
1062 &["target"],
1063 &[
1064 ".gitignore",
1065 "src/lib.rs",
1066 "src/bar/bar.rs",
1067 "src/foo/foo.rs",
1068 "src/foo/another.rs",
1069 "src/.DS_Store",
1070 ".DS_Store",
1071 ],
1072 &[],
1073 )
1074 });
1075}
1076
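// Dotfiles are marked hidden by default; setting `hidden_files` replaces that
// default with the configured globs.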
1077#[gpui::test]
1078async fn test_hidden_files(cx: &mut TestAppContext) {
1079 init_test(cx);
1080 cx.executor().allow_parking();
1081 let dir = TempTree::new(json!({
1082 ".gitignore": "**/target\n",
1083 ".hidden_file": "content",
1084 ".hidden_dir": {
1085 "nested.rs": "code",
1086 },
1087 "src": {
1088 "visible.rs": "code",
1089 },
1090 "logs": {
1091 "app.log": "logs",
1092 "debug.log": "logs",
1093 },
1094 "visible.txt": "content",
1095 }));
1096
1097 let tree = Worktree::local(
1098 dir.path(),
1099 true,
1100 Arc::new(RealFs::new(None, cx.executor())),
1101 Default::default(),
1102 true,
1103 &mut cx.to_async(),
1104 )
1105 .await
1106 .unwrap();
1107 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1108 .await;
1109 tree.flush_fs_events(cx).await;
1110
1111 tree.read_with(cx, |tree, _| {
1112 assert_eq!(
1113 tree.entries(true, 0)
1114 .map(|entry| (entry.path.as_ref(), entry.is_hidden))
1115 .collect::<Vec<_>>(),
1116 vec![
1117 (rel_path(""), false),
1118 (rel_path(".gitignore"), true),
1119 (rel_path(".hidden_dir"), true),
1120 (rel_path(".hidden_dir/nested.rs"), true),
1121 (rel_path(".hidden_file"), true),
1122 (rel_path("logs"), false),
1123 (rel_path("logs/app.log"), false),
1124 (rel_path("logs/debug.log"), false),
1125 (rel_path("src"), false),
1126 (rel_path("src/visible.rs"), false),
1127 (rel_path("visible.txt"), false),
1128 ]
1129 );
1130 });
1131
1132 cx.update(|cx| {
1133 cx.update_global::<SettingsStore, _>(|store, cx| {
1134 store.update_user_settings(cx, |settings| {
1135 settings.project.worktree.hidden_files = Some(vec!["**/*.log".to_string()]);
1136 });
1137 });
1138 });
1139 tree.flush_fs_events(cx).await;
1140 cx.executor().run_until_parked();
1141
1142 tree.read_with(cx, |tree, _| {
1143 assert_eq!(
1144 tree.entries(true, 0)
1145 .map(|entry| (entry.path.as_ref(), entry.is_hidden))
1146 .collect::<Vec<_>>(),
1147 vec![
1148 (rel_path(""), false),
1149 (rel_path(".gitignore"), false),
1150 (rel_path(".hidden_dir"), false),
1151 (rel_path(".hidden_dir/nested.rs"), false),
1152 (rel_path(".hidden_file"), false),
1153 (rel_path("logs"), false),
1154 (rel_path("logs/app.log"), true),
1155 (rel_path("logs/debug.log"), true),
1156 (rel_path("src"), false),
1157 (rel_path("src/visible.rs"), false),
1158 (rel_path("visible.txt"), false),
1159 ]
1160 );
1161 });
1162}
1163
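// Paths matching `file_scan_exclusions` stay absent from the worktree even when
// file system events are generated inside them.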
1164#[gpui::test]
1165async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
1166 init_test(cx);
1167 cx.executor().allow_parking();
1168 let dir = TempTree::new(json!({
1169 ".git": {
1170 "HEAD": "ref: refs/heads/main\n",
1171 "foo": "bar",
1172 },
1173 ".gitignore": "**/target\n/node_modules\ntest_output\n",
1174 "target": {
1175 "index": "blah2"
1176 },
1177 "node_modules": {
1178 ".DS_Store": "",
1179 "prettier": {
1180 "package.json": "{}",
1181 },
1182 },
1183 "src": {
1184 ".DS_Store": "",
1185 "foo": {
1186 "foo.rs": "mod another;\n",
1187 "another.rs": "// another",
1188 },
1189 "bar": {
1190 "bar.rs": "// bar",
1191 },
1192 "lib.rs": "mod foo;\nmod bar;\n",
1193 },
1194 ".DS_Store": "",
1195 }));
1196 cx.update(|cx| {
1197 cx.update_global::<SettingsStore, _>(|store, cx| {
1198 store.update_user_settings(cx, |settings| {
1199 settings.project.worktree.file_scan_exclusions = Some(vec![
1200 "**/.git".to_string(),
1201 "node_modules/".to_string(),
1202 "build_output".to_string(),
1203 ]);
1204 });
1205 });
1206 });
1207
1208 let tree = Worktree::local(
1209 dir.path(),
1210 true,
1211 Arc::new(RealFs::new(None, cx.executor())),
1212 Default::default(),
1213 true,
1214 &mut cx.to_async(),
1215 )
1216 .await
1217 .unwrap();
1218 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1219 .await;
1220 tree.flush_fs_events(cx).await;
1221 tree.read_with(cx, |tree, _| {
1222 check_worktree_entries(
1223 tree,
1224 &[
1225 ".git/HEAD",
1226 ".git/foo",
1227 "node_modules",
1228 "node_modules/.DS_Store",
1229 "node_modules/prettier",
1230 "node_modules/prettier/package.json",
1231 ],
1232 &["target"],
1233 &[
1234 ".DS_Store",
1235 "src/.DS_Store",
1236 "src/lib.rs",
1237 "src/foo/foo.rs",
1238 "src/foo/another.rs",
1239 "src/bar/bar.rs",
1240 ".gitignore",
1241 ],
1242 &[],
1243 )
1244 });
1245
1246 let new_excluded_dir = dir.path().join("build_output");
1247 let new_ignored_dir = dir.path().join("test_output");
1248 std::fs::create_dir_all(&new_excluded_dir)
1249 .unwrap_or_else(|e| panic!("Failed to create a {new_excluded_dir:?} directory: {e}"));
1250 std::fs::create_dir_all(&new_ignored_dir)
1251 .unwrap_or_else(|e| panic!("Failed to create a {new_ignored_dir:?} directory: {e}"));
1252 let node_modules_dir = dir.path().join("node_modules");
1253 let dot_git_dir = dir.path().join(".git");
1254 let src_dir = dir.path().join("src");
1255 for existing_dir in [&node_modules_dir, &dot_git_dir, &src_dir] {
1256 assert!(
1257 existing_dir.is_dir(),
1258 "Expect {existing_dir:?} to be present in the FS already"
1259 );
1260 }
1261
1262 for directory_for_new_file in [
1263 new_excluded_dir,
1264 new_ignored_dir,
1265 node_modules_dir,
1266 dot_git_dir,
1267 src_dir,
1268 ] {
1269 std::fs::write(directory_for_new_file.join("new_file"), "new file contents")
1270 .unwrap_or_else(|e| {
1271 panic!("Failed to create in {directory_for_new_file:?} a new file: {e}")
1272 });
1273 }
1274 tree.flush_fs_events(cx).await;
1275
1276 tree.read_with(cx, |tree, _| {
1277 check_worktree_entries(
1278 tree,
1279 &[
1280 ".git/HEAD",
1281 ".git/foo",
1282 ".git/new_file",
1283 "node_modules",
1284 "node_modules/.DS_Store",
1285 "node_modules/prettier",
1286 "node_modules/prettier/package.json",
1287 "node_modules/new_file",
1288 "build_output",
1289 "build_output/new_file",
1290 "test_output/new_file",
1291 ],
1292 &["target", "test_output"],
1293 &[
1294 ".DS_Store",
1295 "src/.DS_Store",
1296 "src/lib.rs",
1297 "src/foo/foo.rs",
1298 "src/foo/another.rs",
1299 "src/bar/bar.rs",
1300 "src/new_file",
1301 ".gitignore",
1302 ],
1303 &[],
1304 )
1305 });
1306}
1307
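// A worktree rooted directly at a `.git` directory still picks up file system
// events for files created inside it.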
1308#[gpui::test]
1309async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
1310 init_test(cx);
1311 cx.executor().allow_parking();
1312 let dir = TempTree::new(json!({
1313 ".git": {
1314 "HEAD": "ref: refs/heads/main\n",
1315 "foo": "foo contents",
1316 },
1317 }));
1318 let dot_git_worktree_dir = dir.path().join(".git");
1319
1320 let tree = Worktree::local(
1321 dot_git_worktree_dir.clone(),
1322 true,
1323 Arc::new(RealFs::new(None, cx.executor())),
1324 Default::default(),
1325 true,
1326 &mut cx.to_async(),
1327 )
1328 .await
1329 .unwrap();
1330 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1331 .await;
1332 tree.flush_fs_events(cx).await;
1333 tree.read_with(cx, |tree, _| {
1334 check_worktree_entries(tree, &[], &["HEAD", "foo"], &[], &[])
1335 });
1336
1337 std::fs::write(dot_git_worktree_dir.join("new_file"), "new file contents")
1338 .unwrap_or_else(|e| panic!("Failed to create in {dot_git_worktree_dir:?} a new file: {e}"));
1339 tree.flush_fs_events(cx).await;
1340 tree.read_with(cx, |tree, _| {
1341 check_worktree_entries(tree, &[], &["HEAD", "foo", "new_file"], &[], &[])
1342 });
1343}
1344
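// Creating an entry while the initial scan is still in progress must leave an
// observer's patched snapshot identical to the worktree's own snapshot.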
1345#[gpui::test(iterations = 30)]
1346async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
1347 init_test(cx);
1348 let fs = FakeFs::new(cx.background_executor.clone());
1349 fs.insert_tree(
1350 "/root",
1351 json!({
1352 "b": {},
1353 "c": {},
1354 "d": {},
1355 }),
1356 )
1357 .await;
1358
1359 let tree = Worktree::local(
1360 "/root".as_ref(),
1361 true,
1362 fs,
1363 Default::default(),
1364 true,
1365 &mut cx.to_async(),
1366 )
1367 .await
1368 .unwrap();
1369
1370 let snapshot1 = tree.update(cx, |tree, cx| {
1371 let tree = tree.as_local_mut().unwrap();
1372 let snapshot = Arc::new(Mutex::new(tree.snapshot()));
1373 tree.observe_updates(0, cx, {
1374 let snapshot = snapshot.clone();
1375 let settings = tree.settings();
1376 move |update| {
1377 snapshot
1378 .lock()
1379 .apply_remote_update(update, &settings.file_scan_inclusions);
1380 async { true }
1381 }
1382 });
1383 snapshot
1384 });
1385
1386 let entry = tree
1387 .update(cx, |tree, cx| {
1388 tree.as_local_mut()
1389 .unwrap()
1390 .create_entry(rel_path("a/e").into(), true, None, cx)
1391 })
1392 .await
1393 .unwrap()
1394 .into_included()
1395 .unwrap();
1396 assert!(entry.is_dir());
1397
1398 cx.executor().run_until_parked();
1399 tree.read_with(cx, |tree, _| {
1400 assert_eq!(
1401 tree.entry_for_path(rel_path("a/e")).unwrap().kind,
1402 EntryKind::Dir
1403 );
1404 });
1405
1406 let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
1407 assert_eq!(
1408 snapshot1.lock().entries(true, 0).collect::<Vec<_>>(),
1409 snapshot2.entries(true, 0).collect::<Vec<_>>()
1410 );
1411}
1412
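// `create_entry` creates any missing parent directories, on both the fake and
// the real file system.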
1413#[gpui::test]
1414async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
1415 init_test(cx);
1416 cx.executor().allow_parking();
1417
1418 let fs_fake = FakeFs::new(cx.background_executor.clone());
1419 fs_fake
1420 .insert_tree(
1421 "/root",
1422 json!({
1423 "a": {},
1424 }),
1425 )
1426 .await;
1427
1428 let tree_fake = Worktree::local(
1429 "/root".as_ref(),
1430 true,
1431 fs_fake,
1432 Default::default(),
1433 true,
1434 &mut cx.to_async(),
1435 )
1436 .await
1437 .unwrap();
1438
1439 let entry = tree_fake
1440 .update(cx, |tree, cx| {
1441 tree.as_local_mut().unwrap().create_entry(
1442 rel_path("a/b/c/d.txt").into(),
1443 false,
1444 None,
1445 cx,
1446 )
1447 })
1448 .await
1449 .unwrap()
1450 .into_included()
1451 .unwrap();
1452 assert!(entry.is_file());
1453
1454 cx.executor().run_until_parked();
1455 tree_fake.read_with(cx, |tree, _| {
1456 assert!(
1457 tree.entry_for_path(rel_path("a/b/c/d.txt"))
1458 .unwrap()
1459 .is_file()
1460 );
1461 assert!(tree.entry_for_path(rel_path("a/b/c")).unwrap().is_dir());
1462 assert!(tree.entry_for_path(rel_path("a/b")).unwrap().is_dir());
1463 });
1464
1465 let fs_real = Arc::new(RealFs::new(None, cx.executor()));
1466 let temp_root = TempTree::new(json!({
1467 "a": {}
1468 }));
1469
1470 let tree_real = Worktree::local(
1471 temp_root.path(),
1472 true,
1473 fs_real,
1474 Default::default(),
1475 true,
1476 &mut cx.to_async(),
1477 )
1478 .await
1479 .unwrap();
1480
1481 let entry = tree_real
1482 .update(cx, |tree, cx| {
1483 tree.as_local_mut().unwrap().create_entry(
1484 rel_path("a/b/c/d.txt").into(),
1485 false,
1486 None,
1487 cx,
1488 )
1489 })
1490 .await
1491 .unwrap()
1492 .into_included()
1493 .unwrap();
1494 assert!(entry.is_file());
1495
1496 cx.executor().run_until_parked();
1497 tree_real.read_with(cx, |tree, _| {
1498 assert!(
1499 tree.entry_for_path(rel_path("a/b/c/d.txt"))
1500 .unwrap()
1501 .is_file()
1502 );
1503 assert!(tree.entry_for_path(rel_path("a/b/c")).unwrap().is_dir());
1504 assert!(tree.entry_for_path(rel_path("a/b")).unwrap().is_dir());
1505 });
1506
    // Smallest change: create a file whose parent directories already exist.
1508 let entry = tree_real
1509 .update(cx, |tree, cx| {
1510 tree.as_local_mut().unwrap().create_entry(
1511 rel_path("a/b/c/e.txt").into(),
1512 false,
1513 None,
1514 cx,
1515 )
1516 })
1517 .await
1518 .unwrap()
1519 .into_included()
1520 .unwrap();
1521 assert!(entry.is_file());
1522
1523 cx.executor().run_until_parked();
1524 tree_real.read_with(cx, |tree, _| {
1525 assert!(
1526 tree.entry_for_path(rel_path("a/b/c/e.txt"))
1527 .unwrap()
1528 .is_file()
1529 );
1530 });
1531
    // Largest change: create a file for which every parent directory must be created.
1533 let entry = tree_real
1534 .update(cx, |tree, cx| {
1535 tree.as_local_mut().unwrap().create_entry(
1536 rel_path("d/e/f/g.txt").into(),
1537 false,
1538 None,
1539 cx,
1540 )
1541 })
1542 .await
1543 .unwrap()
1544 .into_included()
1545 .unwrap();
1546 assert!(entry.is_file());
1547
1548 cx.executor().run_until_parked();
1549 tree_real.read_with(cx, |tree, _| {
1550 assert!(
1551 tree.entry_for_path(rel_path("d/e/f/g.txt"))
1552 .unwrap()
1553 .is_file()
1554 );
1555 assert!(tree.entry_for_path(rel_path("d/e/f")).unwrap().is_dir());
1556 assert!(tree.entry_for_path(rel_path("d/e")).unwrap().is_dir());
1557 assert!(tree.entry_for_path(rel_path("d")).unwrap().is_dir());
1558 });
1559}
1560
1561#[gpui::test]
1562async fn test_create_file_in_expanded_gitignored_dir(cx: &mut TestAppContext) {
1563 // Tests the behavior of our worktree refresh when a file in a gitignored directory
1564 // is created.
1565 init_test(cx);
1566 let fs = FakeFs::new(cx.background_executor.clone());
1567 fs.insert_tree(
1568 "/root",
1569 json!({
1570 ".gitignore": "ignored_dir\n",
1571 "ignored_dir": {
1572 "existing_file.txt": "existing content",
1573 "another_file.txt": "another content",
1574 },
1575 }),
1576 )
1577 .await;
1578
1579 let tree = Worktree::local(
1580 Path::new("/root"),
1581 true,
1582 fs.clone(),
1583 Default::default(),
1584 true,
1585 &mut cx.to_async(),
1586 )
1587 .await
1588 .unwrap();
1589
1590 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1591 .await;
1592
1593 tree.read_with(cx, |tree, _| {
1594 let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
1595 assert!(ignored_dir.is_ignored);
1596 assert_eq!(ignored_dir.kind, EntryKind::UnloadedDir);
1597 });
1598
1599 tree.update(cx, |tree, cx| {
1600 tree.load_file(rel_path("ignored_dir/existing_file.txt"), cx)
1601 })
1602 .await
1603 .unwrap();
1604
1605 tree.read_with(cx, |tree, _| {
1606 let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
1607 assert!(ignored_dir.is_ignored);
1608 assert_eq!(ignored_dir.kind, EntryKind::Dir);
1609
1610 assert!(
1611 tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
1612 .is_some()
1613 );
1614 assert!(
1615 tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
1616 .is_some()
1617 );
1618 });
1619
1620 let entry = tree
1621 .update(cx, |tree, cx| {
1622 tree.create_entry(rel_path("ignored_dir/new_file.txt").into(), false, None, cx)
1623 })
1624 .await
1625 .unwrap();
1626 assert!(entry.into_included().is_some());
1627
1628 cx.executor().run_until_parked();
1629
1630 tree.read_with(cx, |tree, _| {
1631 let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
1632 assert!(ignored_dir.is_ignored);
1633 assert_eq!(
1634 ignored_dir.kind,
1635 EntryKind::Dir,
1636 "ignored_dir should still be loaded, not UnloadedDir"
1637 );
1638
1639 assert!(
1640 tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
1641 .is_some(),
1642 "existing_file.txt should still be visible"
1643 );
1644 assert!(
1645 tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
1646 .is_some(),
1647 "another_file.txt should still be visible"
1648 );
1649 assert!(
1650 tree.entry_for_path(rel_path("ignored_dir/new_file.txt"))
1651 .is_some(),
1652 "new_file.txt should be visible"
1653 );
1654 });
1655}
1656
1657#[gpui::test]
1658async fn test_fs_event_for_gitignored_dir_does_not_lose_contents(cx: &mut TestAppContext) {
    // Tests that a directory-modification fs event for an already-expanded gitignored
    // directory does not cause its contents to be dropped from the worktree.
1661 init_test(cx);
1662 let fs = FakeFs::new(cx.background_executor.clone());
1663 fs.insert_tree(
1664 "/root",
1665 json!({
1666 ".gitignore": "ignored_dir\n",
1667 "ignored_dir": {
1668 "file1.txt": "content1",
1669 "file2.txt": "content2",
1670 },
1671 }),
1672 )
1673 .await;
1674
1675 let tree = Worktree::local(
1676 Path::new("/root"),
1677 true,
1678 fs.clone(),
1679 Default::default(),
1680 true,
1681 &mut cx.to_async(),
1682 )
1683 .await
1684 .unwrap();
1685
1686 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1687 .await;
1688
1689 // Load a file to expand the ignored directory
1690 tree.update(cx, |tree, cx| {
1691 tree.load_file(rel_path("ignored_dir/file1.txt"), cx)
1692 })
1693 .await
1694 .unwrap();
1695
1696 tree.read_with(cx, |tree, _| {
1697 let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
1698 assert_eq!(ignored_dir.kind, EntryKind::Dir);
1699 assert!(
1700 tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
1701 .is_some()
1702 );
1703 assert!(
1704 tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
1705 .is_some()
1706 );
1707 });
1708
1709 fs.emit_fs_event("/root/ignored_dir", Some(fs::PathEventKind::Changed));
1710 tree.flush_fs_events(cx).await;
1711
1712 tree.read_with(cx, |tree, _| {
1713 let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
1714 assert_eq!(
1715 ignored_dir.kind,
1716 EntryKind::Dir,
1717 "ignored_dir should still be loaded (Dir), not UnloadedDir"
1718 );
1719 assert!(
1720 tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
1721 .is_some(),
1722 "file1.txt should still be visible after directory fs event"
1723 );
1724 assert!(
1725 tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
1726 .is_some(),
1727 "file2.txt should still be visible after directory fs event"
1728 );
1729 });
1730}
1731
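// Randomized test: mutate the worktree during the initial scan and verify that
// snapshots patched via `observe_updates` converge to the final scanned state.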
1732#[gpui::test(iterations = 100)]
1733async fn test_random_worktree_operations_during_initial_scan(
1734 cx: &mut TestAppContext,
1735 mut rng: StdRng,
1736) {
1737 init_test(cx);
1738 let operations = env::var("OPERATIONS")
1739 .map(|o| o.parse().unwrap())
1740 .unwrap_or(5);
1741 let initial_entries = env::var("INITIAL_ENTRIES")
1742 .map(|o| o.parse().unwrap())
1743 .unwrap_or(20);
1744
1745 let root_dir = Path::new(path!("/test"));
1746 let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
1747 fs.as_fake().insert_tree(root_dir, json!({})).await;
1748 for _ in 0..initial_entries {
1749 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1750 }
1751 log::info!("generated initial tree");
1752
1753 let worktree = Worktree::local(
1754 root_dir,
1755 true,
1756 fs.clone(),
1757 Default::default(),
1758 true,
1759 &mut cx.to_async(),
1760 )
1761 .await
1762 .unwrap();
1763
1764 let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
1765 let updates = Arc::new(Mutex::new(Vec::new()));
1766 worktree.update(cx, |tree, cx| {
1767 check_worktree_change_events(tree, cx);
1768
1769 tree.as_local_mut().unwrap().observe_updates(0, cx, {
1770 let updates = updates.clone();
1771 move |update| {
1772 updates.lock().push(update);
1773 async { true }
1774 }
1775 });
1776 });
1777
1778 for _ in 0..operations {
1779 worktree
1780 .update(cx, |worktree, cx| {
1781 randomly_mutate_worktree(worktree, &mut rng, cx)
1782 })
1783 .await
1784 .log_err();
1785 worktree.read_with(cx, |tree, _| {
1786 tree.as_local().unwrap().snapshot().check_invariants(true)
1787 });
1788
1789 if rng.random_bool(0.6) {
1790 snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
1791 }
1792 }
1793
1794 worktree
1795 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1796 .await;
1797
1798 cx.executor().run_until_parked();
1799
1800 let final_snapshot = worktree.read_with(cx, |tree, _| {
1801 let tree = tree.as_local().unwrap();
1802 let snapshot = tree.snapshot();
1803 snapshot.check_invariants(true);
1804 snapshot
1805 });
1806
1807 let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());
1808
1809 for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
1810 let mut updated_snapshot = snapshot.clone();
1811 for update in updates.lock().iter() {
1812 if update.scan_id >= updated_snapshot.scan_id() as u64 {
1813 updated_snapshot
1814 .apply_remote_update(update.clone(), &settings.file_scan_inclusions);
1815 }
1816 }
1817
1818 assert_eq!(
1819 updated_snapshot.entries(true, 0).collect::<Vec<_>>(),
1820 final_snapshot.entries(true, 0).collect::<Vec<_>>(),
1821 "wrong updates after snapshot {i}: {updates:#?}",
1822 );
1823 }
1824}
1825
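// Randomized test: interleave worktree and fs mutations with batched fs events,
// then check that a freshly-scanned worktree and all update-patched snapshots
// agree with the final state.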
1826#[gpui::test(iterations = 100)]
1827async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
1828 init_test(cx);
1829 let operations = env::var("OPERATIONS")
1830 .map(|o| o.parse().unwrap())
1831 .unwrap_or(40);
1832 let initial_entries = env::var("INITIAL_ENTRIES")
1833 .map(|o| o.parse().unwrap())
1834 .unwrap_or(20);
1835
1836 let root_dir = Path::new(path!("/test"));
1837 let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
1838 fs.as_fake().insert_tree(root_dir, json!({})).await;
1839 for _ in 0..initial_entries {
1840 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1841 }
1842 log::info!("generated initial tree");
1843
1844 let worktree = Worktree::local(
1845 root_dir,
1846 true,
1847 fs.clone(),
1848 Default::default(),
1849 true,
1850 &mut cx.to_async(),
1851 )
1852 .await
1853 .unwrap();
1854
1855 let updates = Arc::new(Mutex::new(Vec::new()));
1856 worktree.update(cx, |tree, cx| {
1857 check_worktree_change_events(tree, cx);
1858
1859 tree.as_local_mut().unwrap().observe_updates(0, cx, {
1860 let updates = updates.clone();
1861 move |update| {
1862 updates.lock().push(update);
1863 async { true }
1864 }
1865 });
1866 });
1867
1868 worktree
1869 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1870 .await;
1871
1872 fs.as_fake().pause_events();
1873 let mut snapshots = Vec::new();
1874 let mut mutations_len = operations;
1875 while mutations_len > 1 {
1876 if rng.random_bool(0.2) {
1877 worktree
1878 .update(cx, |worktree, cx| {
1879 randomly_mutate_worktree(worktree, &mut rng, cx)
1880 })
1881 .await
1882 .log_err();
1883 } else {
1884 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1885 }
1886
1887 let buffered_event_count = fs.as_fake().buffered_event_count();
1888 if buffered_event_count > 0 && rng.random_bool(0.3) {
1889 let len = rng.random_range(0..=buffered_event_count);
1890 log::info!("flushing {} events", len);
1891 fs.as_fake().flush_events(len);
1892 } else {
1893 randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
1894 mutations_len -= 1;
1895 }
1896
1897 cx.executor().run_until_parked();
1898 if rng.random_bool(0.2) {
1899 log::info!("storing snapshot {}", snapshots.len());
1900 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1901 snapshots.push(snapshot);
1902 }
1903 }
1904
1905 log::info!("quiescing");
1906 fs.as_fake().flush_events(usize::MAX);
1907 cx.executor().run_until_parked();
1908
1909 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1910 snapshot.check_invariants(true);
1911 let expanded_paths = snapshot
1912 .expanded_entries()
1913 .map(|e| e.path.clone())
1914 .collect::<Vec<_>>();
1915
1916 {
1917 let new_worktree = Worktree::local(
1918 root_dir,
1919 true,
1920 fs.clone(),
1921 Default::default(),
1922 true,
1923 &mut cx.to_async(),
1924 )
1925 .await
1926 .unwrap();
1927 new_worktree
1928 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1929 .await;
1930 new_worktree
1931 .update(cx, |tree, _| {
1932 tree.as_local_mut()
1933 .unwrap()
1934 .refresh_entries_for_paths(expanded_paths)
1935 })
1936 .recv()
1937 .await;
1938 let new_snapshot =
1939 new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1940 assert_eq!(
1941 snapshot.entries_without_ids(true),
1942 new_snapshot.entries_without_ids(true)
1943 );
1944 }
1945
1946 let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());
1947
1948 for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
1949 for update in updates.lock().iter() {
1950 if update.scan_id >= prev_snapshot.scan_id() as u64 {
1951 prev_snapshot.apply_remote_update(update.clone(), &settings.file_scan_inclusions);
1952 }
1953 }
1954
1955 assert_eq!(
1956 prev_snapshot
1957 .entries(true, 0)
1958 .map(ignore_pending_dir)
1959 .collect::<Vec<_>>(),
1960 snapshot
1961 .entries(true, 0)
1962 .map(ignore_pending_dir)
1963 .collect::<Vec<_>>(),
1964 "wrong updates after snapshot {i}: {updates:#?}",
1965 );
1966 }
1967
1968 fn ignore_pending_dir(entry: &Entry) -> Entry {
1969 let mut entry = entry.clone();
1970 if entry.kind.is_dir() {
1971 entry.kind = EntryKind::Dir
1972 }
1973 entry
1974 }
1975}
1976
1977// The worktree's `UpdatedEntries` event can be used to follow along with
1978// all changes to the worktree's snapshot.
1979fn check_worktree_change_events(tree: &mut Worktree, cx: &mut Context<Worktree>) {
1980 let mut entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
1981 cx.subscribe(&cx.entity(), move |tree, _, event, _| {
1982 if let Event::UpdatedEntries(changes) = event {
1983 for (path, _, change_type) in changes.iter() {
1984 let entry = tree.entry_for_path(path).cloned();
1985 let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
1986 Ok(ix) | Err(ix) => ix,
1987 };
1988 match change_type {
1989 PathChange::Added => entries.insert(ix, entry.unwrap()),
1990 PathChange::Removed => drop(entries.remove(ix)),
1991 PathChange::Updated => {
1992 let entry = entry.unwrap();
1993 let existing_entry = entries.get_mut(ix).unwrap();
1994 assert_eq!(existing_entry.path, entry.path);
1995 *existing_entry = entry;
1996 }
1997 PathChange::AddedOrUpdated | PathChange::Loaded => {
1998 let entry = entry.unwrap();
1999 if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
2000 *entries.get_mut(ix).unwrap() = entry;
2001 } else {
2002 entries.insert(ix, entry);
2003 }
2004 }
2005 }
2006 }
2007
2008 let new_entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
2009 assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
2010 }
2011 })
2012 .detach();
2013}
2014
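// Randomly deletes an existing entry or creates/overwrites a file or directory
// through the worktree API.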
2015fn randomly_mutate_worktree(
2016 worktree: &mut Worktree,
2017 rng: &mut impl Rng,
2018 cx: &mut Context<Worktree>,
2019) -> Task<Result<()>> {
2020 log::info!("mutating worktree");
2021 let worktree = worktree.as_local_mut().unwrap();
2022 let snapshot = worktree.snapshot();
2023 let entry = snapshot.entries(false, 0).choose(rng).unwrap();
2024
2025 match rng.random_range(0_u32..100) {
2026 0..=33 if entry.path.as_ref() != RelPath::empty() => {
2027 log::info!("deleting entry {:?} ({})", entry.path, entry.id.to_usize());
2028 worktree.delete_entry(entry.id, false, cx).unwrap()
2029 }
2030 _ => {
2031 if entry.is_dir() {
2032 let child_path = entry.path.join(rel_path(&random_filename(rng)));
2033 let is_dir = rng.random_bool(0.3);
2034 log::info!(
2035 "creating {} at {:?}",
2036 if is_dir { "dir" } else { "file" },
2037 child_path,
2038 );
2039 let task = worktree.create_entry(child_path, is_dir, None, cx);
2040 cx.background_spawn(async move {
2041 task.await?;
2042 Ok(())
2043 })
2044 } else {
2045 log::info!(
2046 "overwriting file {:?} ({})",
2047 &entry.path,
2048 entry.id.to_usize()
2049 );
2050 let task = worktree.write_file(
2051 entry.path.clone(),
2052 "".into(),
2053 Default::default(),
2054 encoding_rs::UTF_8,
2055 false,
2056 cx,
2057 );
2058 cx.background_spawn(async move {
2059 task.await?;
2060 Ok(())
2061 })
2062 }
2063 }
2064 }
2065}
2066
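// Randomly mutates the fake file system directly: inserting files and
// directories, writing `.gitignore` files, renaming, and deleting.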
2067async fn randomly_mutate_fs(
2068 fs: &Arc<dyn Fs>,
2069 root_path: &Path,
2070 insertion_probability: f64,
2071 rng: &mut impl Rng,
2072) {
2073 log::info!("mutating fs");
2074 let mut files = Vec::new();
2075 let mut dirs = Vec::new();
2076 for path in fs.as_fake().paths(false) {
2077 if path.starts_with(root_path) {
2078 if fs.is_file(&path).await {
2079 files.push(path);
2080 } else {
2081 dirs.push(path);
2082 }
2083 }
2084 }
2085
2086 if (files.is_empty() && dirs.len() == 1) || rng.random_bool(insertion_probability) {
2087 let path = dirs.choose(rng).unwrap();
2088 let new_path = path.join(random_filename(rng));
2089
2090 if rng.random() {
2091 log::info!(
2092 "creating dir {:?}",
2093 new_path.strip_prefix(root_path).unwrap()
2094 );
2095 fs.create_dir(&new_path).await.unwrap();
2096 } else {
2097 log::info!(
2098 "creating file {:?}",
2099 new_path.strip_prefix(root_path).unwrap()
2100 );
2101 fs.create_file(&new_path, Default::default()).await.unwrap();
2102 }
2103 } else if rng.random_bool(0.05) {
2104 let ignore_dir_path = dirs.choose(rng).unwrap();
2105 let ignore_path = ignore_dir_path.join(GITIGNORE);
2106
2107 let subdirs = dirs
2108 .iter()
2109 .filter(|d| d.starts_with(ignore_dir_path))
2110 .cloned()
2111 .collect::<Vec<_>>();
2112 let subfiles = files
2113 .iter()
2114 .filter(|d| d.starts_with(ignore_dir_path))
2115 .cloned()
2116 .collect::<Vec<_>>();
2117 let files_to_ignore = {
2118 let len = rng.random_range(0..=subfiles.len());
2119 subfiles.choose_multiple(rng, len)
2120 };
2121 let dirs_to_ignore = {
2122 let len = rng.random_range(0..subdirs.len());
2123 subdirs.choose_multiple(rng, len)
2124 };
2125
2126 let mut ignore_contents = String::new();
2127 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
2128 writeln!(
2129 ignore_contents,
2130 "{}",
2131 path_to_ignore
2132 .strip_prefix(ignore_dir_path)
2133 .unwrap()
2134 .to_str()
2135 .unwrap()
2136 )
2137 .unwrap();
2138 }
2139 log::info!(
2140 "creating gitignore {:?} with contents:\n{}",
2141 ignore_path.strip_prefix(root_path).unwrap(),
2142 ignore_contents
2143 );
2144 fs.save(
2145 &ignore_path,
2146 &ignore_contents.as_str().into(),
2147 Default::default(),
2148 )
2149 .await
2150 .unwrap();
2151 } else {
2152 let old_path = {
2153 let file_path = files.choose(rng);
2154 let dir_path = dirs[1..].choose(rng);
2155 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
2156 };
2157
2158 let is_rename = rng.random();
2159 if is_rename {
2160 let new_path_parent = dirs
2161 .iter()
2162 .filter(|d| !d.starts_with(old_path))
2163 .choose(rng)
2164 .unwrap();
2165
2166 let overwrite_existing_dir =
2167 !old_path.starts_with(new_path_parent) && rng.random_bool(0.3);
2168 let new_path = if overwrite_existing_dir {
2169 fs.remove_dir(
2170 new_path_parent,
2171 RemoveOptions {
2172 recursive: true,
2173 ignore_if_not_exists: true,
2174 },
2175 )
2176 .await
2177 .unwrap();
2178 new_path_parent.to_path_buf()
2179 } else {
2180 new_path_parent.join(random_filename(rng))
2181 };
2182
2183 log::info!(
2184 "renaming {:?} to {}{:?}",
2185 old_path.strip_prefix(root_path).unwrap(),
2186 if overwrite_existing_dir {
2187 "overwrite "
2188 } else {
2189 ""
2190 },
2191 new_path.strip_prefix(root_path).unwrap()
2192 );
2193 fs.rename(
2194 old_path,
2195 &new_path,
2196 fs::RenameOptions {
2197 overwrite: true,
2198 ignore_if_exists: true,
2199 create_parents: false,
2200 },
2201 )
2202 .await
2203 .unwrap();
2204 } else if fs.is_file(old_path).await {
2205 log::info!(
2206 "deleting file {:?}",
2207 old_path.strip_prefix(root_path).unwrap()
2208 );
2209 fs.remove_file(old_path, Default::default()).await.unwrap();
2210 } else {
2211 log::info!(
2212 "deleting dir {:?}",
2213 old_path.strip_prefix(root_path).unwrap()
2214 );
2215 fs.remove_dir(
2216 old_path,
2217 RemoveOptions {
2218 recursive: true,
2219 ignore_if_not_exists: true,
2220 },
2221 )
2222 .await
2223 .unwrap();
2224 }
2225 }
2226}
2227
2228fn random_filename(rng: &mut impl Rng) -> String {
2229 (0..6)
2230 .map(|_| rng.sample(rand::distr::Alphanumeric))
2231 .map(char::from)
2232 .collect()
2233}
2234
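// A single-file worktree rooted at a private file (here, `.env`) marks its root
// entry as private.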
2235#[gpui::test]
2236async fn test_private_single_file_worktree(cx: &mut TestAppContext) {
2237 init_test(cx);
2238 let fs = FakeFs::new(cx.background_executor.clone());
2239 fs.insert_tree("/", json!({".env": "PRIVATE=secret\n"}))
2240 .await;
2241 let tree = Worktree::local(
2242 Path::new("/.env"),
2243 true,
2244 fs.clone(),
2245 Default::default(),
2246 true,
2247 &mut cx.to_async(),
2248 )
2249 .await
2250 .unwrap();
2251 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2252 .await;
2253 tree.read_with(cx, |tree, _| {
2254 let entry = tree.entry_for_path(rel_path("")).unwrap();
2255 assert!(entry.is_private);
2256 });
2257}
2258
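// A worktree opened on a subdirectory of a repository should discover the `.git` directory
// that lives above its root, and should keep reporting that repository after the worktree
// root is touched and rescanned.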
2259#[gpui::test]
2260async fn test_repository_above_root(executor: BackgroundExecutor, cx: &mut TestAppContext) {
2261 init_test(cx);
2262
2263 let fs = FakeFs::new(executor);
2264 fs.insert_tree(
2265 path!("/root"),
2266 json!({
2267 ".git": {},
2268 "subproject": {
2269 "a.txt": "A"
2270 }
2271 }),
2272 )
2273 .await;
2274 let worktree = Worktree::local(
2275 path!("/root/subproject").as_ref(),
2276 true,
2277 fs.clone(),
2278 Arc::default(),
2279 true,
2280 &mut cx.to_async(),
2281 )
2282 .await
2283 .unwrap();
2284 worktree
2285 .update(cx, |worktree, _| {
2286 worktree.as_local().unwrap().scan_complete()
2287 })
2288 .await;
2289 cx.run_until_parked();
2290 let repos = worktree.update(cx, |worktree, _| {
2291 worktree.as_local().unwrap().repositories()
2292 });
2293 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root")).into()]);
2294
2295 fs.touch_path(path!("/root/subproject")).await;
2296 worktree
2297 .update(cx, |worktree, _| {
2298 worktree.as_local().unwrap().scan_complete()
2299 })
2300 .await;
2301 cx.run_until_parked();
2302
2303 let repos = worktree.update(cx, |worktree, _| {
2304 worktree.as_local().unwrap().repositories()
2305 });
2306 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root")).into()]);
2307}
2308
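// Exercises the global excludes file (`~/.config/git/ignore`) together with a repository's
// own `.gitignore`:
// - `foo` and `baz` are unanchored patterns, so they apply anywhere under a repository;
// - `/bar` is anchored, so it only matches `bar` at a repository root (here `project/bar`
//   and `subrepo/bar`, but not `sub/bar`);
// - the project's `.gitignore` re-includes `baz` via the `!baz` negation.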
2309#[gpui::test]
2310async fn test_global_gitignore(executor: BackgroundExecutor, cx: &mut TestAppContext) {
2311 init_test(cx);
2312
2313 let home = paths::home_dir();
2314 let fs = FakeFs::new(executor);
2315 fs.insert_tree(
2316 home,
2317 json!({
2318 ".config": {
2319 "git": {
2320 "ignore": "foo\n/bar\nbaz\n"
2321 }
2322 },
2323 "project": {
2324 ".git": {},
2325 ".gitignore": "!baz",
2326 "foo": "",
2327 "bar": "",
2328 "sub": {
2329 "bar": "",
2330 },
2331 "subrepo": {
2332 ".git": {},
2333 "bar": ""
2334 },
2335 "baz": ""
2336 }
2337 }),
2338 )
2339 .await;
2340 let worktree = Worktree::local(
2341 home.join("project"),
2342 true,
2343 fs.clone(),
2344 Arc::default(),
2345 true,
2346 &mut cx.to_async(),
2347 )
2348 .await
2349 .unwrap();
2350 worktree
2351 .update(cx, |worktree, _| {
2352 worktree.as_local().unwrap().scan_complete()
2353 })
2354 .await;
2355 cx.run_until_parked();
2356
2357 // .gitignore overrides excludesFile, and anchored paths in excludesFile are resolved
2358 // relative to the nearest containing repository
2359 worktree.update(cx, |worktree, _cx| {
2360 check_worktree_entries(
2361 worktree,
2362 &[],
2363 &["foo", "bar", "subrepo/bar"],
2364 &["sub/bar", "baz"],
2365 &[],
2366 );
2367 });
2368
2369 // Ignore statuses are updated when excludesFile changes
2370 fs.write(
2371 &home.join(".config").join("git").join("ignore"),
2372 "/bar\nbaz\n".as_bytes(),
2373 )
2374 .await
2375 .unwrap();
2376 worktree
2377 .update(cx, |worktree, _| {
2378 worktree.as_local().unwrap().scan_complete()
2379 })
2380 .await;
2381 cx.run_until_parked();
2382
2383 worktree.update(cx, |worktree, _cx| {
2384 check_worktree_entries(
2385 worktree,
2386 &[],
2387 &["bar", "subrepo/bar"],
2388 &["foo", "sub/bar", "baz"],
2389 &[],
2390 );
2391 });
2392
    // Ignore statuses are updated when a .git directory is added or removed
2394 fs.remove_dir(
2395 &home.join("project").join("subrepo").join(".git"),
2396 RemoveOptions {
2397 recursive: true,
2398 ..Default::default()
2399 },
2400 )
2401 .await
2402 .unwrap();
2403 worktree
2404 .update(cx, |worktree, _| {
2405 worktree.as_local().unwrap().scan_complete()
2406 })
2407 .await;
2408 cx.run_until_parked();
2409
2410 worktree.update(cx, |worktree, _cx| {
2411 check_worktree_entries(
2412 worktree,
2413 &[],
2414 &["bar"],
2415 &["foo", "sub/bar", "baz", "subrepo/bar"],
2416 &[],
2417 );
2418 });
2419}
2420
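// `.git/info/exclude` acts as a repository-local ignore file: the `.env.*` pattern would hide
// both `.env.example` and `.env.local`, but the checked-in `.gitignore` re-includes
// `.env.example` via `!.env.example`.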
2421#[gpui::test]
2422async fn test_repo_exclude(executor: BackgroundExecutor, cx: &mut TestAppContext) {
2423 init_test(cx);
2424
2425 let fs = FakeFs::new(executor);
2426 let project_dir = Path::new(path!("/project"));
2427 fs.insert_tree(
2428 project_dir,
2429 json!({
2430 ".git": {
2431 "info": {
2432 "exclude": ".env.*"
2433 }
2434 },
2435 ".env.example": "secret=xxxx",
2436 ".env.local": "secret=1234",
2437 ".gitignore": "!.env.example",
2438 "README.md": "# Repo Exclude",
2439 "src": {
2440 "main.rs": "fn main() {}",
2441 },
2442 }),
2443 )
2444 .await;
2445
2446 let worktree = Worktree::local(
2447 project_dir,
2448 true,
2449 fs.clone(),
2450 Default::default(),
2451 true,
2452 &mut cx.to_async(),
2453 )
2454 .await
2455 .unwrap();
2456 worktree
2457 .update(cx, |worktree, _| {
2458 worktree.as_local().unwrap().scan_complete()
2459 })
2460 .await;
2461 cx.run_until_parked();
2462
    // The `!.env.example` negation in .gitignore overrides the `.env.*` pattern
    // in .git/info/exclude
2464 worktree.update(cx, |worktree, _cx| {
2465 let expected_excluded_paths = [];
2466 let expected_ignored_paths = [".env.local"];
2467 let expected_tracked_paths = [".env.example", "README.md", "src/main.rs"];
2468 let expected_included_paths = [];
2469
2470 check_worktree_entries(
2471 worktree,
2472 &expected_excluded_paths,
2473 &expected_ignored_paths,
2474 &expected_tracked_paths,
2475 &expected_included_paths,
2476 );
2477 });
2478
    // Ignore statuses are updated when the .git/info/exclude file changes
2480 fs.write(
2481 &project_dir.join(DOT_GIT).join(REPO_EXCLUDE),
2482 ".env.example".as_bytes(),
2483 )
2484 .await
2485 .unwrap();
2486 worktree
2487 .update(cx, |worktree, _| {
2488 worktree.as_local().unwrap().scan_complete()
2489 })
2490 .await;
2491 cx.run_until_parked();
2492
2493 worktree.update(cx, |worktree, _cx| {
2494 let expected_excluded_paths = [];
2495 let expected_ignored_paths = [];
2496 let expected_tracked_paths = [".env.example", ".env.local", "README.md", "src/main.rs"];
2497 let expected_included_paths = [];
2498
2499 check_worktree_entries(
2500 worktree,
2501 &expected_excluded_paths,
2502 &expected_ignored_paths,
2503 &expected_tracked_paths,
2504 &expected_included_paths,
2505 );
2506 });
2507}
2508
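// Asserts how the worktree classified each group of paths:
// - excluded: no entry exists at all (the scanner skipped the path),
// - ignored: an entry exists and `is_ignored` is set,
// - tracked: an entry exists and is either not ignored or always included,
// - included: an entry exists and `is_always_included` is set.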
2509#[track_caller]
2510fn check_worktree_entries(
2511 tree: &Worktree,
2512 expected_excluded_paths: &[&str],
2513 expected_ignored_paths: &[&str],
2514 expected_tracked_paths: &[&str],
2515 expected_included_paths: &[&str],
2516) {
2517 for path in expected_excluded_paths {
2518 let entry = tree.entry_for_path(rel_path(path));
2519 assert!(
2520 entry.is_none(),
2521 "expected path '{path}' to be excluded, but got entry: {entry:?}",
2522 );
2523 }
2524 for path in expected_ignored_paths {
2525 let entry = tree
2526 .entry_for_path(rel_path(path))
2527 .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
2528 assert!(
2529 entry.is_ignored,
2530 "expected path '{path}' to be ignored, but got entry: {entry:?}",
2531 );
2532 }
2533 for path in expected_tracked_paths {
2534 let entry = tree
2535 .entry_for_path(rel_path(path))
2536 .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
2537 assert!(
2538 !entry.is_ignored || entry.is_always_included,
2539 "expected path '{path}' to be tracked, but got entry: {entry:?}",
2540 );
2541 }
2542 for path in expected_included_paths {
2543 let entry = tree
2544 .entry_for_path(rel_path(path))
2545 .unwrap_or_else(|| panic!("Missing entry for expected included path '{path}'"));
2546 assert!(
2547 entry.is_always_included,
2548 "expected path '{path}' to always be included, but got entry: {entry:?}",
2549 );
2550 }
2551}
2552
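// Shared test setup: initializes test logging and installs a test `SettingsStore` global.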
2553fn init_test(cx: &mut gpui::TestAppContext) {
2554 zlog::init_test();
2555
2556 cx.update(|cx| {
2557 let settings_store = SettingsStore::test(cx);
2558 cx.set_global(settings_store);
2559 });
2560}
2561
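// Feeds a set of raw byte sequences through `Worktree::load_file` and checks that each one
// decodes to the expected text (or is rejected as binary in the failure cases). The byte
// arrays double as a reference for each encoding; for example, "こんにちは" is
// 82 B1 82 F1 82 C9 82 BF 82 CD in Shift_JIS and A4 B3 A4 F3 A4 CB A4 C1 A4 CF in EUC-JP.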
2562#[gpui::test]
2563async fn test_load_file_encoding(cx: &mut TestAppContext) {
2564 init_test(cx);
2565
2566 struct TestCase {
2567 name: &'static str,
2568 bytes: Vec<u8>,
2569 expected_text: &'static str,
2570 }
2571
2572 // --- Success Cases ---
2573 let success_cases = vec![
2574 TestCase {
2575 name: "utf8.txt",
            bytes: "こんにちは".as_bytes().to_vec(),
            expected_text: "こんにちは",
2578 },
2579 TestCase {
2580 name: "sjis.txt",
2581 bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
            expected_text: "こんにちは",
2583 },
2584 TestCase {
2585 name: "eucjp.txt",
2586 bytes: vec![0xa4, 0xb3, 0xa4, 0xf3, 0xa4, 0xcb, 0xa4, 0xc1, 0xa4, 0xcf],
            expected_text: "こんにちは",
2588 },
2589 TestCase {
2590 name: "iso2022jp.txt",
2591 bytes: vec![
2592 0x1b, 0x24, 0x42, 0x24, 0x33, 0x24, 0x73, 0x24, 0x4b, 0x24, 0x41, 0x24, 0x4f, 0x1b,
2593 0x28, 0x42,
2594 ],
            expected_text: "こんにちは",
2596 },
2597 TestCase {
2598 name: "win1252.txt",
2599 bytes: vec![0x43, 0x61, 0x66, 0xe9],
            expected_text: "Café",
2601 },
2602 TestCase {
2603 name: "gbk.txt",
2604 bytes: vec![
2605 0xbd, 0xf1, 0xcc, 0xec, 0xcc, 0xec, 0xc6, 0xf8, 0xb2, 0xbb, 0xb4, 0xed,
2606 ],
            expected_text: "今天天气不错",
2608 },
2609 // UTF-16LE with BOM
2610 TestCase {
2611 name: "utf16le_bom.txt",
2612 bytes: vec![
2613 0xFF, 0xFE, // BOM
2614 0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F, 0x30,
2615 ],
            expected_text: "こんにちは",
2617 },
2618 // UTF-16BE with BOM
2619 TestCase {
2620 name: "utf16be_bom.txt",
2621 bytes: vec![
2622 0xFE, 0xFF, // BOM
2623 0x30, 0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F,
2624 ],
            expected_text: "こんにちは",
2626 },
2627 // UTF-16LE without BOM (ASCII only)
2628 // This relies on the "null byte heuristic" we implemented.
2629 // "ABC" -> 41 00 42 00 43 00
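        // A minimal sketch of such a heuristic (hypothetical, not the actual detector):
        //
        //     fn looks_like_utf16le(bytes: &[u8]) -> bool {
        //         // Mostly-zero odd-indexed bytes suggest UTF-16LE-encoded ASCII.
        //         let zero_odd = bytes.iter().skip(1).step_by(2).filter(|b| **b == 0).count();
        //         bytes.len() >= 2 && zero_odd * 2 >= bytes.len()
        //     }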
2630 TestCase {
2631 name: "utf16le_ascii_no_bom.txt",
2632 bytes: vec![0x41, 0x00, 0x42, 0x00, 0x43, 0x00],
2633 expected_text: "ABC",
2634 },
2635 ];
2636
2637 // --- Failure Cases ---
2638 let failure_cases = vec![
2639 // Binary File (Should be detected by heuristic and return Error)
2640 // Contains random bytes and mixed nulls that don't match UTF-16 patterns
2641 TestCase {
2642 name: "binary.bin",
2643 bytes: vec![0x00, 0xFF, 0x12, 0x00, 0x99, 0x88, 0x77, 0x66, 0x00],
2644 expected_text: "", // Not used
2645 },
2646 ];
2647
2648 let root_path = if cfg!(windows) {
2649 Path::new("C:\\root")
2650 } else {
2651 Path::new("/root")
2652 };
2653
2654 let fs = FakeFs::new(cx.background_executor.clone());
2655 fs.create_dir(root_path).await.unwrap();
2656
2657 for case in success_cases.iter().chain(failure_cases.iter()) {
2658 let path = root_path.join(case.name);
2659 fs.write(&path, &case.bytes).await.unwrap();
2660 }
2661
2662 let tree = Worktree::local(
2663 root_path,
2664 true,
2665 fs,
2666 Default::default(),
2667 true,
2668 &mut cx.to_async(),
2669 )
2670 .await
2671 .unwrap();
2672
2673 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2674 .await;
2675
2676 let rel_path = |name: &str| {
2677 RelPath::new(&Path::new(name), PathStyle::local())
2678 .unwrap()
2679 .into_arc()
2680 };
2681
2682 // Run Success Tests
2683 for case in success_cases {
2684 let loaded = tree
2685 .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx))
2686 .await;
2687 if let Err(e) = &loaded {
2688 panic!("Failed to load success case '{}': {:?}", case.name, e);
2689 }
2690 let loaded = loaded.unwrap();
2691 assert_eq!(
2692 loaded.text, case.expected_text,
2693 "Encoding mismatch for file: {}",
2694 case.name
2695 );
2696 }
2697
2698 // Run Failure Tests
2699 for case in failure_cases {
2700 let loaded = tree
2701 .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx))
2702 .await;
2703 assert!(
2704 loaded.is_err(),
2705 "Failure case '{}' unexpectedly succeeded! It should have been detected as binary.",
2706 case.name
2707 );
2708 let err_msg = loaded.unwrap_err().to_string();
2709 println!("Got expected error for {}: {}", case.name, err_msg);
2710 }
2711}
2712
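// The inverse of the load test above: writes a Rope through `Worktree::write_file` with an
// explicit `encoding_rs` encoding and BOM flag, then asserts the exact bytes written to disk.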
2713#[gpui::test]
2714async fn test_write_file_encoding(cx: &mut gpui::TestAppContext) {
2715 init_test(cx);
2716 let fs = FakeFs::new(cx.executor());
2717
2718 let root_path = if cfg!(windows) {
2719 Path::new("C:\\root")
2720 } else {
2721 Path::new("/root")
2722 };
2723 fs.create_dir(root_path).await.unwrap();
2724
2725 let worktree = Worktree::local(
2726 root_path,
2727 true,
2728 fs.clone(),
2729 Default::default(),
2730 true,
2731 &mut cx.to_async(),
2732 )
2733 .await
2734 .unwrap();
2735
2736 // Define test case structure
2737 struct TestCase {
2738 name: &'static str,
2739 text: &'static str,
2740 encoding: &'static encoding_rs::Encoding,
2741 has_bom: bool,
2742 expected_bytes: Vec<u8>,
2743 }
2744
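    // For reference, the BOM byte sequences asserted below are:
    // UTF-8: EF BB BF, UTF-16LE: FF FE, UTF-16BE: FE FF.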
2745 let cases = vec![
2746 // Shift_JIS with Japanese
2747 TestCase {
2748 name: "Shift_JIS with Japanese",
            text: "こんにちは",
2750 encoding: encoding_rs::SHIFT_JIS,
2751 has_bom: false,
2752 expected_bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
2753 },
2754 // UTF-8 No BOM
2755 TestCase {
2756 name: "UTF-8 No BOM",
2757 text: "AB",
2758 encoding: encoding_rs::UTF_8,
2759 has_bom: false,
2760 expected_bytes: vec![0x41, 0x42],
2761 },
2762 // UTF-8 with BOM
2763 TestCase {
2764 name: "UTF-8 with BOM",
2765 text: "AB",
2766 encoding: encoding_rs::UTF_8,
2767 has_bom: true,
2768 expected_bytes: vec![0xEF, 0xBB, 0xBF, 0x41, 0x42],
2769 },
2770 // UTF-16LE No BOM with Japanese
2771 // NOTE: This passes thanks to the manual encoding fix implemented in `write_file`.
2772 TestCase {
2773 name: "UTF-16LE No BOM with Japanese",
            text: "こんにちは",
2775 encoding: encoding_rs::UTF_16LE,
2776 has_bom: false,
2777 expected_bytes: vec![0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f, 0x30],
2778 },
2779 // UTF-16LE with BOM
2780 TestCase {
2781 name: "UTF-16LE with BOM",
2782 text: "A",
2783 encoding: encoding_rs::UTF_16LE,
2784 has_bom: true,
2785 expected_bytes: vec![0xFF, 0xFE, 0x41, 0x00],
2786 },
2787 // UTF-16BE No BOM with Japanese
2788 // NOTE: This passes thanks to the manual encoding fix.
2789 TestCase {
2790 name: "UTF-16BE No BOM with Japanese",
            text: "こんにちは",
2792 encoding: encoding_rs::UTF_16BE,
2793 has_bom: false,
2794 expected_bytes: vec![0x30, 0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f],
2795 },
2796 // UTF-16BE with BOM
2797 TestCase {
2798 name: "UTF-16BE with BOM",
2799 text: "A",
2800 encoding: encoding_rs::UTF_16BE,
2801 has_bom: true,
2802 expected_bytes: vec![0xFE, 0xFF, 0x00, 0x41],
2803 },
2804 ];
2805
2806 for (i, case) in cases.into_iter().enumerate() {
2807 let file_name = format!("test_{}.txt", i);
2808 let path: Arc<Path> = Path::new(&file_name).into();
2809 let file_path = root_path.join(&file_name);
2810
2811 fs.insert_file(&file_path, "".into()).await;
2812
2813 let rel_path = RelPath::new(&path, PathStyle::local()).unwrap().into_arc();
2814 let text = text::Rope::from(case.text);
2815
2816 let task = worktree.update(cx, |wt, cx| {
2817 wt.write_file(
2818 rel_path,
2819 text,
2820 text::LineEnding::Unix,
2821 case.encoding,
2822 case.has_bom,
2823 cx,
2824 )
2825 });
2826
2827 if let Err(e) = task.await {
2828 panic!("Unexpected error in case '{}': {:?}", case.name, e);
2829 }
2830
2831 let bytes = fs.load_bytes(&file_path).await.unwrap();
2832
2833 assert_eq!(
2834 bytes, case.expected_bytes,
2835 "case '{}' mismatch. Expected {:?}, but got {:?}",
2836 case.name, case.expected_bytes, bytes
2837 );
2838 }
2839}
2840
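// With the initial scan disabled, refreshing a deeply nested path should create entries for
// every ancestor directory along the way (plus those directories' immediate children), without
// recursively scanning unrelated subtrees.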
2841#[gpui::test]
2842async fn test_refresh_entries_for_paths_creates_ancestors(cx: &mut TestAppContext) {
2843 init_test(cx);
2844 let fs = FakeFs::new(cx.background_executor.clone());
2845 fs.insert_tree(
2846 "/root",
2847 json!({
2848 "a": {
2849 "b": {
2850 "c": {
2851 "deep_file.txt": "content",
2852 "sibling.txt": "content"
2853 },
2854 "d": {
2855 "under_sibling_dir.txt": "content"
2856 }
2857 }
2858 }
2859 }),
2860 )
2861 .await;
2862
2863 let tree = Worktree::local(
2864 Path::new("/root"),
2865 true,
2866 fs.clone(),
2867 Default::default(),
2868 false, // Disable scanning so the initial scan doesn't discover any entries
2869 &mut cx.to_async(),
2870 )
2871 .await
2872 .unwrap();
2873
2874 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2875 .await;
2876
2877 tree.read_with(cx, |tree, _| {
2878 assert_eq!(
2879 tree.entries(true, 0)
2880 .map(|e| e.path.as_ref())
2881 .collect::<Vec<_>>(),
2882 &[rel_path("")],
2883 "Only root entry should exist when scanning is disabled"
2884 );
2885
2886 assert!(tree.entry_for_path(rel_path("a")).is_none());
2887 assert!(tree.entry_for_path(rel_path("a/b")).is_none());
2888 assert!(tree.entry_for_path(rel_path("a/b/c")).is_none());
2889 assert!(
2890 tree.entry_for_path(rel_path("a/b/c/deep_file.txt"))
2891 .is_none()
2892 );
2893 });
2894
2895 tree.read_with(cx, |tree, _| {
2896 tree.as_local()
2897 .unwrap()
2898 .refresh_entries_for_paths(vec![rel_path("a/b/c/deep_file.txt").into()])
2899 })
2900 .recv()
2901 .await;
2902
2903 tree.read_with(cx, |tree, _| {
2904 assert_eq!(
2905 tree.entries(true, 0)
2906 .map(|e| e.path.as_ref())
2907 .collect::<Vec<_>>(),
2908 &[
2909 rel_path(""),
2910 rel_path("a"),
2911 rel_path("a/b"),
2912 rel_path("a/b/c"),
2913 rel_path("a/b/c/deep_file.txt"),
2914 rel_path("a/b/c/sibling.txt"),
2915 rel_path("a/b/d"),
2916 ],
2917 "All ancestors should be created when refreshing a deeply nested path"
2918 );
2919 });
2920}