1mod worktree_settings;
2
3use anyhow::Result;
4use encoding_rs;
5use fs::{FakeFs, Fs, RealFs, RemoveOptions};
6use git::{DOT_GIT, GITIGNORE, REPO_EXCLUDE};
7use gpui::{AppContext as _, BackgroundExecutor, BorrowAppContext, Context, Task, TestAppContext};
8use parking_lot::Mutex;
9use postage::stream::Stream;
10use pretty_assertions::assert_eq;
11use rand::prelude::*;
12use worktree::{Entry, EntryKind, Event, PathChange, Worktree, WorktreeModelHandle};
13
14use serde_json::json;
15use settings::{SettingsStore, WorktreeId};
16use std::{
17 env,
18 fmt::Write,
19 mem,
20 path::{Path, PathBuf},
21 sync::Arc,
22};
23use util::{
24 ResultExt, path,
25 paths::PathStyle,
26 rel_path::{RelPath, rel_path},
27 test::TempTree,
28};
29
30#[gpui::test]
31async fn test_traversal(cx: &mut TestAppContext) {
32 init_test(cx);
33 let fs = FakeFs::new(cx.background_executor.clone());
34 fs.insert_tree(
35 "/root",
36 json!({
37 ".gitignore": "a/b\n",
38 "a": {
39 "b": "",
40 "c": "",
41 }
42 }),
43 )
44 .await;
45
46 let tree = Worktree::local(
47 Path::new("/root"),
48 true,
49 fs,
50 Default::default(),
51 true,
52 WorktreeId::from_proto(0),
53 &mut cx.to_async(),
54 )
55 .await
56 .unwrap();
57 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
58 .await;
59
60 tree.read_with(cx, |tree, _| {
61 assert_eq!(
62 tree.entries(false, 0)
63 .map(|entry| entry.path.as_ref())
64 .collect::<Vec<_>>(),
65 vec![
66 rel_path(""),
67 rel_path(".gitignore"),
68 rel_path("a"),
69 rel_path("a/c"),
70 ]
71 );
72 assert_eq!(
73 tree.entries(true, 0)
74 .map(|entry| entry.path.as_ref())
75 .collect::<Vec<_>>(),
76 vec![
77 rel_path(""),
78 rel_path(".gitignore"),
79 rel_path("a"),
80 rel_path("a/b"),
81 rel_path("a/c"),
82 ]
83 );
84 })
85}
86
87#[gpui::test(iterations = 10)]
88async fn test_circular_symlinks(cx: &mut TestAppContext) {
89 init_test(cx);
90 let fs = FakeFs::new(cx.background_executor.clone());
91 fs.insert_tree(
92 "/root",
93 json!({
94 "lib": {
95 "a": {
96 "a.txt": ""
97 },
98 "b": {
99 "b.txt": ""
100 }
101 }
102 }),
103 )
104 .await;
105 fs.create_symlink("/root/lib/a/lib".as_ref(), "..".into())
106 .await
107 .unwrap();
108 fs.create_symlink("/root/lib/b/lib".as_ref(), "..".into())
109 .await
110 .unwrap();
111
112 let tree = Worktree::local(
113 Path::new("/root"),
114 true,
115 fs.clone(),
116 Default::default(),
117 true,
118 WorktreeId::from_proto(0),
119 &mut cx.to_async(),
120 )
121 .await
122 .unwrap();
123
124 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
125 .await;
126
127 tree.read_with(cx, |tree, _| {
128 assert_eq!(
129 tree.entries(false, 0)
130 .map(|entry| entry.path.as_ref())
131 .collect::<Vec<_>>(),
132 vec![
133 rel_path(""),
134 rel_path("lib"),
135 rel_path("lib/a"),
136 rel_path("lib/a/a.txt"),
137 rel_path("lib/a/lib"),
138 rel_path("lib/b"),
139 rel_path("lib/b/b.txt"),
140 rel_path("lib/b/lib"),
141 ]
142 );
143 });
144
145 fs.rename(
146 Path::new("/root/lib/a/lib"),
147 Path::new("/root/lib/a/lib-2"),
148 Default::default(),
149 )
150 .await
151 .unwrap();
152 cx.executor().run_until_parked();
153 tree.read_with(cx, |tree, _| {
154 assert_eq!(
155 tree.entries(false, 0)
156 .map(|entry| entry.path.as_ref())
157 .collect::<Vec<_>>(),
158 vec![
159 rel_path(""),
160 rel_path("lib"),
161 rel_path("lib/a"),
162 rel_path("lib/a/a.txt"),
163 rel_path("lib/a/lib-2"),
164 rel_path("lib/b"),
165 rel_path("lib/b/b.txt"),
166 rel_path("lib/b/lib"),
167 ]
168 );
169 });
170}
171
/// Symlinks whose targets live outside the worktree root are surfaced as
/// `is_external` entries of kind `UnloadedDir`, and their contents are only
/// scanned on demand (one directory level per refresh).
#[gpui::test]
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "dir1": {
                "deps": {
                    // symlinks here
                },
                "src": {
                    "a.rs": "",
                    "b.rs": "",
                },
            },
            "dir2": {
                "src": {
                    "c.rs": "",
                    "d.rs": "",
                }
            },
            "dir3": {
                "deps": {},
                "src": {
                    "e.rs": "",
                    "f.rs": "",
                },
            }
        }),
    )
    .await;

    // These symlinks point to directories outside of the worktree's root, dir1.
    fs.create_symlink("/root/dir1/deps/dep-dir2".as_ref(), "../../dir2".into())
        .await
        .unwrap();
    fs.create_symlink("/root/dir1/deps/dep-dir3".as_ref(), "../../dir3".into())
        .await
        .unwrap();

    let tree = Worktree::local(
        Path::new("/root/dir1"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Record every `UpdatedEntries` event so we can assert exactly which
    // paths are reported as loaded after each expansion below.
    let tree_updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |_, cx| {
        let tree_updates = tree_updates.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedEntries(update) = event {
                tree_updates.lock().extend(
                    update
                        .iter()
                        .map(|(path, _, change)| (path.clone(), *change)),
                );
            }
        })
        .detach();
    });

    // The symlinked directories are not scanned by default.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );

        // An un-expanded external symlink is represented as an unloaded dir.
        assert_eq!(
            tree.entry_for_path(rel_path("deps/dep-dir2")).unwrap().kind,
            EntryKind::UnloadedDir
        );
    });

    // Expand one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3").into()])
    })
    .recv()
    .await;

    // The expanded directory's contents are loaded. Subdirectories are
    // not scanned yet.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("deps/dep-dir3/deps"), true),
                (rel_path("deps/dep-dir3/src"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );
    });
    // Only the newly-loaded paths were reported via events.
    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (rel_path("deps/dep-dir3").into(), PathChange::Loaded),
            (rel_path("deps/dep-dir3/deps").into(), PathChange::Loaded),
            (rel_path("deps/dep-dir3/src").into(), PathChange::Loaded)
        ]
    );

    // Expand a subdirectory of one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3/src").into()])
    })
    .recv()
    .await;

    // The expanded subdirectory's contents are loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("deps/dep-dir3/deps"), true),
                (rel_path("deps/dep-dir3/src"), true),
                (rel_path("deps/dep-dir3/src/e.rs"), true),
                (rel_path("deps/dep-dir3/src/f.rs"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );
    });

    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (rel_path("deps/dep-dir3/src").into(), PathChange::Loaded),
            (
                rel_path("deps/dep-dir3/src/e.rs").into(),
                PathChange::Loaded
            ),
            (
                rel_path("deps/dep-dir3/src/f.rs").into(),
                PathChange::Loaded
            )
        ]
    );
}
350
/// On macOS's case-insensitive but case-preserving file system, a rename
/// that changes only the casing of a file name must still be reflected in
/// the worktree.
#[cfg(target_os = "macos")]
#[gpui::test]
async fn test_renaming_case_only(cx: &mut TestAppContext) {
    cx.executor().allow_parking();
    init_test(cx);

    const OLD_NAME: &str = "aaa.rs";
    const NEW_NAME: &str = "AAA.rs";

    let fs = Arc::new(RealFs::new(None, cx.executor()));
    let temp_root = TempTree::new(json!({
        OLD_NAME: "",
    }));

    let worktree = Worktree::local(
        temp_root.path(),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| worktree.read(cx).as_local().unwrap().scan_complete())
        .await;
    worktree.read_with(cx, |worktree, _| {
        let paths = worktree
            .entries(true, 0)
            .map(|entry| entry.path.as_ref())
            .collect::<Vec<_>>();
        assert_eq!(paths, vec![rel_path(""), rel_path(OLD_NAME)]);
    });

    // Rename the file, changing nothing but its casing.
    fs.rename(
        &temp_root.path().join(OLD_NAME),
        &temp_root.path().join(NEW_NAME),
        fs::RenameOptions {
            overwrite: true,
            ignore_if_exists: true,
            create_parents: false,
        },
    )
    .await
    .unwrap();

    worktree.flush_fs_events(cx).await;

    // The worktree now shows the new casing.
    worktree.read_with(cx, |worktree, _| {
        let paths = worktree
            .entries(true, 0)
            .map(|entry| entry.path.as_ref())
            .collect::<Vec<_>>();
        assert_eq!(paths, vec![rel_path(""), rel_path(NEW_NAME)]);
    });
}
411
/// Loading a file that lives inside an unscanned gitignored directory lazily
/// expands only the ancestor directories needed to reach it, and changes
/// inside still-unloaded directories trigger no scanning work at all.
#[gpui::test]
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "one": {
                "node_modules": {
                    "a": {
                        "a1.js": "a1",
                        "a2.js": "a2",
                    },
                    "b": {
                        "b1.js": "b1",
                        "b2.js": "b2",
                    },
                    "c": {
                        "c1.js": "c1",
                        "c2.js": "c2",
                    }
                },
            },
            "two": {
                "x.js": "",
                "y.js": "",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // After the initial scan, the gitignored "one/node_modules" appears as a
    // single entry; its contents have not been listed.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );
    });

    // Open a file that is nested inside of a gitignored directory that
    // has not yet been expanded.
    let prev_read_dir_count = fs.read_dir_call_count();
    let loaded = tree
        .update(cx, |tree, cx| {
            tree.load_file(rel_path("one/node_modules/b/b1.js"), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, _| {
        // "node_modules" and "node_modules/b" were expanded; sibling dirs
        // "a" and "c" are listed but their own contents remain unloaded.
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("one/node_modules/a"), true),
                (rel_path("one/node_modules/b"), true),
                (rel_path("one/node_modules/b/b1.js"), true),
                (rel_path("one/node_modules/b/b2.js"), true),
                (rel_path("one/node_modules/c"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );

        assert_eq!(
            loaded.file.path.as_ref(),
            rel_path("one/node_modules/b/b1.js")
        );

        // Only the newly-expanded directories are scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
    });

    // Open another file in a different subdirectory of the same
    // gitignored directory.
    let prev_read_dir_count = fs.read_dir_call_count();
    let loaded = tree
        .update(cx, |tree, cx| {
            tree.load_file(rel_path("one/node_modules/a/a2.js"), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("one/node_modules/a"), true),
                (rel_path("one/node_modules/a/a1.js"), true),
                (rel_path("one/node_modules/a/a2.js"), true),
                (rel_path("one/node_modules/b"), true),
                (rel_path("one/node_modules/b/b1.js"), true),
                (rel_path("one/node_modules/b/b2.js"), true),
                (rel_path("one/node_modules/c"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );

        assert_eq!(
            loaded.file.path.as_ref(),
            rel_path("one/node_modules/a/a2.js")
        );

        // Only the newly-expanded directory is scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
    });

    let path = PathBuf::from("/root/one/node_modules/c/lib");

    // No work happens when files and directories change within an unloaded directory.
    let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
    // When we open a directory, we check each ancestor whether it's a git
    // repository. That means we have an fs.metadata call per ancestor that we
    // need to subtract here.
    let ancestors = path.ancestors().count();

    fs.create_dir(path.as_ref()).await.unwrap();
    cx.executor().run_until_parked();

    // Aside from the per-ancestor metadata checks, the change inside the
    // unloaded "one/node_modules/c" caused zero fs calls.
    assert_eq!(
        fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count - ancestors,
        0
    );
}
575
/// When a .gitignore change stops ignoring a directory, its previously
/// unloaded contents are scanned — and each newly-visible directory is
/// read from disk exactly once.
#[gpui::test]
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "a": {
                "a.js": "",
            },
            "b": {
                "b.js": "",
            },
            "node_modules": {
                "c": {
                    "c.js": "",
                },
                "d": {
                    "d.js": "",
                    "e": {
                        "e1.js": "",
                        "e2.js": "",
                    },
                    "f": {
                        "f1.js": "",
                        "f2.js": "",
                    }
                },
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Open a file within the gitignored directory, forcing some of its
    // subdirectories to be read, but not all.
    let read_dir_count_1 = fs.read_dir_call_count();
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("node_modules/d/d.js").into()])
    })
    .recv()
    .await;

    // Those subdirectories are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("a"), false),
                (rel_path("a/a.js"), false),
                (rel_path("b"), false),
                (rel_path("b/b.js"), false),
                (rel_path("node_modules"), true),
                (rel_path("node_modules/c"), true),
                (rel_path("node_modules/d"), true),
                (rel_path("node_modules/d/d.js"), true),
                (rel_path("node_modules/d/e"), true),
                (rel_path("node_modules/d/f"), true),
            ]
        );
    });
    // Only "node_modules" and "node_modules/d" were read to reach d.js.
    let read_dir_count_2 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);

    // Update the gitignore so that node_modules is no longer ignored,
    // but a subdirectory is ignored
    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // All of the directories that are no longer ignored are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("a"), false),
                (rel_path("a/a.js"), false),
                (rel_path("b"), false),
                (rel_path("b/b.js"), false),
                // This directory is no longer ignored
                (rel_path("node_modules"), false),
                (rel_path("node_modules/c"), false),
                (rel_path("node_modules/c/c.js"), false),
                (rel_path("node_modules/d"), false),
                (rel_path("node_modules/d/d.js"), false),
                // This subdirectory is now ignored
                (rel_path("node_modules/d/e"), true),
                (rel_path("node_modules/d/f"), false),
                (rel_path("node_modules/d/f/f1.js"), false),
                (rel_path("node_modules/d/f/f2.js"), false),
            ]
        );
    });

    // Each of the newly-loaded directories is scanned only once.
    let read_dir_count_3 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
}
700
701#[gpui::test]
702async fn test_write_file(cx: &mut TestAppContext) {
703 init_test(cx);
704 cx.executor().allow_parking();
705 let dir = TempTree::new(json!({
706 ".git": {},
707 ".gitignore": "ignored-dir\n",
708 "tracked-dir": {},
709 "ignored-dir": {}
710 }));
711
712 let worktree = Worktree::local(
713 dir.path(),
714 true,
715 Arc::new(RealFs::new(None, cx.executor())),
716 Default::default(),
717 true,
718 WorktreeId::from_proto(0),
719 &mut cx.to_async(),
720 )
721 .await
722 .unwrap();
723
724 #[cfg(not(target_os = "macos"))]
725 fs::fs_watcher::global(|_| {}).unwrap();
726
727 cx.read(|cx| worktree.read(cx).as_local().unwrap().scan_complete())
728 .await;
729 worktree.flush_fs_events(cx).await;
730
731 worktree
732 .update(cx, |tree, cx| {
733 tree.write_file(
734 rel_path("tracked-dir/file.txt").into(),
735 "hello".into(),
736 Default::default(),
737 encoding_rs::UTF_8,
738 false,
739 cx,
740 )
741 })
742 .await
743 .unwrap();
744 worktree
745 .update(cx, |tree, cx| {
746 tree.write_file(
747 rel_path("ignored-dir/file.txt").into(),
748 "world".into(),
749 Default::default(),
750 encoding_rs::UTF_8,
751 false,
752 cx,
753 )
754 })
755 .await
756 .unwrap();
757 worktree.read_with(cx, |tree, _| {
758 let tracked = tree
759 .entry_for_path(rel_path("tracked-dir/file.txt"))
760 .unwrap();
761 let ignored = tree
762 .entry_for_path(rel_path("ignored-dir/file.txt"))
763 .unwrap();
764 assert!(!tracked.is_ignored);
765 assert!(ignored.is_ignored);
766 });
767}
768
/// Paths matching `file_scan_inclusions` are always indexed, even when they
/// are gitignored (node_modules and top_level.txt here).
#[gpui::test]
async fn test_file_scan_inclusions(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules\ntop_level.txt\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
            "package.json": "//package.json"
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        "top_level.txt": "top level file",
        ".DS_Store": "",
    }));
    // No exclusions; include package.json files under node_modules and all
    // .DS_Store files despite the gitignore.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions = Some(vec![
                    "node_modules/**/package.json".to_string(),
                    "**/.DS_Store".to_string(),
                ]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        // Assert that file_scan_inclusions overrides the gitignore: the
        // matching entries (last argument) are always included even though
        // they live under ignored paths.
        check_worktree_entries(
            tree,
            &[],
            &["target", "node_modules"],
            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
            &[
                "node_modules/prettier/package.json",
                ".DS_Store",
                "node_modules/.DS_Store",
                "src/.DS_Store",
            ],
        )
    });
}
841
842#[gpui::test]
843async fn test_file_scan_exclusions_overrules_inclusions(cx: &mut TestAppContext) {
844 init_test(cx);
845 cx.executor().allow_parking();
846 let dir = TempTree::new(json!({
847 ".gitignore": "**/target\n/node_modules\n",
848 "target": {
849 "index": "blah2"
850 },
851 "node_modules": {
852 ".DS_Store": "",
853 "prettier": {
854 "package.json": "{}",
855 },
856 },
857 "src": {
858 ".DS_Store": "",
859 "foo": {
860 "foo.rs": "mod another;\n",
861 "another.rs": "// another",
862 },
863 },
864 ".DS_Store": "",
865 }));
866
867 cx.update(|cx| {
868 cx.update_global::<SettingsStore, _>(|store, cx| {
869 store.update_user_settings(cx, |settings| {
870 settings.project.worktree.file_scan_exclusions =
871 Some(vec!["**/.DS_Store".to_string()]);
872 settings.project.worktree.file_scan_inclusions =
873 Some(vec!["**/.DS_Store".to_string()]);
874 });
875 });
876 });
877
878 let tree = Worktree::local(
879 dir.path(),
880 true,
881 Arc::new(RealFs::new(None, cx.executor())),
882 Default::default(),
883 true,
884 WorktreeId::from_proto(0),
885 &mut cx.to_async(),
886 )
887 .await
888 .unwrap();
889 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
890 .await;
891 tree.flush_fs_events(cx).await;
892 tree.read_with(cx, |tree, _| {
893 // Assert that file_scan_inclusions overrides file_scan_exclusions.
894 check_worktree_entries(
895 tree,
896 &[".DS_Store, src/.DS_Store"],
897 &["target", "node_modules"],
898 &["src/foo/another.rs", "src/foo/foo.rs", ".gitignore"],
899 &[],
900 )
901 });
902}
903
/// Changing `file_scan_inclusions` at runtime re-indexes the worktree:
/// entries gain `is_always_included` while the pattern matches and lose it
/// when the setting is cleared.
#[gpui::test]
async fn test_file_scan_inclusions_reindexes_on_setting_change(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules/\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
        },
        ".DS_Store": "",
    }));

    // Initially, everything under node_modules is always included.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions =
                    Some(vec!["node_modules/**".to_string()]);
            });
        });
    });
    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    // The gitignored node_modules entries carry the always-included flag.
    tree.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("node_modules"))
                .is_some_and(|f| f.is_always_included)
        );
        assert!(
            tree.entry_for_path(rel_path("node_modules/prettier/package.json"))
                .is_some_and(|f| f.is_always_included)
        );
    });

    // Clear the inclusion patterns and wait for the rescan.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions = Some(vec![]);
            });
        });
    });
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    // The flag is dropped from the previously-included entries.
    tree.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("node_modules"))
                .is_some_and(|f| !f.is_always_included)
        );
        assert!(
            tree.entry_for_path(rel_path("node_modules/prettier/package.json"))
                .is_some_and(|f| !f.is_always_included)
        );
    });
}
987
/// Paths matching `file_scan_exclusions` are omitted from the worktree
/// entirely, and changing the setting at runtime re-indexes accordingly.
#[gpui::test]
async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        ".DS_Store": "",
    }));
    // Exclude everything under any "foo" directory plus all .DS_Store files.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions =
                    Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    // The excluded paths (first argument) are absent; gitignored dirs are
    // still listed; the tracked sources remain.
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "node_modules/.DS_Store",
                "src/.DS_Store",
                ".DS_Store",
            ],
            &["target", "node_modules"],
            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
            &[],
        )
    });

    // Swap the exclusions to node_modules only; the previously-excluded
    // paths must reappear after the rescan.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions =
                    Some(vec!["**/node_modules/**".to_string()]);
            });
        });
    });
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                "node_modules/prettier/package.json",
                "node_modules/.DS_Store",
                "node_modules",
            ],
            &["target"],
            &[
                ".gitignore",
                "src/lib.rs",
                "src/bar/bar.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/.DS_Store",
                ".DS_Store",
            ],
            &[],
        )
    });
}
1087
/// The `hidden_files` setting controls the `is_hidden` flag: by default
/// dotfiles are marked hidden, and setting an explicit glob replaces that
/// behavior with the configured patterns.
#[gpui::test]
async fn test_hidden_files(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n",
        ".hidden_file": "content",
        ".hidden_dir": {
            "nested.rs": "code",
        },
        "src": {
            "visible.rs": "code",
        },
        "logs": {
            "app.log": "logs",
            "debug.log": "logs",
        },
        "visible.txt": "content",
    }));

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    // With no explicit setting, dot-prefixed entries (and their children)
    // are hidden.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_hidden))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), true),
                (rel_path(".hidden_dir"), true),
                (rel_path(".hidden_dir/nested.rs"), true),
                (rel_path(".hidden_file"), true),
                (rel_path("logs"), false),
                (rel_path("logs/app.log"), false),
                (rel_path("logs/debug.log"), false),
                (rel_path("src"), false),
                (rel_path("src/visible.rs"), false),
                (rel_path("visible.txt"), false),
            ]
        );
    });

    // Configure an explicit glob: only *.log files should be hidden now.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.hidden_files = Some(vec!["**/*.log".to_string()]);
            });
        });
    });
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // The explicit setting replaces the dotfile default: dot-entries are no
    // longer hidden, while the .log files are.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_hidden))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path(".hidden_dir"), false),
                (rel_path(".hidden_dir/nested.rs"), false),
                (rel_path(".hidden_file"), false),
                (rel_path("logs"), false),
                (rel_path("logs/app.log"), true),
                (rel_path("logs/debug.log"), true),
                (rel_path("src"), false),
                (rel_path("src/visible.rs"), false),
                (rel_path("visible.txt"), false),
            ]
        );
    });
}
1175
/// File-system events inside `file_scan_exclusions` directories leave those
/// paths absent from the worktree, while events in ignored and tracked
/// directories are processed normally.
#[gpui::test]
async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".git": {
            "HEAD": "ref: refs/heads/main\n",
            "foo": "bar",
        },
        ".gitignore": "**/target\n/node_modules\ntest_output\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        ".DS_Store": "",
    }));
    // Exclude .git, node_modules, and the (not yet existing) build_output.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![
                    "**/.git".to_string(),
                    "node_modules/".to_string(),
                    "build_output".to_string(),
                ]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    // After the initial scan: excluded paths absent, "target" ignored,
    // everything else tracked.
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                ".git/HEAD",
                ".git/foo",
                "node_modules",
                "node_modules/.DS_Store",
                "node_modules/prettier",
                "node_modules/prettier/package.json",
            ],
            &["target"],
            &[
                ".DS_Store",
                "src/.DS_Store",
                "src/lib.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/bar/bar.rs",
                ".gitignore",
            ],
            &[],
        )
    });

    // Create one new excluded directory and one new gitignored directory.
    let new_excluded_dir = dir.path().join("build_output");
    let new_ignored_dir = dir.path().join("test_output");
    std::fs::create_dir_all(&new_excluded_dir)
        .unwrap_or_else(|e| panic!("Failed to create a {new_excluded_dir:?} directory: {e}"));
    std::fs::create_dir_all(&new_ignored_dir)
        .unwrap_or_else(|e| panic!("Failed to create a {new_ignored_dir:?} directory: {e}"));
    let node_modules_dir = dir.path().join("node_modules");
    let dot_git_dir = dir.path().join(".git");
    let src_dir = dir.path().join("src");
    for existing_dir in [&node_modules_dir, &dot_git_dir, &src_dir] {
        assert!(
            existing_dir.is_dir(),
            "Expect {existing_dir:?} to be present in the FS already"
        );
    }

    // Drop a new file into each directory — excluded, ignored, and tracked —
    // to generate fs events in all of them.
    for directory_for_new_file in [
        new_excluded_dir,
        new_ignored_dir,
        node_modules_dir,
        dot_git_dir,
        src_dir,
    ] {
        std::fs::write(directory_for_new_file.join("new_file"), "new file contents")
            .unwrap_or_else(|e| {
                panic!("Failed to create in {directory_for_new_file:?} a new file: {e}")
            });
    }
    tree.flush_fs_events(cx).await;

    // New files in excluded dirs stay invisible; the ignored test_output
    // and the tracked src/new_file show up with the expected status.
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                ".git/HEAD",
                ".git/foo",
                ".git/new_file",
                "node_modules",
                "node_modules/.DS_Store",
                "node_modules/prettier",
                "node_modules/prettier/package.json",
                "node_modules/new_file",
                "build_output",
                "build_output/new_file",
                "test_output/new_file",
            ],
            &["target", "test_output"],
            &[
                ".DS_Store",
                "src/.DS_Store",
                "src/lib.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/bar/bar.rs",
                "src/new_file",
                ".gitignore",
            ],
            &[],
        )
    });
}
1320
#[gpui::test]
async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
    // When the worktree root is itself a `.git` directory, its contents are
    // still scanned (the entries show up in the "ignored" category of
    // `check_worktree_entries`), and fs events inside it must be observed.
    init_test(cx);
    // Uses the real file system via `TempTree`, so blocking IO is allowed.
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".git": {
            "HEAD": "ref: refs/heads/main\n",
            "foo": "foo contents",
        },
    }));
    let dot_git_worktree_dir = dir.path().join(".git");

    let tree = Worktree::local(
        dot_git_worktree_dir.clone(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    // After the initial scan, both pre-existing files are present.
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(tree, &[], &["HEAD", "foo"], &[], &[])
    });

    // A file created after the initial scan must also be picked up.
    std::fs::write(dot_git_worktree_dir.join("new_file"), "new file contents")
        .unwrap_or_else(|e| panic!("Failed to create in {dot_git_worktree_dir:?} a new file: {e}"));
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(tree, &[], &["HEAD", "foo", "new_file"], &[], &[])
    });
}
1358
#[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
    // Creates a directory entry while the worktree's initial scan may still
    // be running, and verifies that an observer applying the streamed remote
    // updates converges to the same snapshot as the local tree.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "b": {},
            "c": {},
            "d": {},
        }),
    )
    .await;

    let tree = Worktree::local(
        "/root".as_ref(),
        true,
        fs,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Keep a mirror snapshot updated purely from the observed update stream.
    let snapshot1 = tree.update(cx, |tree, cx| {
        let tree = tree.as_local_mut().unwrap();
        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
        tree.observe_updates(0, cx, {
            let snapshot = snapshot.clone();
            let settings = tree.settings();
            move |update| {
                snapshot
                    .lock()
                    .apply_remote_update(update, &settings.file_scan_inclusions);
                async { true }
            }
        });
        snapshot
    });

    // Note: directory "a" does not exist in the initial tree, so this also
    // exercises creating the parent directory — intentionally racing with
    // the initial scan (hence `iterations = 30`).
    let entry = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry(rel_path("a/e").into(), true, None, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_dir());

    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entry_for_path(rel_path("a/e")).unwrap().kind,
            EntryKind::Dir
        );
    });

    // The mirror built from streamed updates must match the local snapshot.
    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
    assert_eq!(
        snapshot1.lock().entries(true, 0).collect::<Vec<_>>(),
        snapshot2.entries(true, 0).collect::<Vec<_>>()
    );
}
1427
#[gpui::test]
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
    // `create_entry` with a deeply nested path should create all missing
    // parent directories (mkdir -p semantics). Exercised against both the
    // fake fs and the real fs.
    init_test(cx);
    cx.executor().allow_parking();

    let fs_fake = FakeFs::new(cx.background_executor.clone());
    fs_fake
        .insert_tree(
            "/root",
            json!({
                "a": {},
            }),
        )
        .await;

    let tree_fake = Worktree::local(
        "/root".as_ref(),
        true,
        fs_fake,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Only "a" exists; "a/b" and "a/b/c" must be created implicitly.
    let entry = tree_fake
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/d.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_fake.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/d.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("a/b/c")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("a/b")).unwrap().is_dir());
    });

    // Repeat the same scenario against the real file system.
    let fs_real = Arc::new(RealFs::new(None, cx.executor()));
    let temp_root = TempTree::new(json!({
        "a": {}
    }));

    let tree_real = Worktree::local(
        temp_root.path(),
        true,
        fs_real,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/d.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/d.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("a/b/c")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("a/b")).unwrap().is_dir());
    });

    // Test smallest change: all parents already exist, only the file is new.
    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/e.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/e.txt"))
                .unwrap()
                .is_file()
        );
    });

    // Test largest change: the entire directory chain is new.
    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("d/e/f/g.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("d/e/f/g.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("d/e/f")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("d/e")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("d")).unwrap().is_dir());
    });
}
1577
#[gpui::test]
async fn test_create_file_in_expanded_gitignored_dir(cx: &mut TestAppContext) {
    // Tests the behavior of our worktree refresh when a file in a gitignored directory
    // is created.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "ignored_dir\n",
            "ignored_dir": {
                "existing_file.txt": "existing content",
                "another_file.txt": "another content",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Right after the scan, the ignored directory has not been descended into.
    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(ignored_dir.kind, EntryKind::UnloadedDir);
    });

    // Loading a file inside the ignored directory forces it to be expanded.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("ignored_dir/existing_file.txt"), cx)
    })
    .await
    .unwrap();

    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(ignored_dir.kind, EntryKind::Dir);

        assert!(
            tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
                .is_some()
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
                .is_some()
        );
    });

    // Creating a new file must not cause the expanded directory to collapse
    // back to UnloadedDir or drop its sibling entries.
    let entry = tree
        .update(cx, |tree, cx| {
            tree.create_entry(rel_path("ignored_dir/new_file.txt").into(), false, None, cx)
        })
        .await
        .unwrap();
    assert!(entry.into_included().is_some());

    cx.executor().run_until_parked();

    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(
            ignored_dir.kind,
            EntryKind::Dir,
            "ignored_dir should still be loaded, not UnloadedDir"
        );

        assert!(
            tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
                .is_some(),
            "existing_file.txt should still be visible"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
                .is_some(),
            "another_file.txt should still be visible"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/new_file.txt"))
                .is_some(),
            "new_file.txt should be visible"
        );
    });
}
1674
#[gpui::test]
async fn test_fs_event_for_gitignored_dir_does_not_lose_contents(cx: &mut TestAppContext) {
    // Tests the behavior of our worktree refresh when a directory modification for a gitignored directory
    // is triggered.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "ignored_dir\n",
            "ignored_dir": {
                "file1.txt": "content1",
                "file2.txt": "content2",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Load a file to expand the ignored directory
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("ignored_dir/file1.txt"), cx)
    })
    .await
    .unwrap();

    // Once expanded, the directory's children are present in the snapshot.
    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert_eq!(ignored_dir.kind, EntryKind::Dir);
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
                .is_some()
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
                .is_some()
        );
    });

    // Simulate a "changed" fs event on the ignored directory itself; the
    // rescan must not forget the already-loaded contents.
    fs.emit_fs_event("/root/ignored_dir", Some(fs::PathEventKind::Changed));
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert_eq!(
            ignored_dir.kind,
            EntryKind::Dir,
            "ignored_dir should still be loaded (Dir), not UnloadedDir"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
                .is_some(),
            "file1.txt should still be visible after directory fs event"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
                .is_some(),
            "file2.txt should still be visible after directory fs event"
        );
    });
}
1750
#[gpui::test(iterations = 100)]
async fn test_random_worktree_operations_during_initial_scan(
    cx: &mut TestAppContext,
    mut rng: StdRng,
) {
    // Fuzz test: performs random worktree mutations while the initial scan is
    // still running, then checks (a) snapshot invariants hold throughout and
    // (b) every intermediate snapshot can be brought up to date by replaying
    // the observed update stream.
    init_test(cx);
    // Iteration counts are overridable via env vars for longer local runs.
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(5);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new(path!("/test"));
    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        // insertion_probability = 1.0: only insertions while seeding.
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Collect intermediate snapshots and the full update stream so we can
    // replay updates onto each snapshot afterwards.
    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    for _ in 0..operations {
        worktree
            .update(cx, |worktree, cx| {
                randomly_mutate_worktree(worktree, &mut rng, cx)
            })
            .await
            .log_err();
        // Invariants must hold even mid-scan.
        worktree.read_with(cx, |tree, _| {
            tree.as_local().unwrap().snapshot().check_invariants(true)
        });

        if rng.random_bool(0.6) {
            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
        }
    }

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    cx.executor().run_until_parked();

    let final_snapshot = worktree.read_with(cx, |tree, _| {
        let tree = tree.as_local().unwrap();
        let snapshot = tree.snapshot();
        snapshot.check_invariants(true);
        snapshot
    });

    let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());

    // Replaying the updates (from each snapshot's scan id onward) onto every
    // intermediate snapshot must reproduce the final state exactly.
    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
        let mut updated_snapshot = snapshot.clone();
        for update in updates.lock().iter() {
            if update.scan_id >= updated_snapshot.scan_id() as u64 {
                updated_snapshot
                    .apply_remote_update(update.clone(), &settings.file_scan_inclusions);
            }
        }

        assert_eq!(
            updated_snapshot.entries(true, 0).collect::<Vec<_>>(),
            final_snapshot.entries(true, 0).collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }
}
1845
#[gpui::test(iterations = 100)]
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
    // Fuzz test: after the initial scan, interleaves random fs/worktree
    // mutations with partial fs-event flushing, then verifies that
    // (a) a freshly scanned worktree agrees with the incrementally updated
    // one, and (b) intermediate snapshots converge by replaying updates.
    init_test(cx);
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(40);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new(path!("/test"));
    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    // Buffer fs events so they can be delivered in randomly sized batches.
    fs.as_fake().pause_events();
    let mut snapshots = Vec::new();
    let mut mutations_len = operations;
    while mutations_len > 1 {
        if rng.random_bool(0.2) {
            worktree
                .update(cx, |worktree, cx| {
                    randomly_mutate_worktree(worktree, &mut rng, cx)
                })
                .await
                .log_err();
        } else {
            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
        }

        // Sometimes flush only a prefix of the buffered events, so the
        // worktree sees partially applied batches of fs changes.
        let buffered_event_count = fs.as_fake().buffered_event_count();
        if buffered_event_count > 0 && rng.random_bool(0.3) {
            let len = rng.random_range(0..=buffered_event_count);
            log::info!("flushing {} events", len);
            fs.as_fake().flush_events(len);
        } else {
            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
            mutations_len -= 1;
        }

        cx.executor().run_until_parked();
        if rng.random_bool(0.2) {
            log::info!("storing snapshot {}", snapshots.len());
            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
            snapshots.push(snapshot);
        }
    }

    // Deliver all remaining events and let the worktree settle.
    log::info!("quiescing");
    fs.as_fake().flush_events(usize::MAX);
    cx.executor().run_until_parked();

    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    snapshot.check_invariants(true);
    let expanded_paths = snapshot
        .expanded_entries()
        .map(|e| e.path.clone())
        .collect::<Vec<_>>();

    // A brand-new worktree scanning the same fs (with the same directories
    // expanded) must produce an identical set of entries.
    {
        let new_worktree = Worktree::local(
            root_dir,
            true,
            fs.clone(),
            Default::default(),
            true,
            WorktreeId::from_proto(0),
            &mut cx.to_async(),
        )
        .await
        .unwrap();
        new_worktree
            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
            .await;
        new_worktree
            .update(cx, |tree, _| {
                tree.as_local_mut()
                    .unwrap()
                    .refresh_entries_for_paths(expanded_paths)
            })
            .recv()
            .await;
        let new_snapshot =
            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        assert_eq!(
            snapshot.entries_without_ids(true),
            new_snapshot.entries_without_ids(true)
        );
    }

    let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());

    // Replaying observed updates onto each stored snapshot must reproduce
    // the final state (modulo pending-dir kinds, normalized below).
    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
        for update in updates.lock().iter() {
            if update.scan_id >= prev_snapshot.scan_id() as u64 {
                prev_snapshot.apply_remote_update(update.clone(), &settings.file_scan_inclusions);
            }
        }

        assert_eq!(
            prev_snapshot
                .entries(true, 0)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            snapshot
                .entries(true, 0)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }

    // Normalizes any directory-like kind to plain `Dir` so the comparison
    // ignores transient pending states.
    fn ignore_pending_dir(entry: &Entry) -> Entry {
        let mut entry = entry.clone();
        if entry.kind.is_dir() {
            entry.kind = EntryKind::Dir
        }
        entry
    }
}
1998
// The worktree's `UpdatedEntries` event can be used to follow along with
// all changes to the worktree's snapshot. This subscribes to the tree's own
// events, incrementally maintains a sorted entry list from the reported
// changes, and asserts after every event batch that the maintained list
// matches a fresh traversal of the tree.
fn check_worktree_change_events(tree: &mut Worktree, cx: &mut Context<Worktree>) {
    let mut entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
    cx.subscribe(&cx.entity(), move |tree, _, event, _| {
        if let Event::UpdatedEntries(changes) = event {
            for (path, _, change_type) in changes.iter() {
                let entry = tree.entry_for_path(path).cloned();
                // Position where the changed path is, or would be inserted.
                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
                    Ok(ix) | Err(ix) => ix,
                };
                match change_type {
                    PathChange::Added => entries.insert(ix, entry.unwrap()),
                    PathChange::Removed => drop(entries.remove(ix)),
                    PathChange::Updated => {
                        let entry = entry.unwrap();
                        let existing_entry = entries.get_mut(ix).unwrap();
                        assert_eq!(existing_entry.path, entry.path);
                        *existing_entry = entry;
                    }
                    PathChange::AddedOrUpdated | PathChange::Loaded => {
                        // Upsert: replace in place if the path already
                        // exists at this position, otherwise insert.
                        let entry = entry.unwrap();
                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
                            *entries.get_mut(ix).unwrap() = entry;
                        } else {
                            entries.insert(ix, entry);
                        }
                    }
                }
            }

            // The incrementally maintained list must match reality.
            let new_entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
        }
    })
    .detach();
}
2036
/// Applies one random mutation through the worktree's own API: roughly a
/// third of the time it deletes a randomly chosen entry (never the root);
/// otherwise it either creates a new child under a randomly chosen directory
/// or overwrites a randomly chosen file with empty contents.
///
/// Returns a task that resolves once the mutation has been applied.
fn randomly_mutate_worktree(
    worktree: &mut Worktree,
    rng: &mut impl Rng,
    cx: &mut Context<Worktree>,
) -> Task<Result<()>> {
    log::info!("mutating worktree");
    let worktree = worktree.as_local_mut().unwrap();
    let snapshot = worktree.snapshot();
    let entry = snapshot.entries(false, 0).choose(rng).unwrap();

    match rng.random_range(0_u32..100) {
        // ~34%: delete the chosen entry, but never the root entry.
        0..=33 if entry.path.as_ref() != RelPath::empty() => {
            log::info!("deleting entry {:?} ({})", entry.path, entry.id.to_usize());
            worktree.delete_entry(entry.id, false, cx).unwrap()
        }
        _ => {
            if entry.is_dir() {
                // Create a new file or directory inside the chosen directory.
                let child_path = entry.path.join(rel_path(&random_filename(rng)));
                let is_dir = rng.random_bool(0.3);
                log::info!(
                    "creating {} at {:?}",
                    if is_dir { "dir" } else { "file" },
                    child_path,
                );
                let task = worktree.create_entry(child_path, is_dir, None, cx);
                cx.background_spawn(async move {
                    task.await?;
                    Ok(())
                })
            } else {
                // Overwrite the chosen file with empty UTF-8 contents.
                log::info!(
                    "overwriting file {:?} ({})",
                    &entry.path,
                    entry.id.to_usize()
                );
                let task = worktree.write_file(
                    entry.path.clone(),
                    "".into(),
                    Default::default(),
                    encoding_rs::UTF_8,
                    false,
                    cx,
                );
                cx.background_spawn(async move {
                    task.await?;
                    Ok(())
                })
            }
        }
    }
}
2088
/// Applies one random mutation directly to the fake file system beneath
/// `root_path`: inserts a new file/dir (with probability
/// `insertion_probability`, or always when the tree is nearly empty),
/// occasionally (5%) writes a random `.gitignore`, and otherwise renames or
/// deletes an existing entry.
async fn randomly_mutate_fs(
    fs: &Arc<dyn Fs>,
    root_path: &Path,
    insertion_probability: f64,
    rng: &mut impl Rng,
) {
    log::info!("mutating fs");
    // Partition every existing path under the root into files and dirs.
    let mut files = Vec::new();
    let mut dirs = Vec::new();
    for path in fs.as_fake().paths(false) {
        if path.starts_with(root_path) {
            if fs.is_file(&path).await {
                files.push(path);
            } else {
                dirs.push(path);
            }
        }
    }

    if (files.is_empty() && dirs.len() == 1) || rng.random_bool(insertion_probability) {
        // Insert a new file or directory under a random existing directory.
        let path = dirs.choose(rng).unwrap();
        let new_path = path.join(random_filename(rng));

        if rng.random() {
            log::info!(
                "creating dir {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_dir(&new_path).await.unwrap();
        } else {
            log::info!(
                "creating file {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_file(&new_path, Default::default()).await.unwrap();
        }
    } else if rng.random_bool(0.05) {
        // Write a .gitignore in a random directory, listing a random subset
        // of the files and directories beneath it.
        let ignore_dir_path = dirs.choose(rng).unwrap();
        let ignore_path = ignore_dir_path.join(GITIGNORE);

        let subdirs = dirs
            .iter()
            .filter(|d| d.starts_with(ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let subfiles = files
            .iter()
            .filter(|d| d.starts_with(ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let files_to_ignore = {
            let len = rng.random_range(0..=subfiles.len());
            subfiles.choose_multiple(rng, len)
        };
        // NOTE(review): the exclusive upper bound here (vs. inclusive above)
        // means at least one subdir is never ignored — presumably to keep the
        // ignore file's own directory reachable; confirm before changing.
        let dirs_to_ignore = {
            let len = rng.random_range(0..subdirs.len());
            subdirs.choose_multiple(rng, len)
        };

        let mut ignore_contents = String::new();
        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
            writeln!(
                ignore_contents,
                "{}",
                path_to_ignore
                    .strip_prefix(ignore_dir_path)
                    .unwrap()
                    .to_str()
                    .unwrap()
            )
            .unwrap();
        }
        log::info!(
            "creating gitignore {:?} with contents:\n{}",
            ignore_path.strip_prefix(root_path).unwrap(),
            ignore_contents
        );
        fs.save(
            &ignore_path,
            &ignore_contents.as_str().into(),
            Default::default(),
        )
        .await
        .unwrap();
    } else {
        // Rename or delete an existing file or directory.
        // `dirs[1..]` skips the first dir (presumably the root itself, so it
        // is never renamed or deleted — TODO confirm `paths()` ordering).
        let old_path = {
            let file_path = files.choose(rng);
            let dir_path = dirs[1..].choose(rng);
            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
        };

        let is_rename = rng.random();
        if is_rename {
            let new_path_parent = dirs
                .iter()
                .filter(|d| !d.starts_with(old_path))
                .choose(rng)
                .unwrap();

            // Occasionally rename on top of an existing directory,
            // removing it first so the rename replaces it.
            let overwrite_existing_dir =
                !old_path.starts_with(new_path_parent) && rng.random_bool(0.3);
            let new_path = if overwrite_existing_dir {
                fs.remove_dir(
                    new_path_parent,
                    RemoveOptions {
                        recursive: true,
                        ignore_if_not_exists: true,
                    },
                )
                .await
                .unwrap();
                new_path_parent.to_path_buf()
            } else {
                new_path_parent.join(random_filename(rng))
            };

            log::info!(
                "renaming {:?} to {}{:?}",
                old_path.strip_prefix(root_path).unwrap(),
                if overwrite_existing_dir {
                    "overwrite "
                } else {
                    ""
                },
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.rename(
                old_path,
                &new_path,
                fs::RenameOptions {
                    overwrite: true,
                    ignore_if_exists: true,
                    create_parents: false,
                },
            )
            .await
            .unwrap();
        } else if fs.is_file(old_path).await {
            log::info!(
                "deleting file {:?}",
                old_path.strip_prefix(root_path).unwrap()
            );
            fs.remove_file(old_path, Default::default()).await.unwrap();
        } else {
            log::info!(
                "deleting dir {:?}",
                old_path.strip_prefix(root_path).unwrap()
            );
            fs.remove_dir(
                old_path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await
            .unwrap();
        }
    }
}
2249
2250fn random_filename(rng: &mut impl Rng) -> String {
2251 (0..6)
2252 .map(|_| rng.sample(rand::distr::Alphanumeric))
2253 .map(char::from)
2254 .collect()
2255}
2256
2257#[gpui::test]
2258async fn test_private_single_file_worktree(cx: &mut TestAppContext) {
2259 init_test(cx);
2260 let fs = FakeFs::new(cx.background_executor.clone());
2261 fs.insert_tree("/", json!({".env": "PRIVATE=secret\n"}))
2262 .await;
2263 let tree = Worktree::local(
2264 Path::new("/.env"),
2265 true,
2266 fs.clone(),
2267 Default::default(),
2268 true,
2269 WorktreeId::from_proto(0),
2270 &mut cx.to_async(),
2271 )
2272 .await
2273 .unwrap();
2274 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2275 .await;
2276 tree.read_with(cx, |tree, _| {
2277 let entry = tree.entry_for_path(rel_path("")).unwrap();
2278 assert!(entry.is_private);
2279 });
2280}
2281
#[gpui::test]
async fn test_repository_above_root(executor: BackgroundExecutor, cx: &mut TestAppContext) {
    // A `.git` directory located *above* the worktree root should still be
    // discovered as the containing repository, and should remain discovered
    // after the worktree root is rescanned.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            "subproject": {
                "a.txt": "A"
            }
        }),
    )
    .await;
    let worktree = Worktree::local(
        path!("/root/subproject").as_ref(),
        true,
        fs.clone(),
        Arc::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();
    let repos = worktree.update(cx, |worktree, _| {
        worktree.as_local().unwrap().repositories()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root")).into()]);

    // Touching the worktree root triggers a rescan; the repository found
    // above the root must not be dropped.
    fs.touch_path(path!("/root/subproject")).await;
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    let repos = worktree.update(cx, |worktree, _| {
        worktree.as_local().unwrap().repositories()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root")).into()]);
}
2332
#[gpui::test]
async fn test_global_gitignore(executor: BackgroundExecutor, cx: &mut TestAppContext) {
    // Exercises the global excludes file (`~/.config/git/ignore`): its
    // patterns apply to the project, can be overridden by a repo-local
    // `.gitignore`, and ignore statuses are recomputed when the global file
    // changes or when nested `.git` directories appear/disappear.
    init_test(cx);

    let home = paths::home_dir();
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        home,
        json!({
            ".config": {
                "git": {
                    "ignore": "foo\n/bar\nbaz\n"
                }
            },
            "project": {
                ".git": {},
                ".gitignore": "!baz",
                "foo": "",
                "bar": "",
                "sub": {
                    "bar": "",
                },
                "subrepo": {
                    ".git": {},
                    "bar": ""
                },
                "baz": ""
            }
        }),
    )
    .await;
    let worktree = Worktree::local(
        home.join("project"),
        true,
        fs.clone(),
        Arc::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    // .gitignore overrides excludesFile, and anchored paths in excludesFile are resolved
    // relative to the nearest containing repository
    worktree.update(cx, |worktree, _cx| {
        check_worktree_entries(
            worktree,
            &[],
            &["foo", "bar", "subrepo/bar"],
            &["sub/bar", "baz"],
            &[],
        );
    });

    // Ignore statuses are updated when excludesFile changes
    fs.write(
        &home.join(".config").join("git").join("ignore"),
        "/bar\nbaz\n".as_bytes(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    // "foo" is no longer globally ignored, so it becomes tracked.
    worktree.update(cx, |worktree, _cx| {
        check_worktree_entries(
            worktree,
            &[],
            &["bar", "subrepo/bar"],
            &["foo", "sub/bar", "baz"],
            &[],
        );
    });

    // Statuses are updated when .git added/removed
    fs.remove_dir(
        &home.join("project").join("subrepo").join(".git"),
        RemoveOptions {
            recursive: true,
            ..Default::default()
        },
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    // With subrepo no longer its own repository, the anchored "/bar" pattern
    // no longer applies to "subrepo/bar", so it becomes tracked.
    worktree.update(cx, |worktree, _cx| {
        check_worktree_entries(
            worktree,
            &[],
            &["bar"],
            &["foo", "sub/bar", "baz", "subrepo/bar"],
            &[],
        );
    });
}
2445
#[gpui::test]
async fn test_repo_exclude(executor: BackgroundExecutor, cx: &mut TestAppContext) {
    // Exercises `.git/info/exclude`: its patterns are applied to the
    // repository, can be overridden by `.gitignore`, and ignore statuses are
    // recomputed when the exclude file changes.
    init_test(cx);

    let fs = FakeFs::new(executor);
    let project_dir = Path::new(path!("/project"));
    fs.insert_tree(
        project_dir,
        json!({
            ".git": {
                "info": {
                    "exclude": ".env.*"
                }
            },
            ".env.example": "secret=xxxx",
            ".env.local": "secret=1234",
            ".gitignore": "!.env.example",
            "README.md": "# Repo Exclude",
            "src": {
                "main.rs": "fn main() {}",
            },
        }),
    )
    .await;

    let worktree = Worktree::local(
        project_dir,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    // .gitignore overrides .git/info/exclude
    worktree.update(cx, |worktree, _cx| {
        let expected_excluded_paths = [];
        let expected_ignored_paths = [".env.local"];
        let expected_tracked_paths = [".env.example", "README.md", "src/main.rs"];
        let expected_included_paths = [];

        check_worktree_entries(
            worktree,
            &expected_excluded_paths,
            &expected_ignored_paths,
            &expected_tracked_paths,
            &expected_included_paths,
        );
    });

    // Ignore statuses are updated when .git/info/exclude file changes
    fs.write(
        &project_dir.join(DOT_GIT).join(REPO_EXCLUDE),
        ".env.example".as_bytes(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    // The new exclude pattern only names ".env.example", which .gitignore
    // un-ignores; ".env.local" is no longer matched, so nothing is ignored.
    worktree.update(cx, |worktree, _cx| {
        let expected_excluded_paths = [];
        let expected_ignored_paths = [];
        let expected_tracked_paths = [".env.example", ".env.local", "README.md", "src/main.rs"];
        let expected_included_paths = [];

        check_worktree_entries(
            worktree,
            &expected_excluded_paths,
            &expected_ignored_paths,
            &expected_tracked_paths,
            &expected_included_paths,
        );
    });
}
2534
2535#[track_caller]
2536fn check_worktree_entries(
2537 tree: &Worktree,
2538 expected_excluded_paths: &[&str],
2539 expected_ignored_paths: &[&str],
2540 expected_tracked_paths: &[&str],
2541 expected_included_paths: &[&str],
2542) {
2543 for path in expected_excluded_paths {
2544 let entry = tree.entry_for_path(rel_path(path));
2545 assert!(
2546 entry.is_none(),
2547 "expected path '{path}' to be excluded, but got entry: {entry:?}",
2548 );
2549 }
2550 for path in expected_ignored_paths {
2551 let entry = tree
2552 .entry_for_path(rel_path(path))
2553 .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
2554 assert!(
2555 entry.is_ignored,
2556 "expected path '{path}' to be ignored, but got entry: {entry:?}",
2557 );
2558 }
2559 for path in expected_tracked_paths {
2560 let entry = tree
2561 .entry_for_path(rel_path(path))
2562 .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
2563 assert!(
2564 !entry.is_ignored || entry.is_always_included,
2565 "expected path '{path}' to be tracked, but got entry: {entry:?}",
2566 );
2567 }
2568 for path in expected_included_paths {
2569 let entry = tree
2570 .entry_for_path(rel_path(path))
2571 .unwrap_or_else(|| panic!("Missing entry for expected included path '{path}'"));
2572 assert!(
2573 entry.is_always_included,
2574 "expected path '{path}' to always be included, but got entry: {entry:?}",
2575 );
2576 }
2577}
2578
2579fn init_test(cx: &mut gpui::TestAppContext) {
2580 zlog::init_test();
2581
2582 cx.update(|cx| {
2583 let settings_store = SettingsStore::test(cx);
2584 cx.set_global(settings_store);
2585 });
2586}
2587
// Loads files written in a variety of encodings through `Worktree::load_file`
// and checks that each decodes to the expected text, while binary content is
// rejected with an error.
#[gpui::test]
async fn test_load_file_encoding(cx: &mut TestAppContext) {
    init_test(cx);

    // One on-disk file per case: `bytes` is the raw file content and
    // `expected_text` the decoded result (unused for failure cases).
    struct TestCase {
        name: &'static str,
        bytes: Vec<u8>,
        expected_text: &'static str,
    }

    // --- Success Cases ---
    let success_cases = vec![
        // Plain UTF-8.
        TestCase {
            name: "utf8.txt",
            bytes: "こんにちは".as_bytes().to_vec(),
            expected_text: "こんにちは",
        },
        // Shift_JIS bytes for the same Japanese string.
        TestCase {
            name: "sjis.txt",
            bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
            expected_text: "こんにちは",
        },
        // EUC-JP bytes.
        TestCase {
            name: "eucjp.txt",
            bytes: vec![0xa4, 0xb3, 0xa4, 0xf3, 0xa4, 0xcb, 0xa4, 0xc1, 0xa4, 0xcf],
            expected_text: "こんにちは",
        },
        // ISO-2022-JP, including the 0x1B escape sequences that switch charsets.
        TestCase {
            name: "iso2022jp.txt",
            bytes: vec![
                0x1b, 0x24, 0x42, 0x24, 0x33, 0x24, 0x73, 0x24, 0x4b, 0x24, 0x41, 0x24, 0x4f, 0x1b,
                0x28, 0x42,
            ],
            expected_text: "こんにちは",
        },
        // Windows-1252: 0xE9 = 'é'.
        TestCase {
            name: "win1252.txt",
            bytes: vec![0x43, 0x61, 0x66, 0xe9],
            expected_text: "Café",
        },
        // GBK-encoded Simplified Chinese.
        TestCase {
            name: "gbk.txt",
            bytes: vec![
                0xbd, 0xf1, 0xcc, 0xec, 0xcc, 0xec, 0xc6, 0xf8, 0xb2, 0xbb, 0xb4, 0xed,
            ],
            expected_text: "今天天气不错",
        },
        // UTF-16LE with BOM
        TestCase {
            name: "utf16le_bom.txt",
            bytes: vec![
                0xFF, 0xFE, // BOM
                0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F, 0x30,
            ],
            expected_text: "こんにちは",
        },
        // UTF-16BE with BOM
        TestCase {
            name: "utf16be_bom.txt",
            bytes: vec![
                0xFE, 0xFF, // BOM
                0x30, 0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F,
            ],
            expected_text: "こんにちは",
        },
        // UTF-16LE without BOM (ASCII only)
        // This relies on the "null byte heuristic" we implemented.
        // "ABC" -> 41 00 42 00 43 00
        TestCase {
            name: "utf16le_ascii_no_bom.txt",
            bytes: vec![0x41, 0x00, 0x42, 0x00, 0x43, 0x00],
            expected_text: "ABC",
        },
    ];

    // --- Failure Cases ---
    let failure_cases = vec![
        // Binary File (Should be detected by heuristic and return Error)
        // Contains random bytes and mixed nulls that don't match UTF-16 patterns
        TestCase {
            name: "binary.bin",
            bytes: vec![0x00, 0xFF, 0x12, 0x00, 0x99, 0x88, 0x77, 0x66, 0x00],
            expected_text: "", // Not used
        },
    ];

    // FakeFs paths are absolute; pick a platform-appropriate root.
    let root_path = if cfg!(windows) {
        Path::new("C:\\root")
    } else {
        Path::new("/root")
    };

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.create_dir(root_path).await.unwrap();

    // Materialize every case (success and failure) as a file on disk.
    for case in success_cases.iter().chain(failure_cases.iter()) {
        let path = root_path.join(case.name);
        fs.write(&path, &case.bytes).await.unwrap();
    }

    let tree = Worktree::local(
        root_path,
        true,
        fs,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Builds a worktree-relative path using the local platform's path style.
    // NOTE: this shadows the `rel_path` helper imported at the top of the file.
    let rel_path = |name: &str| {
        RelPath::new(&Path::new(name), PathStyle::local())
            .unwrap()
            .into_arc()
    };

    // Run Success Tests
    for case in success_cases {
        let loaded = tree
            .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx))
            .await;
        if let Err(e) = &loaded {
            panic!("Failed to load success case '{}': {:?}", case.name, e);
        }
        let loaded = loaded.unwrap();
        assert_eq!(
            loaded.text, case.expected_text,
            "Encoding mismatch for file: {}",
            case.name
        );
    }

    // Run Failure Tests
    for case in failure_cases {
        let loaded = tree
            .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx))
            .await;
        assert!(
            loaded.is_err(),
            "Failure case '{}' unexpectedly succeeded! It should have been detected as binary.",
            case.name
        );
        let err_msg = loaded.unwrap_err().to_string();
        println!("Got expected error for {}: {}", case.name, err_msg);
    }
}
2739
// Writes text through `Worktree::write_file` with explicit target encodings
// and BOM settings, then asserts the exact bytes that land on disk.
#[gpui::test]
async fn test_write_file_encoding(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // FakeFs paths are absolute; pick a platform-appropriate root.
    let root_path = if cfg!(windows) {
        Path::new("C:\\root")
    } else {
        Path::new("/root")
    };
    fs.create_dir(root_path).await.unwrap();

    let worktree = Worktree::local(
        root_path,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Define test case structure
    struct TestCase {
        // Human-readable case label used in failure messages.
        name: &'static str,
        // The text to write.
        text: &'static str,
        // Target encoding passed to `write_file`.
        encoding: &'static encoding_rs::Encoding,
        // Whether a byte-order mark should be emitted.
        has_bom: bool,
        // The exact bytes expected on disk (BOM included, if any).
        expected_bytes: Vec<u8>,
    }

    let cases = vec![
        // Shift_JIS with Japanese
        TestCase {
            name: "Shift_JIS with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::SHIFT_JIS,
            has_bom: false,
            expected_bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
        },
        // UTF-8 No BOM
        TestCase {
            name: "UTF-8 No BOM",
            text: "AB",
            encoding: encoding_rs::UTF_8,
            has_bom: false,
            expected_bytes: vec![0x41, 0x42],
        },
        // UTF-8 with BOM
        TestCase {
            name: "UTF-8 with BOM",
            text: "AB",
            encoding: encoding_rs::UTF_8,
            has_bom: true,
            expected_bytes: vec![0xEF, 0xBB, 0xBF, 0x41, 0x42],
        },
        // UTF-16LE No BOM with Japanese
        // NOTE: This passes thanks to the manual encoding fix implemented in `write_file`.
        TestCase {
            name: "UTF-16LE No BOM with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::UTF_16LE,
            has_bom: false,
            expected_bytes: vec![0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f, 0x30],
        },
        // UTF-16LE with BOM
        TestCase {
            name: "UTF-16LE with BOM",
            text: "A",
            encoding: encoding_rs::UTF_16LE,
            has_bom: true,
            expected_bytes: vec![0xFF, 0xFE, 0x41, 0x00],
        },
        // UTF-16BE No BOM with Japanese
        // NOTE: This passes thanks to the manual encoding fix.
        TestCase {
            name: "UTF-16BE No BOM with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::UTF_16BE,
            has_bom: false,
            expected_bytes: vec![0x30, 0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f],
        },
        // UTF-16BE with BOM
        TestCase {
            name: "UTF-16BE with BOM",
            text: "A",
            encoding: encoding_rs::UTF_16BE,
            has_bom: true,
            expected_bytes: vec![0xFE, 0xFF, 0x00, 0x41],
        },
    ];

    for (i, case) in cases.into_iter().enumerate() {
        // Each case targets its own file so cases cannot interfere.
        let file_name = format!("test_{}.txt", i);
        let path: Arc<Path> = Path::new(&file_name).into();
        let file_path = root_path.join(&file_name);

        // Seed an empty file at the target path before writing over it.
        fs.insert_file(&file_path, "".into()).await;

        let rel_path = RelPath::new(&path, PathStyle::local()).unwrap().into_arc();
        let text = text::Rope::from(case.text);

        let task = worktree.update(cx, |wt, cx| {
            wt.write_file(
                rel_path,
                text,
                text::LineEnding::Unix,
                case.encoding,
                case.has_bom,
                cx,
            )
        });

        if let Err(e) = task.await {
            panic!("Unexpected error in case '{}': {:?}", case.name, e);
        }

        // Compare the raw bytes on disk, BOM included.
        let bytes = fs.load_bytes(&file_path).await.unwrap();

        assert_eq!(
            bytes, case.expected_bytes,
            "case '{}' mismatch. Expected {:?}, but got {:?}",
            case.name, case.expected_bytes, bytes
        );
    }
}
2868
2869#[gpui::test]
2870async fn test_refresh_entries_for_paths_creates_ancestors(cx: &mut TestAppContext) {
2871 init_test(cx);
2872 let fs = FakeFs::new(cx.background_executor.clone());
2873 fs.insert_tree(
2874 "/root",
2875 json!({
2876 "a": {
2877 "b": {
2878 "c": {
2879 "deep_file.txt": "content",
2880 "sibling.txt": "content"
2881 },
2882 "d": {
2883 "under_sibling_dir.txt": "content"
2884 }
2885 }
2886 }
2887 }),
2888 )
2889 .await;
2890
2891 let tree = Worktree::local(
2892 Path::new("/root"),
2893 true,
2894 fs.clone(),
2895 Default::default(),
2896 false, // Disable scanning so the initial scan doesn't discover any entries
2897 WorktreeId::from_proto(0),
2898 &mut cx.to_async(),
2899 )
2900 .await
2901 .unwrap();
2902
2903 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2904 .await;
2905
2906 tree.read_with(cx, |tree, _| {
2907 assert_eq!(
2908 tree.entries(true, 0)
2909 .map(|e| e.path.as_ref())
2910 .collect::<Vec<_>>(),
2911 &[rel_path("")],
2912 "Only root entry should exist when scanning is disabled"
2913 );
2914
2915 assert!(tree.entry_for_path(rel_path("a")).is_none());
2916 assert!(tree.entry_for_path(rel_path("a/b")).is_none());
2917 assert!(tree.entry_for_path(rel_path("a/b/c")).is_none());
2918 assert!(
2919 tree.entry_for_path(rel_path("a/b/c/deep_file.txt"))
2920 .is_none()
2921 );
2922 });
2923
2924 tree.read_with(cx, |tree, _| {
2925 tree.as_local()
2926 .unwrap()
2927 .refresh_entries_for_paths(vec![rel_path("a/b/c/deep_file.txt").into()])
2928 })
2929 .recv()
2930 .await;
2931
2932 tree.read_with(cx, |tree, _| {
2933 assert_eq!(
2934 tree.entries(true, 0)
2935 .map(|e| e.path.as_ref())
2936 .collect::<Vec<_>>(),
2937 &[
2938 rel_path(""),
2939 rel_path("a"),
2940 rel_path("a/b"),
2941 rel_path("a/b/c"),
2942 rel_path("a/b/c/deep_file.txt"),
2943 rel_path("a/b/c/sibling.txt"),
2944 rel_path("a/b/d"),
2945 ],
2946 "All ancestors should be created when refreshing a deeply nested path"
2947 );
2948 });
2949}