1mod worktree_settings;
2
3use anyhow::Result;
4use encoding_rs;
5use fs::{FakeFs, Fs, RealFs, RemoveOptions};
6use git::{DOT_GIT, GITIGNORE, REPO_EXCLUDE};
7use gpui::{AppContext as _, BackgroundExecutor, BorrowAppContext, Context, Task, TestAppContext};
8use parking_lot::Mutex;
9use postage::stream::Stream;
10use pretty_assertions::assert_eq;
11use rand::prelude::*;
12use worktree::{Entry, EntryKind, Event, PathChange, Worktree, WorktreeModelHandle};
13
14use serde_json::json;
15use settings::{SettingsStore, WorktreeId};
16use std::{
17 cell::Cell,
18 env,
19 fmt::Write,
20 mem,
21 path::{Path, PathBuf},
22 rc::Rc,
23 sync::Arc,
24};
25use util::{
26 ResultExt, path,
27 paths::PathStyle,
28 rel_path::{RelPath, rel_path},
29 test::TempTree,
30};
31
32#[gpui::test]
33async fn test_traversal(cx: &mut TestAppContext) {
34 init_test(cx);
35 let fs = FakeFs::new(cx.background_executor.clone());
36 fs.insert_tree(
37 "/root",
38 json!({
39 ".gitignore": "a/b\n",
40 "a": {
41 "b": "",
42 "c": "",
43 }
44 }),
45 )
46 .await;
47
48 let tree = Worktree::local(
49 Path::new("/root"),
50 true,
51 fs,
52 Default::default(),
53 true,
54 WorktreeId::from_proto(0),
55 &mut cx.to_async(),
56 )
57 .await
58 .unwrap();
59 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
60 .await;
61
62 tree.read_with(cx, |tree, _| {
63 assert_eq!(
64 tree.entries(false, 0)
65 .map(|entry| entry.path.as_ref())
66 .collect::<Vec<_>>(),
67 vec![
68 rel_path(""),
69 rel_path(".gitignore"),
70 rel_path("a"),
71 rel_path("a/c"),
72 ]
73 );
74 assert_eq!(
75 tree.entries(true, 0)
76 .map(|entry| entry.path.as_ref())
77 .collect::<Vec<_>>(),
78 vec![
79 rel_path(""),
80 rel_path(".gitignore"),
81 rel_path("a"),
82 rel_path("a/b"),
83 rel_path("a/c"),
84 ]
85 );
86 })
87}
88
89#[gpui::test(iterations = 10)]
90async fn test_circular_symlinks(cx: &mut TestAppContext) {
91 init_test(cx);
92 let fs = FakeFs::new(cx.background_executor.clone());
93 fs.insert_tree(
94 "/root",
95 json!({
96 "lib": {
97 "a": {
98 "a.txt": ""
99 },
100 "b": {
101 "b.txt": ""
102 }
103 }
104 }),
105 )
106 .await;
107 fs.create_symlink("/root/lib/a/lib".as_ref(), "..".into())
108 .await
109 .unwrap();
110 fs.create_symlink("/root/lib/b/lib".as_ref(), "..".into())
111 .await
112 .unwrap();
113
114 let tree = Worktree::local(
115 Path::new("/root"),
116 true,
117 fs.clone(),
118 Default::default(),
119 true,
120 WorktreeId::from_proto(0),
121 &mut cx.to_async(),
122 )
123 .await
124 .unwrap();
125
126 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
127 .await;
128
129 tree.read_with(cx, |tree, _| {
130 assert_eq!(
131 tree.entries(false, 0)
132 .map(|entry| entry.path.as_ref())
133 .collect::<Vec<_>>(),
134 vec![
135 rel_path(""),
136 rel_path("lib"),
137 rel_path("lib/a"),
138 rel_path("lib/a/a.txt"),
139 rel_path("lib/a/lib"),
140 rel_path("lib/b"),
141 rel_path("lib/b/b.txt"),
142 rel_path("lib/b/lib"),
143 ]
144 );
145 });
146
147 fs.rename(
148 Path::new("/root/lib/a/lib"),
149 Path::new("/root/lib/a/lib-2"),
150 Default::default(),
151 )
152 .await
153 .unwrap();
154 cx.executor().run_until_parked();
155 tree.read_with(cx, |tree, _| {
156 assert_eq!(
157 tree.entries(false, 0)
158 .map(|entry| entry.path.as_ref())
159 .collect::<Vec<_>>(),
160 vec![
161 rel_path(""),
162 rel_path("lib"),
163 rel_path("lib/a"),
164 rel_path("lib/a/a.txt"),
165 rel_path("lib/a/lib-2"),
166 rel_path("lib/b"),
167 rel_path("lib/b/b.txt"),
168 rel_path("lib/b/lib"),
169 ]
170 );
171 });
172}
173
// Verifies that symlinks whose targets live outside the worktree root are
// marked `is_external`, left unloaded by the initial scan, and loaded lazily
// (one level at a time) when explicitly refreshed.
#[gpui::test]
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "dir1": {
                "deps": {
                    // symlinks here
                },
                "src": {
                    "a.rs": "",
                    "b.rs": "",
                },
            },
            "dir2": {
                "src": {
                    "c.rs": "",
                    "d.rs": "",
                }
            },
            "dir3": {
                "deps": {},
                "src": {
                    "e.rs": "",
                    "f.rs": "",
                },
            }
        }),
    )
    .await;

    // These symlinks point to directories outside of the worktree's root, dir1.
    fs.create_symlink("/root/dir1/deps/dep-dir2".as_ref(), "../../dir2".into())
        .await
        .unwrap();
    fs.create_symlink("/root/dir1/deps/dep-dir3".as_ref(), "../../dir3".into())
        .await
        .unwrap();

    // The worktree is rooted at dir1 only, so dir2/dir3 are external targets.
    let tree = Worktree::local(
        Path::new("/root/dir1"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Record every (path, change) pair the worktree reports, so we can assert
    // exactly which entries were marked as loaded.
    let tree_updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |_, cx| {
        let tree_updates = tree_updates.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedEntries(update) = event {
                tree_updates.lock().extend(
                    update
                        .iter()
                        .map(|(path, _, change)| (path.clone(), *change)),
                );
            }
        })
        .detach();
    });

    // The symlinked directories are not scanned by default.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );

        assert_eq!(
            tree.entry_for_path(rel_path("deps/dep-dir2")).unwrap().kind,
            EntryKind::UnloadedDir
        );
    });

    // Expand one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3").into()])
    })
    .recv()
    .await;

    // The expanded directory's contents are loaded. Subdirectories are
    // not scanned yet.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("deps/dep-dir3/deps"), true),
                (rel_path("deps/dep-dir3/src"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );
    });
    // Only the refreshed directory and its immediate children are reported.
    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (rel_path("deps/dep-dir3").into(), PathChange::Loaded),
            (rel_path("deps/dep-dir3/deps").into(), PathChange::Loaded),
            (rel_path("deps/dep-dir3/src").into(), PathChange::Loaded)
        ]
    );

    // Expand a subdirectory of one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3/src").into()])
    })
    .recv()
    .await;

    // The expanded subdirectory's contents are loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("deps/dep-dir3/deps"), true),
                (rel_path("deps/dep-dir3/src"), true),
                (rel_path("deps/dep-dir3/src/e.rs"), true),
                (rel_path("deps/dep-dir3/src/f.rs"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );
    });

    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (rel_path("deps/dep-dir3/src").into(), PathChange::Loaded),
            (
                rel_path("deps/dep-dir3/src/e.rs").into(),
                PathChange::Loaded
            ),
            (
                rel_path("deps/dep-dir3/src/f.rs").into(),
                PathChange::Loaded
            )
        ]
    );
}
352
#[cfg(target_os = "macos")]
#[gpui::test]
async fn test_renaming_case_only(cx: &mut TestAppContext) {
    cx.executor().allow_parking();
    init_test(cx);

    // A rename that only changes letter case (macOS-only test; presumably
    // exercising a case-insensitive filesystem) must still be observed.
    const OLD_NAME: &str = "aaa.rs";
    const NEW_NAME: &str = "AAA.rs";

    let fs = Arc::new(RealFs::new(None, cx.executor()));
    let temp_root = TempTree::new(json!({
        OLD_NAME: "",
    }));

    let worktree = Worktree::local(
        temp_root.path(),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| worktree.read(cx).as_local().unwrap().scan_complete())
        .await;
    worktree.read_with(cx, |tree, _| {
        let entries = tree
            .entries(true, 0)
            .map(|entry| entry.path.as_ref())
            .collect::<Vec<_>>();
        assert_eq!(entries, vec![rel_path(""), rel_path(OLD_NAME)]);
    });

    fs.rename(
        &temp_root.path().join(OLD_NAME),
        &temp_root.path().join(NEW_NAME),
        fs::RenameOptions {
            overwrite: true,
            ignore_if_exists: true,
            create_parents: false,
        },
    )
    .await
    .unwrap();

    worktree.flush_fs_events(cx).await;

    // The entry should now appear under its new casing only.
    worktree.read_with(cx, |tree, _| {
        let entries = tree
            .entries(true, 0)
            .map(|entry| entry.path.as_ref())
            .collect::<Vec<_>>();
        assert_eq!(entries, vec![rel_path(""), rel_path(NEW_NAME)]);
    });
}
413
// Verifies that a `Rescan` event on the worktree root makes the scanner
// re-walk the tree and reconcile a snapshot that went stale because the
// underlying fs change events were dropped.
#[gpui::test]
async fn test_root_rescan_reconciles_stale_state(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "old.txt": "",
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![rel_path(""), rel_path("old.txt")]
        );
    });

    // Mutate the filesystem with events paused, then discard the buffered
    // events so the worktree never hears about these changes.
    fs.pause_events();
    fs.remove_file(Path::new("/root/old.txt"), RemoveOptions::default())
        .await
        .unwrap();
    fs.insert_file(Path::new("/root/new.txt"), Vec::new()).await;
    assert_eq!(fs.buffered_event_count(), 2);
    fs.clear_buffered_events();

    // The snapshot is now stale: it still has old.txt and lacks new.txt.
    tree.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path(rel_path("old.txt")).is_some());
        assert!(tree.entry_for_path(rel_path("new.txt")).is_none());
    });

    // A rescan event on the root should trigger a full re-walk.
    fs.emit_fs_event("/root", Some(fs::PathEventKind::Rescan));
    fs.unpause_events_and_flush();
    tree.flush_fs_events(cx).await;

    // After the rescan, the snapshot matches the filesystem again.
    tree.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path(rel_path("old.txt")).is_none());
        assert!(tree.entry_for_path(rel_path("new.txt")).is_some());
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![rel_path(""), rel_path("new.txt")]
        );
    });
}
478
// Verifies that a `Rescan` event on a subdirectory reports every surviving
// descendant as `Updated` (even if its contents did not change), while still
// classifying new files as `Added` and deleted ones as `Removed`.
#[gpui::test]
async fn test_subtree_rescan_reports_unchanged_descendants_as_updated(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "child.txt": "",
                "nested": {
                    "grandchild.txt": "",
                },
                "remove": {
                    "removed.txt": "",
                }
            },
            "other.txt": "",
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Record every (path, change) pair reported by the worktree, skipping the
    // internal "fs-event-sentinel" path used by the event-flushing machinery.
    let tree_updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |_, cx| {
        let tree_updates = tree_updates.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedEntries(update) = event {
                tree_updates.lock().extend(
                    update
                        .iter()
                        .filter(|(path, _, _)| path.as_ref() != rel_path("fs-event-sentinel"))
                        .map(|(path, _, change)| (path.clone(), *change)),
                );
            }
        })
        .detach();
    });
    // Mutate the subtree while events are paused, then drop the buffered
    // events so only the subsequent rescan event reaches the worktree.
    fs.pause_events();
    fs.insert_file("/root/dir/new.txt", b"new content".to_vec())
        .await;
    fs.remove_dir(
        "/root/dir/remove".as_ref(),
        RemoveOptions {
            recursive: true,
            ignore_if_not_exists: false,
        },
    )
    .await
    .unwrap();
    fs.clear_buffered_events();
    fs.unpause_events_and_flush();

    fs.emit_fs_event("/root/dir", Some(fs::PathEventKind::Rescan));
    tree.flush_fs_events(cx).await;

    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (rel_path("dir").into(), PathChange::Updated),
            (rel_path("dir/child.txt").into(), PathChange::Updated),
            (rel_path("dir/nested").into(), PathChange::Updated),
            (
                rel_path("dir/nested/grandchild.txt").into(),
                PathChange::Updated
            ),
            (rel_path("dir/new.txt").into(), PathChange::Added),
            (rel_path("dir/remove").into(), PathChange::Removed),
            (
                rel_path("dir/remove/removed.txt").into(),
                PathChange::Removed
            ),
        ]
    );

    // Entries outside the rescanned subtree are untouched.
    tree.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path(rel_path("other.txt")).is_some());
    });
}
571
// Verifies that gitignored directories are scanned lazily: loading a file
// deep inside one only reads the directories on that file's path, and later
// changes inside still-unloaded ignored directories cause no fs traffic.
#[gpui::test]
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "one": {
                "node_modules": {
                    "a": {
                        "a1.js": "a1",
                        "a2.js": "a2",
                    },
                    "b": {
                        "b1.js": "b1",
                        "b2.js": "b2",
                    },
                    "c": {
                        "c1.js": "c1",
                        "c2.js": "c2",
                    }
                },
            },
            "two": {
                "x.js": "",
                "y.js": "",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // After the initial scan, the ignored directory's contents are absent.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );
    });

    // Open a file that is nested inside of a gitignored directory that
    // has not yet been expanded.
    let prev_read_dir_count = fs.read_dir_call_count();
    let loaded = tree
        .update(cx, |tree, cx| {
            tree.load_file(rel_path("one/node_modules/b/b1.js"), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("one/node_modules/a"), true),
                (rel_path("one/node_modules/b"), true),
                (rel_path("one/node_modules/b/b1.js"), true),
                (rel_path("one/node_modules/b/b2.js"), true),
                (rel_path("one/node_modules/c"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );

        assert_eq!(
            loaded.file.path.as_ref(),
            rel_path("one/node_modules/b/b1.js")
        );

        // Only the newly-expanded directories are scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
    });

    // Open another file in a different subdirectory of the same
    // gitignored directory.
    let prev_read_dir_count = fs.read_dir_call_count();
    let loaded = tree
        .update(cx, |tree, cx| {
            tree.load_file(rel_path("one/node_modules/a/a2.js"), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("one/node_modules/a"), true),
                (rel_path("one/node_modules/a/a1.js"), true),
                (rel_path("one/node_modules/a/a2.js"), true),
                (rel_path("one/node_modules/b"), true),
                (rel_path("one/node_modules/b/b1.js"), true),
                (rel_path("one/node_modules/b/b2.js"), true),
                (rel_path("one/node_modules/c"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );

        assert_eq!(
            loaded.file.path.as_ref(),
            rel_path("one/node_modules/a/a2.js")
        );

        // Only the newly-expanded directory is scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
    });

    let path = PathBuf::from("/root/one/node_modules/c/lib");

    // No work happens when files and directories change within an unloaded directory.
    let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
    // When we open a directory, we check each ancestor whether it's a git
    // repository. That means we have an fs.metadata call per ancestor that we
    // need to subtract here.
    let ancestors = path.ancestors().count();

    fs.create_dir(path.as_ref()).await.unwrap();
    cx.executor().run_until_parked();

    assert_eq!(
        fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count - ancestors,
        0
    );
}
735
// Verifies that when a .gitignore change un-ignores a directory, all of its
// not-yet-loaded contents are scanned, and that each newly-loaded directory
// is read from disk exactly once.
#[gpui::test]
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "a": {
                "a.js": "",
            },
            "b": {
                "b.js": "",
            },
            "node_modules": {
                "c": {
                    "c.js": "",
                },
                "d": {
                    "d.js": "",
                    "e": {
                        "e1.js": "",
                        "e2.js": "",
                    },
                    "f": {
                        "f1.js": "",
                        "f2.js": "",
                    }
                },
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Open a file within the gitignored directory, forcing some of its
    // subdirectories to be read, but not all.
    let read_dir_count_1 = fs.read_dir_call_count();
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("node_modules/d/d.js").into()])
    })
    .recv()
    .await;

    // Those subdirectories are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("a"), false),
                (rel_path("a/a.js"), false),
                (rel_path("b"), false),
                (rel_path("b/b.js"), false),
                (rel_path("node_modules"), true),
                (rel_path("node_modules/c"), true),
                (rel_path("node_modules/d"), true),
                (rel_path("node_modules/d/d.js"), true),
                (rel_path("node_modules/d/e"), true),
                (rel_path("node_modules/d/f"), true),
            ]
        );
    });
    // Loading d.js required reading node_modules and node_modules/d.
    let read_dir_count_2 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);

    // Update the gitignore so that node_modules is no longer ignored,
    // but a subdirectory is ignored
    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // All of the directories that are no longer ignored are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("a"), false),
                (rel_path("a/a.js"), false),
                (rel_path("b"), false),
                (rel_path("b/b.js"), false),
                // This directory is no longer ignored
                (rel_path("node_modules"), false),
                (rel_path("node_modules/c"), false),
                (rel_path("node_modules/c/c.js"), false),
                (rel_path("node_modules/d"), false),
                (rel_path("node_modules/d/d.js"), false),
                // This subdirectory is now ignored
                (rel_path("node_modules/d/e"), true),
                (rel_path("node_modules/d/f"), false),
                (rel_path("node_modules/d/f/f1.js"), false),
                (rel_path("node_modules/d/f/f2.js"), false),
            ]
        );
    });

    // Each of the newly-loaded directories is scanned only once.
    let read_dir_count_3 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
}
860
861#[gpui::test]
862async fn test_write_file(cx: &mut TestAppContext) {
863 init_test(cx);
864 cx.executor().allow_parking();
865 let dir = TempTree::new(json!({
866 ".git": {},
867 ".gitignore": "ignored-dir\n",
868 "tracked-dir": {},
869 "ignored-dir": {}
870 }));
871
872 let worktree = Worktree::local(
873 dir.path(),
874 true,
875 Arc::new(RealFs::new(None, cx.executor())),
876 Default::default(),
877 true,
878 WorktreeId::from_proto(0),
879 &mut cx.to_async(),
880 )
881 .await
882 .unwrap();
883
884 #[cfg(not(target_os = "macos"))]
885 fs::fs_watcher::global(|_| {}).unwrap();
886
887 cx.read(|cx| worktree.read(cx).as_local().unwrap().scan_complete())
888 .await;
889 worktree.flush_fs_events(cx).await;
890
891 worktree
892 .update(cx, |tree, cx| {
893 tree.write_file(
894 rel_path("tracked-dir/file.txt").into(),
895 "hello".into(),
896 Default::default(),
897 encoding_rs::UTF_8,
898 false,
899 cx,
900 )
901 })
902 .await
903 .unwrap();
904 worktree
905 .update(cx, |tree, cx| {
906 tree.write_file(
907 rel_path("ignored-dir/file.txt").into(),
908 "world".into(),
909 Default::default(),
910 encoding_rs::UTF_8,
911 false,
912 cx,
913 )
914 })
915 .await
916 .unwrap();
917 worktree.read_with(cx, |tree, _| {
918 let tracked = tree
919 .entry_for_path(rel_path("tracked-dir/file.txt"))
920 .unwrap();
921 let ignored = tree
922 .entry_for_path(rel_path("ignored-dir/file.txt"))
923 .unwrap();
924 assert!(!tracked.is_ignored);
925 assert!(ignored.is_ignored);
926 });
927}
928
// Verifies that `file_scan_inclusions` forces matching paths (including
// gitignored ones) to be indexed, overriding both .gitignore rules and
// `file_scan_exclusions`.
#[gpui::test]
async fn test_file_scan_inclusions(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules\ntop_level.txt\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        "top_level.txt": "top level file",
        ".DS_Store": "",
    }));
    // Include package.json files under node_modules and every .DS_Store,
    // with no exclusions configured.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions = Some(vec![
                    "node_modules/**/package.json".to_string(),
                    "**/.DS_Store".to_string(),
                ]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        // Assert that file_scan_inclusions overrides file_scan_exclusions.
        check_worktree_entries(
            tree,
            &[],
            &["target", "node_modules"],
            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
            &[
                "node_modules/prettier/package.json",
                ".DS_Store",
                "node_modules/.DS_Store",
                "src/.DS_Store",
            ],
        )
    });
}
1001
1002#[gpui::test]
1003async fn test_file_scan_exclusions_overrules_inclusions(cx: &mut TestAppContext) {
1004 init_test(cx);
1005 cx.executor().allow_parking();
1006 let dir = TempTree::new(json!({
1007 ".gitignore": "**/target\n/node_modules\n",
1008 "target": {
1009 "index": "blah2"
1010 },
1011 "node_modules": {
1012 ".DS_Store": "",
1013 "prettier": {
1014 "package.json": "{}",
1015 },
1016 },
1017 "src": {
1018 ".DS_Store": "",
1019 "foo": {
1020 "foo.rs": "mod another;\n",
1021 "another.rs": "// another",
1022 },
1023 },
1024 ".DS_Store": "",
1025 }));
1026
1027 cx.update(|cx| {
1028 cx.update_global::<SettingsStore, _>(|store, cx| {
1029 store.update_user_settings(cx, |settings| {
1030 settings.project.worktree.file_scan_exclusions =
1031 Some(vec!["**/.DS_Store".to_string()]);
1032 settings.project.worktree.file_scan_inclusions =
1033 Some(vec!["**/.DS_Store".to_string()]);
1034 });
1035 });
1036 });
1037
1038 let tree = Worktree::local(
1039 dir.path(),
1040 true,
1041 Arc::new(RealFs::new(None, cx.executor())),
1042 Default::default(),
1043 true,
1044 WorktreeId::from_proto(0),
1045 &mut cx.to_async(),
1046 )
1047 .await
1048 .unwrap();
1049 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1050 .await;
1051 tree.flush_fs_events(cx).await;
1052 tree.read_with(cx, |tree, _| {
1053 // Assert that file_scan_inclusions overrides file_scan_exclusions.
1054 check_worktree_entries(
1055 tree,
1056 &[".DS_Store, src/.DS_Store"],
1057 &["target", "node_modules"],
1058 &["src/foo/another.rs", "src/foo/foo.rs", ".gitignore"],
1059 &[],
1060 )
1061 });
1062}
1063
// Verifies that changing `file_scan_inclusions` at runtime re-indexes the
// worktree, toggling each affected entry's `is_always_included` flag.
#[gpui::test]
async fn test_file_scan_inclusions_reindexes_on_setting_change(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules/\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
        },
        ".DS_Store": "",
    }));

    // Start with node_modules (gitignored) force-included.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions =
                    Some(vec!["node_modules/**".to_string()]);
            });
        });
    });
    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    // While the inclusion is active, the entries are marked always-included.
    tree.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("node_modules"))
                .is_some_and(|f| f.is_always_included)
        );
        assert!(
            tree.entry_for_path(rel_path("node_modules/prettier/package.json"))
                .is_some_and(|f| f.is_always_included)
        );
    });

    // Clear the inclusion list; the worktree should re-index.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions = Some(vec![]);
            });
        });
    });
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    // The same entries are still present but no longer always-included.
    tree.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("node_modules"))
                .is_some_and(|f| !f.is_always_included)
        );
        assert!(
            tree.entry_for_path(rel_path("node_modules/prettier/package.json"))
                .is_some_and(|f| !f.is_always_included)
        );
    });
}
1147
// Verifies that `file_scan_exclusions` hides matching paths entirely from the
// worktree, and that changing the setting at runtime rescans so previously
// hidden paths reappear (and newly-matching ones disappear).
#[gpui::test]
async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        ".DS_Store": "",
    }));
    // Initially exclude everything under any `foo` directory plus .DS_Store.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions =
                    Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "node_modules/.DS_Store",
                "src/.DS_Store",
                ".DS_Store",
            ],
            &["target", "node_modules"],
            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
            &[],
        )
    });

    // Switch the exclusions to node_modules contents only; the previously
    // excluded foo/.DS_Store paths should become visible again.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions =
                    Some(vec!["**/node_modules/**".to_string()]);
            });
        });
    });
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                "node_modules/prettier/package.json",
                "node_modules/.DS_Store",
                "node_modules",
            ],
            &["target"],
            &[
                ".gitignore",
                "src/lib.rs",
                "src/bar/bar.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/.DS_Store",
                ".DS_Store",
            ],
            &[],
        )
    });
}
1247
#[gpui::test]
async fn test_hidden_files(cx: &mut TestAppContext) {
    // Verifies that `is_hidden` is derived from the `hidden_files` worktree setting:
    // with the default settings the dot-prefixed entries are hidden, and updating
    // the setting to `**/*.log` re-evaluates every entry so only log files remain
    // hidden.
    init_test(cx);
    // This test uses RealFs, so allow the test executor to block on real file I/O.
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n",
        ".hidden_file": "content",
        ".hidden_dir": {
            "nested.rs": "code",
        },
        "src": {
            "visible.rs": "code",
        },
        "logs": {
            "app.log": "logs",
            "debug.log": "logs",
        },
        "visible.txt": "content",
    }));

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    // Wait for the initial scan and drain pending fs events before asserting.
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    // Default behavior: dot-prefixed entries (and their descendants) are hidden,
    // everything else is visible.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_hidden))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), true),
                (rel_path(".hidden_dir"), true),
                (rel_path(".hidden_dir/nested.rs"), true),
                (rel_path(".hidden_file"), true),
                (rel_path("logs"), false),
                (rel_path("logs/app.log"), false),
                (rel_path("logs/debug.log"), false),
                (rel_path("src"), false),
                (rel_path("src/visible.rs"), false),
                (rel_path("visible.txt"), false),
            ]
        );
    });

    // Replace the hidden-files globs with `**/*.log`: the dotfiles should no
    // longer be hidden, while both log files should become hidden.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.hidden_files = Some(vec!["**/*.log".to_string()]);
            });
        });
    });
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_hidden))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path(".hidden_dir"), false),
                (rel_path(".hidden_dir/nested.rs"), false),
                (rel_path(".hidden_file"), false),
                (rel_path("logs"), false),
                (rel_path("logs/app.log"), true),
                (rel_path("logs/debug.log"), true),
                (rel_path("src"), false),
                (rel_path("src/visible.rs"), false),
                (rel_path("visible.txt"), false),
            ]
        );
    });
}
1335
#[gpui::test]
async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
    // Verifies that `file_scan_exclusions` keeps matching entries out of the
    // worktree both during the initial scan and when later fs events create new
    // files inside excluded (`.git`, `node_modules`, `build_output`) and ignored
    // (`target`, `test_output`) directories.
    init_test(cx);
    // Uses RealFs plus std::fs writes below, so real file I/O must be allowed.
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".git": {
            "HEAD": "ref: refs/heads/main\n",
            "foo": "bar",
        },
        // `test_output` is gitignored but NOT excluded; `build_output` (created
        // later) is excluded via settings but not gitignored.
        ".gitignore": "**/target\n/node_modules\ntest_output\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        ".DS_Store": "",
    }));
    // Configure exclusions before creating the worktree so they apply to the
    // initial scan. `build_output` does not exist yet at this point.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![
                    "**/.git".to_string(),
                    "node_modules/".to_string(),
                    "build_output".to_string(),
                ]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    // Initial classification: .git and node_modules excluded, target ignored,
    // the src tree and dotfiles tracked (see check_worktree_entries for the
    // meaning of each slice).
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                ".git/HEAD",
                ".git/foo",
                "node_modules",
                "node_modules/.DS_Store",
                "node_modules/prettier",
                "node_modules/prettier/package.json",
            ],
            &["target"],
            &[
                ".DS_Store",
                "src/.DS_Store",
                "src/lib.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/bar/bar.rs",
                ".gitignore",
            ],
            &[],
        )
    });

    // Now create the excluded/ignored directories that didn't exist during the
    // initial scan, plus a new file in each interesting location.
    let new_excluded_dir = dir.path().join("build_output");
    let new_ignored_dir = dir.path().join("test_output");
    std::fs::create_dir_all(&new_excluded_dir)
        .unwrap_or_else(|e| panic!("Failed to create a {new_excluded_dir:?} directory: {e}"));
    std::fs::create_dir_all(&new_ignored_dir)
        .unwrap_or_else(|e| panic!("Failed to create a {new_ignored_dir:?} directory: {e}"));
    let node_modules_dir = dir.path().join("node_modules");
    let dot_git_dir = dir.path().join(".git");
    let src_dir = dir.path().join("src");
    for existing_dir in [&node_modules_dir, &dot_git_dir, &src_dir] {
        assert!(
            existing_dir.is_dir(),
            "Expect {existing_dir:?} to be present in the FS already"
        );
    }

    for directory_for_new_file in [
        new_excluded_dir,
        new_ignored_dir,
        node_modules_dir,
        dot_git_dir,
        src_dir,
    ] {
        std::fs::write(directory_for_new_file.join("new_file"), "new file contents")
            .unwrap_or_else(|e| {
                panic!("Failed to create in {directory_for_new_file:?} a new file: {e}")
            });
    }
    tree.flush_fs_events(cx).await;

    // After the events: new files inside excluded dirs stay excluded, the new
    // file inside the gitignored `test_output` is ignored, and only the one in
    // `src` becomes a tracked entry.
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                ".git/HEAD",
                ".git/foo",
                ".git/new_file",
                "node_modules",
                "node_modules/.DS_Store",
                "node_modules/prettier",
                "node_modules/prettier/package.json",
                "node_modules/new_file",
                "build_output",
                "build_output/new_file",
                "test_output/new_file",
            ],
            &["target", "test_output"],
            &[
                ".DS_Store",
                "src/.DS_Store",
                "src/lib.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/bar/bar.rs",
                "src/new_file",
                ".gitignore",
            ],
            &[],
        )
    });
}
1480
#[gpui::test]
async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
    // Verifies that a worktree rooted *at* a `.git` directory itself still scans
    // its contents and picks up files created later via fs events, rather than
    // treating everything as an excluded git dir.
    init_test(cx);
    // RealFs + std::fs below require real file I/O.
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".git": {
            "HEAD": "ref: refs/heads/main\n",
            "foo": "foo contents",
        },
    }));
    let dot_git_worktree_dir = dir.path().join(".git");

    let tree = Worktree::local(
        dot_git_worktree_dir.clone(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(tree, &[], &["HEAD", "foo"], &[], &[])
    });

    // A file written after the initial scan must also show up in the worktree.
    std::fs::write(dot_git_worktree_dir.join("new_file"), "new file contents")
        .unwrap_or_else(|e| panic!("Failed to create in {dot_git_worktree_dir:?} a new file: {e}"));
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(tree, &[], &["HEAD", "foo", "new_file"], &[], &[])
    });
}
1518
#[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
    // Creates a directory through the worktree while its initial scan is still in
    // progress, and checks that a remote observer mirroring the worktree via
    // `observe_updates` converges to the same set of entries as the local
    // snapshot. Run for 30 iterations to exercise different scan/creation
    // interleavings.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "b": {},
            "c": {},
            "d": {},
        }),
    )
    .await;

    let tree = Worktree::local(
        "/root".as_ref(),
        true,
        fs,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // `snapshot1` mirrors the worktree by applying every streamed update, the
    // same way a remote collaborator's snapshot would.
    let snapshot1 = tree.update(cx, |tree, cx| {
        let tree = tree.as_local_mut().unwrap();
        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
        tree.observe_updates(0, cx, {
            let snapshot = snapshot.clone();
            let settings = tree.settings();
            move |update| {
                snapshot
                    .lock()
                    .apply_remote_update(update, &settings.file_scan_inclusions);
                async { true }
            }
        });
        snapshot
    });

    // Note: the initial scan has not been awaited, so this create races with it.
    let entry = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry(rel_path("a/e").into(), true, None, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_dir());

    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entry_for_path(rel_path("a/e")).unwrap().kind,
            EntryKind::Dir
        );
    });

    // The mirrored snapshot must match the worktree's own snapshot exactly.
    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
    assert_eq!(
        snapshot1.lock().entries(true, 0).collect::<Vec<_>>(),
        snapshot2.entries(true, 0).collect::<Vec<_>>()
    );
}
1587
#[gpui::test]
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
    // Verifies that `create_entry` creates all missing parent directories
    // (mkdir -p semantics) for a new file, on both the fake and the real
    // filesystem, and that those intermediate directories become worktree
    // entries.
    init_test(cx);
    // The RealFs half of this test needs real file I/O.
    cx.executor().allow_parking();

    // --- FakeFs: create a/b/c/d.txt where only `a` exists. ---
    let fs_fake = FakeFs::new(cx.background_executor.clone());
    fs_fake
        .insert_tree(
            "/root",
            json!({
                "a": {},
            }),
        )
        .await;

    let tree_fake = Worktree::local(
        "/root".as_ref(),
        true,
        fs_fake,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let entry = tree_fake
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/d.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    // The file and every intermediate directory must now exist as entries.
    tree_fake.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/d.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("a/b/c")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("a/b")).unwrap().is_dir());
    });

    // --- RealFs: repeat the same scenario against a temp directory. ---
    let fs_real = Arc::new(RealFs::new(None, cx.executor()));
    let temp_root = TempTree::new(json!({
        "a": {}
    }));

    let tree_real = Worktree::local(
        temp_root.path(),
        true,
        fs_real,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/d.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/d.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("a/b/c")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("a/b")).unwrap().is_dir());
    });

    // Test smallest change: all parents already exist, only the file is new.
    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/e.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/e.txt"))
                .unwrap()
                .is_file()
        );
    });

    // Test largest change: the entire path d/e/f is new, starting at the root.
    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("d/e/f/g.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("d/e/f/g.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("d/e/f")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("d/e")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("d")).unwrap().is_dir());
    });
}
1737
#[gpui::test]
async fn test_create_file_in_expanded_gitignored_dir(cx: &mut TestAppContext) {
    // Tests the behavior of our worktree refresh when a file in a gitignored directory
    // is created: once the directory has been expanded (loaded), creating a new
    // file inside it must not collapse it back to `UnloadedDir` or drop its
    // previously-loaded children.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "ignored_dir\n",
            "ignored_dir": {
                "existing_file.txt": "existing content",
                "another_file.txt": "another content",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // After the initial scan, the gitignored directory is known but its
    // contents have not been scanned.
    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(ignored_dir.kind, EntryKind::UnloadedDir);
    });

    // Loading a file inside it forces the directory to be expanded
    // (UnloadedDir -> Dir), surfacing its children as entries.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("ignored_dir/existing_file.txt"), cx)
    })
    .await
    .unwrap();

    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(ignored_dir.kind, EntryKind::Dir);

        assert!(
            tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
                .is_some()
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
                .is_some()
        );
    });

    // Create a new file in the already-expanded ignored directory.
    let entry = tree
        .update(cx, |tree, cx| {
            tree.create_entry(rel_path("ignored_dir/new_file.txt").into(), false, None, cx)
        })
        .await
        .unwrap();
    assert!(entry.into_included().is_some());

    cx.executor().run_until_parked();

    // The refresh triggered by the creation must keep the directory expanded
    // and keep all three files visible.
    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(
            ignored_dir.kind,
            EntryKind::Dir,
            "ignored_dir should still be loaded, not UnloadedDir"
        );

        assert!(
            tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
                .is_some(),
            "existing_file.txt should still be visible"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
                .is_some(),
            "another_file.txt should still be visible"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/new_file.txt"))
                .is_some(),
            "new_file.txt should be visible"
        );
    });
}
1834
#[gpui::test]
async fn test_fs_event_for_gitignored_dir_does_not_lose_contents(cx: &mut TestAppContext) {
    // Tests the behavior of our worktree refresh when a directory modification for a gitignored directory
    // is triggered: a `Changed` fs event on an expanded ignored directory must
    // not unload it or drop its already-loaded children.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "ignored_dir\n",
            "ignored_dir": {
                "file1.txt": "content1",
                "file2.txt": "content2",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Load a file to expand the ignored directory
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("ignored_dir/file1.txt"), cx)
    })
    .await
    .unwrap();

    // Sanity check: expansion made both children visible.
    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert_eq!(ignored_dir.kind, EntryKind::Dir);
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
                .is_some()
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
                .is_some()
        );
    });

    // Simulate a filesystem change notification on the directory itself.
    fs.emit_fs_event("/root/ignored_dir", Some(fs::PathEventKind::Changed));
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert_eq!(
            ignored_dir.kind,
            EntryKind::Dir,
            "ignored_dir should still be loaded (Dir), not UnloadedDir"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
                .is_some(),
            "file1.txt should still be visible after directory fs event"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
                .is_some(),
            "file2.txt should still be visible after directory fs event"
        );
    });
}
1910
#[gpui::test(iterations = 100)]
async fn test_random_worktree_operations_during_initial_scan(
    cx: &mut TestAppContext,
    mut rng: StdRng,
) {
    // Randomized test: perform worktree mutations while the initial scan is
    // still running, record the update stream, and verify that replaying the
    // relevant updates onto any intermediate snapshot reproduces the final
    // state. OPERATIONS / INITIAL_ENTRIES env vars tune the workload.
    init_test(cx);
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(5);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new(path!("/test"));
    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        // probability 1.0 => always insert, building up the initial tree.
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Capture every streamed update, and independently verify change events
    // keep a mirror consistent (see check_worktree_change_events).
    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    // Mutate the worktree concurrently with the (unawaited) initial scan,
    // checking invariants and occasionally saving snapshots to replay onto.
    for _ in 0..operations {
        worktree
            .update(cx, |worktree, cx| {
                randomly_mutate_worktree(worktree, &mut rng, cx)
            })
            .await
            .log_err();
        worktree.read_with(cx, |tree, _| {
            tree.as_local().unwrap().snapshot().check_invariants(true)
        });

        if rng.random_bool(0.6) {
            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
        }
    }

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    cx.executor().run_until_parked();

    let final_snapshot = worktree.read_with(cx, |tree, _| {
        let tree = tree.as_local().unwrap();
        let snapshot = tree.snapshot();
        snapshot.check_invariants(true);
        snapshot
    });

    let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());

    // Replaying the updates with scan_id >= the snapshot's onto each saved
    // snapshot must converge to the final snapshot.
    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
        let mut updated_snapshot = snapshot.clone();
        for update in updates.lock().iter() {
            if update.scan_id >= updated_snapshot.scan_id() as u64 {
                updated_snapshot
                    .apply_remote_update(update.clone(), &settings.file_scan_inclusions);
            }
        }

        assert_eq!(
            updated_snapshot.entries(true, 0).collect::<Vec<_>>(),
            final_snapshot.entries(true, 0).collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }
}
2005
#[gpui::test(iterations = 100)]
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
    // Randomized test of the steady-state scanner: after the initial scan,
    // mutate the FS and worktree with fs events paused and flushed in random
    // batches, then verify (a) a fresh worktree over the final FS matches the
    // mutated worktree, and (b) replaying recorded updates onto intermediate
    // snapshots reproduces the final snapshot.
    init_test(cx);
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(40);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new(path!("/test"));
    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Record the update stream and validate change events against a mirror.
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    // Pause fs event delivery so events can be flushed in random-sized batches,
    // exercising the scanner's handling of coalesced/delayed notifications.
    fs.as_fake().pause_events();
    let mut snapshots = Vec::new();
    let mut mutations_len = operations;
    while mutations_len > 1 {
        if rng.random_bool(0.2) {
            worktree
                .update(cx, |worktree, cx| {
                    randomly_mutate_worktree(worktree, &mut rng, cx)
                })
                .await
                .log_err();
        } else {
            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
        }

        let buffered_event_count = fs.as_fake().buffered_event_count();
        if buffered_event_count > 0 && rng.random_bool(0.3) {
            let len = rng.random_range(0..=buffered_event_count);
            log::info!("flushing {} events", len);
            fs.as_fake().flush_events(len);
        } else {
            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
            mutations_len -= 1;
        }

        cx.executor().run_until_parked();
        if rng.random_bool(0.2) {
            log::info!("storing snapshot {}", snapshots.len());
            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
            snapshots.push(snapshot);
        }
    }

    // Deliver every remaining event and let the scanner settle.
    log::info!("quiescing");
    fs.as_fake().flush_events(usize::MAX);
    cx.executor().run_until_parked();

    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    snapshot.check_invariants(true);
    let expanded_paths = snapshot
        .expanded_entries()
        .map(|e| e.path.clone())
        .collect::<Vec<_>>();

    // A brand-new worktree scanning the final FS state (with the same paths
    // expanded) must produce the same entries as the long-lived worktree.
    {
        let new_worktree = Worktree::local(
            root_dir,
            true,
            fs.clone(),
            Default::default(),
            true,
            WorktreeId::from_proto(0),
            &mut cx.to_async(),
        )
        .await
        .unwrap();
        new_worktree
            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
            .await;
        new_worktree
            .update(cx, |tree, _| {
                tree.as_local_mut()
                    .unwrap()
                    .refresh_entries_for_paths(expanded_paths)
            })
            .recv()
            .await;
        let new_snapshot =
            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        assert_eq!(
            snapshot.entries_without_ids(true),
            new_snapshot.entries_without_ids(true)
        );
    }

    let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());

    // Replaying recorded updates onto each stored snapshot must reproduce the
    // final snapshot (modulo pending-dir kinds, normalized below).
    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
        for update in updates.lock().iter() {
            if update.scan_id >= prev_snapshot.scan_id() as u64 {
                prev_snapshot.apply_remote_update(update.clone(), &settings.file_scan_inclusions);
            }
        }

        assert_eq!(
            prev_snapshot
                .entries(true, 0)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            snapshot
                .entries(true, 0)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }

    // Normalize all directory kinds to plain `Dir` so the comparison ignores
    // transient pending-directory states.
    fn ignore_pending_dir(entry: &Entry) -> Entry {
        let mut entry = entry.clone();
        if entry.kind.is_dir() {
            entry.kind = EntryKind::Dir
        }
        entry
    }
}
2158
// The worktree's `UpdatedEntries` event can be used to follow along with
// all changes to the worktree's snapshot.
//
// This helper subscribes to the worktree's events and maintains a mirror of
// its entry list by applying each `UpdatedEntries` change. After every batch
// of changes it asserts the mirror equals the worktree's actual entries, so
// any missing/duplicated/misordered change event fails the calling test.
fn check_worktree_change_events(tree: &mut Worktree, cx: &mut Context<Worktree>) {
    // Start the mirror from the current set of entries (including ignored).
    let mut entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
    cx.subscribe(&cx.entity(), move |tree, _, event, _| {
        if let Event::UpdatedEntries(changes) = event {
            for (path, _, change_type) in changes.iter() {
                let entry = tree.entry_for_path(path).cloned();
                // Entries are kept sorted by path; `ix` is either the entry's
                // current position (Ok) or its insertion point (Err).
                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
                    Ok(ix) | Err(ix) => ix,
                };
                match change_type {
                    PathChange::Added => entries.insert(ix, entry.unwrap()),
                    PathChange::Removed => drop(entries.remove(ix)),
                    PathChange::Updated => {
                        let entry = entry.unwrap();
                        let existing_entry = entries.get_mut(ix).unwrap();
                        assert_eq!(existing_entry.path, entry.path);
                        *existing_entry = entry;
                    }
                    PathChange::AddedOrUpdated | PathChange::Loaded => {
                        // Replace in place if the path already exists,
                        // otherwise insert at the sorted position.
                        let entry = entry.unwrap();
                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
                            *entries.get_mut(ix).unwrap() = entry;
                        } else {
                            entries.insert(ix, entry);
                        }
                    }
                }
            }

            // The mirror must now match the worktree's real entry list.
            let new_entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
        }
    })
    .detach();
}
2196
/// Performs one random mutation through the worktree's own API (as opposed to
/// mutating the underlying FS directly): roughly 1/3 of the time deletes a
/// random non-root entry; otherwise creates a child under a random directory
/// entry, or overwrites a random file entry with empty contents.
///
/// Returns a task that resolves when the mutation has been applied.
fn randomly_mutate_worktree(
    worktree: &mut Worktree,
    rng: &mut impl Rng,
    cx: &mut Context<Worktree>,
) -> Task<Result<()>> {
    log::info!("mutating worktree");
    let worktree = worktree.as_local_mut().unwrap();
    let snapshot = worktree.snapshot();
    // Pick any entry (excluding ignored ones) as the mutation target.
    let entry = snapshot.entries(false, 0).choose(rng).unwrap();

    match rng.random_range(0_u32..100) {
        // ~34%: delete the chosen entry, but never the worktree root.
        0..=33 if entry.path.as_ref() != RelPath::empty() => {
            log::info!("deleting entry {:?} ({})", entry.path, entry.id.to_usize());
            let task = worktree
                .delete_entry(entry.id, false, cx)
                .unwrap_or_else(|| Task::ready(Ok(None)));

            cx.background_spawn(async move {
                task.await?;
                Ok(())
            })
        }
        _ => {
            if entry.is_dir() {
                // Create a new randomly-named child (dir with 30% probability).
                let child_path = entry.path.join(rel_path(&random_filename(rng)));
                let is_dir = rng.random_bool(0.3);
                log::info!(
                    "creating {} at {:?}",
                    if is_dir { "dir" } else { "file" },
                    child_path,
                );
                let task = worktree.create_entry(child_path, is_dir, None, cx);
                cx.background_spawn(async move {
                    task.await?;
                    Ok(())
                })
            } else {
                // Overwrite the chosen file with empty UTF-8 contents.
                log::info!(
                    "overwriting file {:?} ({})",
                    &entry.path,
                    entry.id.to_usize()
                );
                let task = worktree.write_file(
                    entry.path.clone(),
                    "".into(),
                    Default::default(),
                    encoding_rs::UTF_8,
                    false,
                    cx,
                );
                cx.background_spawn(async move {
                    task.await?;
                    Ok(())
                })
            }
        }
    }
}
2255
/// Performs one random mutation directly on the fake filesystem under
/// `root_path`. With probability `insertion_probability` (or always, when the
/// tree is effectively empty) it creates a new file or directory; with a small
/// 5% chance it writes a random `.gitignore`; otherwise it renames or deletes
/// an existing file or directory.
async fn randomly_mutate_fs(
    fs: &Arc<dyn Fs>,
    root_path: &Path,
    insertion_probability: f64,
    rng: &mut impl Rng,
) {
    log::info!("mutating fs");
    // Partition all existing paths under the root into files and directories.
    // Note: `dirs` always contains at least the root itself.
    let mut files = Vec::new();
    let mut dirs = Vec::new();
    for path in fs.as_fake().paths(false) {
        if path.starts_with(root_path) {
            if fs.is_file(&path).await {
                files.push(path);
            } else {
                dirs.push(path);
            }
        }
    }

    if (files.is_empty() && dirs.len() == 1) || rng.random_bool(insertion_probability) {
        // Insertion: create a new dir or file under a random existing dir.
        let path = dirs.choose(rng).unwrap();
        let new_path = path.join(random_filename(rng));

        if rng.random() {
            log::info!(
                "creating dir {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_dir(&new_path).await.unwrap();
        } else {
            log::info!(
                "creating file {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_file(&new_path, Default::default()).await.unwrap();
        }
    } else if rng.random_bool(0.05) {
        // Gitignore: write a random subset of the descendants of a random dir
        // (as paths relative to that dir) into its `.gitignore`.
        let ignore_dir_path = dirs.choose(rng).unwrap();
        let ignore_path = ignore_dir_path.join(GITIGNORE);

        let subdirs = dirs
            .iter()
            .filter(|d| d.starts_with(ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let subfiles = files
            .iter()
            .filter(|d| d.starts_with(ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let files_to_ignore = {
            let len = rng.random_range(0..=subfiles.len());
            subfiles.choose_multiple(rng, len)
        };
        // `subdirs` always includes `ignore_dir_path` itself, so the exclusive
        // upper bound keeps at least one directory unignored.
        let dirs_to_ignore = {
            let len = rng.random_range(0..subdirs.len());
            subdirs.choose_multiple(rng, len)
        };

        let mut ignore_contents = String::new();
        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
            writeln!(
                ignore_contents,
                "{}",
                path_to_ignore
                    .strip_prefix(ignore_dir_path)
                    .unwrap()
                    .to_str()
                    .unwrap()
            )
            .unwrap();
        }
        log::info!(
            "creating gitignore {:?} with contents:\n{}",
            ignore_path.strip_prefix(root_path).unwrap(),
            ignore_contents
        );
        fs.save(
            &ignore_path,
            &ignore_contents.as_str().into(),
            Default::default(),
        )
        .await
        .unwrap();
    } else {
        // Rename or delete: pick a random existing file or non-root dir.
        let old_path = {
            let file_path = files.choose(rng);
            // `dirs[1..]` skips the root so it is never renamed or deleted.
            let dir_path = dirs[1..].choose(rng);
            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
        };

        let is_rename = rng.random();
        if is_rename {
            // Move into a random dir that is not inside the moved path itself.
            let new_path_parent = dirs
                .iter()
                .filter(|d| !d.starts_with(old_path))
                .choose(rng)
                .unwrap();

            // Sometimes replace an existing directory wholesale: remove it
            // first, then rename onto its path.
            let overwrite_existing_dir =
                !old_path.starts_with(new_path_parent) && rng.random_bool(0.3);
            let new_path = if overwrite_existing_dir {
                fs.remove_dir(
                    new_path_parent,
                    RemoveOptions {
                        recursive: true,
                        ignore_if_not_exists: true,
                    },
                )
                .await
                .unwrap();
                new_path_parent.to_path_buf()
            } else {
                new_path_parent.join(random_filename(rng))
            };

            log::info!(
                "renaming {:?} to {}{:?}",
                old_path.strip_prefix(root_path).unwrap(),
                if overwrite_existing_dir {
                    "overwrite "
                } else {
                    ""
                },
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.rename(
                old_path,
                &new_path,
                fs::RenameOptions {
                    overwrite: true,
                    ignore_if_exists: true,
                    create_parents: false,
                },
            )
            .await
            .unwrap();
        } else if fs.is_file(old_path).await {
            log::info!(
                "deleting file {:?}",
                old_path.strip_prefix(root_path).unwrap()
            );
            fs.remove_file(old_path, Default::default()).await.unwrap();
        } else {
            log::info!(
                "deleting dir {:?}",
                old_path.strip_prefix(root_path).unwrap()
            );
            fs.remove_dir(
                old_path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await
            .unwrap();
        }
    }
}
2416
2417fn random_filename(rng: &mut impl Rng) -> String {
2418 (0..6)
2419 .map(|_| rng.sample(rand::distr::Alphanumeric))
2420 .map(char::from)
2421 .collect()
2422}
2423
#[gpui::test]
async fn test_private_single_file_worktree(cx: &mut TestAppContext) {
    // Verifies that a single-file worktree opened on a `.env` file marks its
    // root entry as private.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree("/", json!({".env": "PRIVATE=secret\n"}))
        .await;
    // The worktree root is the file itself, not a directory.
    let tree = Worktree::local(
        Path::new("/.env"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.read_with(cx, |tree, _| {
        // In a single-file worktree the empty relative path IS the file.
        let entry = tree.entry_for_path(rel_path("")).unwrap();
        assert!(entry.is_private);
    });
}
2448
#[gpui::test]
async fn test_repository_above_root(executor: BackgroundExecutor, cx: &mut TestAppContext) {
    // Verifies that a worktree rooted inside a git repository (the `.git` dir
    // lives in an ancestor of the worktree root) still discovers that
    // repository, and keeps it after a rescan triggered by touching the root.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            "subproject": {
                "a.txt": "A"
            }
        }),
    )
    .await;
    // Open the worktree on the subdirectory, below the repository root.
    let worktree = Worktree::local(
        path!("/root/subproject").as_ref(),
        true,
        fs.clone(),
        Arc::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();
    let repos = worktree.update(cx, |worktree, _| {
        worktree.as_local().unwrap().repositories()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root")).into()]);

    // Touch the worktree root to force a rescan; the ancestor repository must
    // still be reported afterwards.
    fs.touch_path(path!("/root/subproject")).await;
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    let repos = worktree.update(cx, |worktree, _| {
        worktree.as_local().unwrap().repositories()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root")).into()]);
}
2499
#[gpui::test]
async fn test_global_gitignore(executor: BackgroundExecutor, cx: &mut TestAppContext) {
    init_test(cx);

    // Lay out a fake home dir containing:
    // - a global excludes file (~/.config/git/ignore) ignoring "foo",
    //   anchored "/bar", and "baz";
    // - a project repo whose .gitignore re-includes "baz";
    // - a nested sub-repository ("subrepo") containing its own "bar".
    let home = paths::home_dir();
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        home,
        json!({
            ".config": {
                "git": {
                    "ignore": "foo\n/bar\nbaz\n"
                }
            },
            "project": {
                ".git": {},
                ".gitignore": "!baz",
                "foo": "",
                "bar": "",
                "sub": {
                    "bar": "",
                },
                "subrepo": {
                    ".git": {},
                    "bar": ""
                },
                "baz": ""
            }
        }),
    )
    .await;
    let worktree = Worktree::local(
        home.join("project"),
        true,
        fs.clone(),
        Arc::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    // .gitignore overrides excludesFile, and anchored paths in excludesFile are resolved
    // relative to the nearest containing repository
    worktree.update(cx, |worktree, _cx| {
        check_worktree_entries(
            worktree,
            &[],
            // "foo"/"bar" via the global file; "subrepo/bar" because the
            // anchored "/bar" re-anchors at the sub-repository's root.
            &["foo", "bar", "subrepo/bar"],
            // "sub/bar" is not at a repo root ("/bar" is anchored), and
            // "baz" is negated by the project's .gitignore.
            &["sub/bar", "baz"],
            &[],
        );
    });

    // Ignore statuses are updated when excludesFile changes
    fs.write(
        &home.join(".config").join("git").join("ignore"),
        "/bar\nbaz\n".as_bytes(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    // "foo" was dropped from the global file, so it becomes tracked.
    worktree.update(cx, |worktree, _cx| {
        check_worktree_entries(
            worktree,
            &[],
            &["bar", "subrepo/bar"],
            &["foo", "sub/bar", "baz"],
            &[],
        );
    });

    // Statuses are updated when .git added/removed
    fs.remove_dir(
        &home.join("project").join("subrepo").join(".git"),
        RemoveOptions {
            recursive: true,
            ..Default::default()
        },
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    // With subrepo's .git gone, "/bar" no longer re-anchors there, so
    // "subrepo/bar" stops matching the anchored pattern and is tracked.
    worktree.update(cx, |worktree, _cx| {
        check_worktree_entries(
            worktree,
            &[],
            &["bar"],
            &["foo", "sub/bar", "baz", "subrepo/bar"],
            &[],
        );
    });
}
2612
2613#[gpui::test]
2614async fn test_repo_exclude(executor: BackgroundExecutor, cx: &mut TestAppContext) {
2615 init_test(cx);
2616
2617 let fs = FakeFs::new(executor);
2618 let project_dir = Path::new(path!("/project"));
2619 fs.insert_tree(
2620 project_dir,
2621 json!({
2622 ".git": {
2623 "info": {
2624 "exclude": ".env.*"
2625 }
2626 },
2627 ".env.example": "secret=xxxx",
2628 ".env.local": "secret=1234",
2629 ".gitignore": "!.env.example",
2630 "README.md": "# Repo Exclude",
2631 "src": {
2632 "main.rs": "fn main() {}",
2633 },
2634 }),
2635 )
2636 .await;
2637
2638 let worktree = Worktree::local(
2639 project_dir,
2640 true,
2641 fs.clone(),
2642 Default::default(),
2643 true,
2644 WorktreeId::from_proto(0),
2645 &mut cx.to_async(),
2646 )
2647 .await
2648 .unwrap();
2649 worktree
2650 .update(cx, |worktree, _| {
2651 worktree.as_local().unwrap().scan_complete()
2652 })
2653 .await;
2654 cx.run_until_parked();
2655
2656 // .gitignore overrides .git/info/exclude
2657 worktree.update(cx, |worktree, _cx| {
2658 let expected_excluded_paths = [];
2659 let expected_ignored_paths = [".env.local"];
2660 let expected_tracked_paths = [".env.example", "README.md", "src/main.rs"];
2661 let expected_included_paths = [];
2662
2663 check_worktree_entries(
2664 worktree,
2665 &expected_excluded_paths,
2666 &expected_ignored_paths,
2667 &expected_tracked_paths,
2668 &expected_included_paths,
2669 );
2670 });
2671
2672 // Ignore statuses are updated when .git/info/exclude file changes
2673 fs.write(
2674 &project_dir.join(DOT_GIT).join(REPO_EXCLUDE),
2675 ".env.example".as_bytes(),
2676 )
2677 .await
2678 .unwrap();
2679 worktree
2680 .update(cx, |worktree, _| {
2681 worktree.as_local().unwrap().scan_complete()
2682 })
2683 .await;
2684 cx.run_until_parked();
2685
2686 worktree.update(cx, |worktree, _cx| {
2687 let expected_excluded_paths = [];
2688 let expected_ignored_paths = [];
2689 let expected_tracked_paths = [".env.example", ".env.local", "README.md", "src/main.rs"];
2690 let expected_included_paths = [];
2691
2692 check_worktree_entries(
2693 worktree,
2694 &expected_excluded_paths,
2695 &expected_ignored_paths,
2696 &expected_tracked_paths,
2697 &expected_included_paths,
2698 );
2699 });
2700}
2701
2702#[track_caller]
2703fn check_worktree_entries(
2704 tree: &Worktree,
2705 expected_excluded_paths: &[&str],
2706 expected_ignored_paths: &[&str],
2707 expected_tracked_paths: &[&str],
2708 expected_included_paths: &[&str],
2709) {
2710 for path in expected_excluded_paths {
2711 let entry = tree.entry_for_path(rel_path(path));
2712 assert!(
2713 entry.is_none(),
2714 "expected path '{path}' to be excluded, but got entry: {entry:?}",
2715 );
2716 }
2717 for path in expected_ignored_paths {
2718 let entry = tree
2719 .entry_for_path(rel_path(path))
2720 .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
2721 assert!(
2722 entry.is_ignored,
2723 "expected path '{path}' to be ignored, but got entry: {entry:?}",
2724 );
2725 }
2726 for path in expected_tracked_paths {
2727 let entry = tree
2728 .entry_for_path(rel_path(path))
2729 .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
2730 assert!(
2731 !entry.is_ignored || entry.is_always_included,
2732 "expected path '{path}' to be tracked, but got entry: {entry:?}",
2733 );
2734 }
2735 for path in expected_included_paths {
2736 let entry = tree
2737 .entry_for_path(rel_path(path))
2738 .unwrap_or_else(|| panic!("Missing entry for expected included path '{path}'"));
2739 assert!(
2740 entry.is_always_included,
2741 "expected path '{path}' to always be included, but got entry: {entry:?}",
2742 );
2743 }
2744}
2745
#[gpui::test]
async fn test_root_repo_common_dir(executor: BackgroundExecutor, cx: &mut TestAppContext) {
    init_test(cx);

    use git::repository::Worktree as GitWorktree;

    let fs = FakeFs::new(executor);

    // Set up a main repo and a linked worktree pointing back to it.
    fs.insert_tree(
        path!("/main_repo"),
        json!({
            ".git": {},
            "file.txt": "content",
        }),
    )
    .await;
    // Register /linked_worktree as a non-main linked worktree of
    // /main_repo/.git (its `.git` is a gitfile pointing at the commondir).
    fs.add_linked_worktree_for_repo(
        Path::new(path!("/main_repo/.git")),
        false,
        GitWorktree {
            path: PathBuf::from(path!("/linked_worktree")),
            ref_name: Some("refs/heads/feature".into()),
            sha: "abc123".into(),
            is_main: false,
        },
    )
    .await;
    fs.write(
        path!("/linked_worktree/file.txt").as_ref(),
        "content".as_bytes(),
    )
    .await
    .unwrap();

    let tree = Worktree::local(
        path!("/linked_worktree").as_ref(),
        true,
        fs.clone(),
        Arc::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    cx.run_until_parked();

    // For a linked worktree, root_repo_common_dir should point to the
    // main repo's .git, not the worktree-specific git directory.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.snapshot().root_repo_common_dir().map(|p| p.as_ref()),
            Some(Path::new(path!("/main_repo/.git"))),
        );
    });

    // Count UpdatedRootRepoCommonDir events emitted from here on, so we can
    // assert the removal below notifies observers exactly once.
    let event_count: Rc<Cell<usize>> = Rc::new(Cell::new(0));
    tree.update(cx, {
        let event_count = event_count.clone();
        |_, cx| {
            cx.subscribe(&cx.entity(), move |_, _, event, _| {
                if matches!(event, Event::UpdatedRootRepoCommonDir) {
                    event_count.set(event_count.get() + 1);
                }
            })
            .detach();
        }
    });

    // Remove .git — root_repo_common_dir should become None.
    fs.remove_file(
        &PathBuf::from(path!("/linked_worktree/.git")),
        Default::default(),
    )
    .await
    .unwrap();
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(tree.snapshot().root_repo_common_dir(), None);
    });
    assert_eq!(
        event_count.get(),
        1,
        "should have emitted UpdatedRootRepoCommonDir on removal"
    );
}
2836
#[gpui::test]
async fn test_linked_worktree_git_file_event_does_not_panic(
    executor: BackgroundExecutor,
    cx: &mut TestAppContext,
) {
    // Regression test: in a linked worktree, `.git` is a file (containing
    // "gitdir: ..."), not a directory. When the background scanner receives
    // a filesystem event for a path inside the main repo's `.git` directory
    // (which it watches via the commondir), the ancestor-walking code in
    // `process_events` calls `is_git_dir` on each ancestor. If `is_git_dir`
    // treats `.git` files the same as `.git` directories, it incorrectly
    // identifies the gitfile as a git dir, adds it to `dot_git_abs_paths`,
    // and `update_git_repositories` panics because the path is outside the
    // worktree root.
    init_test(cx);

    use git::repository::Worktree as GitWorktree;

    let fs = FakeFs::new(executor);

    // Main repo with a real `.git` directory.
    fs.insert_tree(
        path!("/main_repo"),
        json!({
            ".git": {},
            "file.txt": "content",
        }),
    )
    .await;
    // Register /linked_worktree as a non-main linked worktree of the main
    // repo, so its `.git` is a gitfile rather than a directory.
    fs.add_linked_worktree_for_repo(
        Path::new(path!("/main_repo/.git")),
        false,
        GitWorktree {
            path: PathBuf::from(path!("/linked_worktree")),
            ref_name: Some("refs/heads/feature".into()),
            sha: "abc123".into(),
            is_main: false,
        },
    )
    .await;
    fs.write(
        path!("/linked_worktree/file.txt").as_ref(),
        "content".as_bytes(),
    )
    .await
    .unwrap();

    // Open the worktree on the linked checkout and wait for the initial scan.
    let tree = Worktree::local(
        path!("/linked_worktree").as_ref(),
        true,
        fs.clone(),
        Arc::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    cx.run_until_parked();

    // Trigger a filesystem event inside the main repo's .git directory
    // (which the linked worktree scanner watches via the commondir). This
    // uses the sentinel-file helper to ensure the event goes through the
    // real watcher path, exactly as it would in production.
    tree.flush_fs_events_in_root_git_repository(cx).await;

    // The worktree should still be intact.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.snapshot().root_repo_common_dir().map(|p| p.as_ref()),
            Some(Path::new(path!("/main_repo/.git"))),
        );
    });
}
2912
2913fn init_test(cx: &mut gpui::TestAppContext) {
2914 zlog::init_test();
2915
2916 cx.update(|cx| {
2917 let settings_store = SettingsStore::test(cx);
2918 cx.set_global(settings_store);
2919 });
2920}
2921
// Verifies that `Worktree::load_file` detects and decodes a variety of text
// encodings (with and without BOMs) and rejects binary content with an error.
#[gpui::test]
async fn test_load_file_encoding(cx: &mut TestAppContext) {
    init_test(cx);

    // One file per encoding: `bytes` is the raw on-disk content, and
    // `expected_text` is the decoded string `load_file` should produce.
    struct TestCase {
        name: &'static str,
        bytes: Vec<u8>,
        expected_text: &'static str,
    }

    // --- Success Cases ---
    let success_cases = vec![
        TestCase {
            name: "utf8.txt",
            bytes: "こんにちは".as_bytes().to_vec(),
            expected_text: "こんにちは",
        },
        TestCase {
            name: "sjis.txt",
            bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
            expected_text: "こんにちは",
        },
        TestCase {
            name: "eucjp.txt",
            bytes: vec![0xa4, 0xb3, 0xa4, 0xf3, 0xa4, 0xcb, 0xa4, 0xc1, 0xa4, 0xcf],
            expected_text: "こんにちは",
        },
        TestCase {
            name: "iso2022jp.txt",
            bytes: vec![
                0x1b, 0x24, 0x42, 0x24, 0x33, 0x24, 0x73, 0x24, 0x4b, 0x24, 0x41, 0x24, 0x4f, 0x1b,
                0x28, 0x42,
            ],
            expected_text: "こんにちは",
        },
        TestCase {
            name: "win1252.txt",
            bytes: vec![0x43, 0x61, 0x66, 0xe9],
            expected_text: "Café",
        },
        TestCase {
            name: "gbk.txt",
            bytes: vec![
                0xbd, 0xf1, 0xcc, 0xec, 0xcc, 0xec, 0xc6, 0xf8, 0xb2, 0xbb, 0xb4, 0xed,
            ],
            expected_text: "今天天气不错",
        },
        // UTF-16LE with BOM
        TestCase {
            name: "utf16le_bom.txt",
            bytes: vec![
                0xFF, 0xFE, // BOM
                0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F, 0x30,
            ],
            expected_text: "こんにちは",
        },
        // UTF-16BE with BOM
        TestCase {
            name: "utf16be_bom.txt",
            bytes: vec![
                0xFE, 0xFF, // BOM
                0x30, 0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F,
            ],
            expected_text: "こんにちは",
        },
        // UTF-16LE without BOM (ASCII only)
        // This relies on the "null byte heuristic" we implemented.
        // "ABC" -> 41 00 42 00 43 00
        TestCase {
            name: "utf16le_ascii_no_bom.txt",
            bytes: vec![0x41, 0x00, 0x42, 0x00, 0x43, 0x00],
            expected_text: "ABC",
        },
    ];

    // --- Failure Cases ---
    let failure_cases = vec![
        // Binary File (Should be detected by heuristic and return Error)
        // Contains random bytes and mixed nulls that don't match UTF-16 patterns
        TestCase {
            name: "binary.bin",
            bytes: vec![0x00, 0xFF, 0x12, 0x00, 0x99, 0x88, 0x77, 0x66, 0x00],
            expected_text: "", // Not used
        },
    ];

    // Use a platform-appropriate absolute root for the fake filesystem.
    let root_path = if cfg!(windows) {
        Path::new("C:\\root")
    } else {
        Path::new("/root")
    };

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.create_dir(root_path).await.unwrap();

    // Materialize every fixture file (both success and failure cases).
    for case in success_cases.iter().chain(failure_cases.iter()) {
        let path = root_path.join(case.name);
        fs.write(&path, &case.bytes).await.unwrap();
    }

    let tree = Worktree::local(
        root_path,
        true,
        fs,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Local helper (shadows the imported `rel_path` fn) that builds an
    // owned relative path using the platform's path style.
    let rel_path = |name: &str| {
        RelPath::new(&Path::new(name), PathStyle::local())
            .unwrap()
            .into_arc()
    };

    // Run Success Tests
    for case in success_cases {
        let loaded = tree
            .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx))
            .await;
        if let Err(e) = &loaded {
            panic!("Failed to load success case '{}': {:?}", case.name, e);
        }
        let loaded = loaded.unwrap();
        assert_eq!(
            loaded.text, case.expected_text,
            "Encoding mismatch for file: {}",
            case.name
        );
    }

    // Run Failure Tests
    for case in failure_cases {
        let loaded = tree
            .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx))
            .await;
        assert!(
            loaded.is_err(),
            "Failure case '{}' unexpectedly succeeded! It should have been detected as binary.",
            case.name
        );
        let err_msg = loaded.unwrap_err().to_string();
        println!("Got expected error for {}: {}", case.name, err_msg);
    }
}
3073
// Verifies that `Worktree::write_file` encodes text into the requested
// encoding, emitting a BOM exactly when `has_bom` is set, and produces the
// expected raw bytes on disk.
#[gpui::test]
async fn test_write_file_encoding(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Use a platform-appropriate absolute root for the fake filesystem.
    let root_path = if cfg!(windows) {
        Path::new("C:\\root")
    } else {
        Path::new("/root")
    };
    fs.create_dir(root_path).await.unwrap();

    let worktree = Worktree::local(
        root_path,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Define test case structure
    struct TestCase {
        name: &'static str,
        text: &'static str,
        encoding: &'static encoding_rs::Encoding,
        has_bom: bool,
        // Exact bytes expected on disk after the write.
        expected_bytes: Vec<u8>,
    }

    let cases = vec![
        // Shift_JIS with Japanese
        TestCase {
            name: "Shift_JIS with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::SHIFT_JIS,
            has_bom: false,
            expected_bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
        },
        // UTF-8 No BOM
        TestCase {
            name: "UTF-8 No BOM",
            text: "AB",
            encoding: encoding_rs::UTF_8,
            has_bom: false,
            expected_bytes: vec![0x41, 0x42],
        },
        // UTF-8 with BOM
        TestCase {
            name: "UTF-8 with BOM",
            text: "AB",
            encoding: encoding_rs::UTF_8,
            has_bom: true,
            expected_bytes: vec![0xEF, 0xBB, 0xBF, 0x41, 0x42],
        },
        // UTF-16LE No BOM with Japanese
        // NOTE: This passes thanks to the manual encoding fix implemented in `write_file`.
        TestCase {
            name: "UTF-16LE No BOM with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::UTF_16LE,
            has_bom: false,
            expected_bytes: vec![0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f, 0x30],
        },
        // UTF-16LE with BOM
        TestCase {
            name: "UTF-16LE with BOM",
            text: "A",
            encoding: encoding_rs::UTF_16LE,
            has_bom: true,
            expected_bytes: vec![0xFF, 0xFE, 0x41, 0x00],
        },
        // UTF-16BE No BOM with Japanese
        // NOTE: This passes thanks to the manual encoding fix.
        TestCase {
            name: "UTF-16BE No BOM with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::UTF_16BE,
            has_bom: false,
            expected_bytes: vec![0x30, 0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f],
        },
        // UTF-16BE with BOM
        TestCase {
            name: "UTF-16BE with BOM",
            text: "A",
            encoding: encoding_rs::UTF_16BE,
            has_bom: true,
            expected_bytes: vec![0xFE, 0xFF, 0x00, 0x41],
        },
    ];

    for (i, case) in cases.into_iter().enumerate() {
        // Each case writes to its own file to keep the assertions independent.
        let file_name = format!("test_{}.txt", i);
        let path: Arc<Path> = Path::new(&file_name).into();
        let file_path = root_path.join(&file_name);

        // Pre-create an empty file so write_file overwrites an existing entry.
        fs.insert_file(&file_path, "".into()).await;

        let rel_path = RelPath::new(&path, PathStyle::local()).unwrap().into_arc();
        let text = text::Rope::from(case.text);

        let task = worktree.update(cx, |wt, cx| {
            wt.write_file(
                rel_path,
                text,
                text::LineEnding::Unix,
                case.encoding,
                case.has_bom,
                cx,
            )
        });

        if let Err(e) = task.await {
            panic!("Unexpected error in case '{}': {:?}", case.name, e);
        }

        // Read back the raw bytes and compare against the expected encoding.
        let bytes = fs.load_bytes(&file_path).await.unwrap();

        assert_eq!(
            bytes, case.expected_bytes,
            "case '{}' mismatch. Expected {:?}, but got {:?}",
            case.name, case.expected_bytes, bytes
        );
    }
}
3202
3203#[gpui::test]
3204async fn test_refresh_entries_for_paths_creates_ancestors(cx: &mut TestAppContext) {
3205 init_test(cx);
3206 let fs = FakeFs::new(cx.background_executor.clone());
3207 fs.insert_tree(
3208 "/root",
3209 json!({
3210 "a": {
3211 "b": {
3212 "c": {
3213 "deep_file.txt": "content",
3214 "sibling.txt": "content"
3215 },
3216 "d": {
3217 "under_sibling_dir.txt": "content"
3218 }
3219 }
3220 }
3221 }),
3222 )
3223 .await;
3224
3225 let tree = Worktree::local(
3226 Path::new("/root"),
3227 true,
3228 fs.clone(),
3229 Default::default(),
3230 false, // Disable scanning so the initial scan doesn't discover any entries
3231 WorktreeId::from_proto(0),
3232 &mut cx.to_async(),
3233 )
3234 .await
3235 .unwrap();
3236
3237 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3238 .await;
3239
3240 tree.read_with(cx, |tree, _| {
3241 assert_eq!(
3242 tree.entries(true, 0)
3243 .map(|e| e.path.as_ref())
3244 .collect::<Vec<_>>(),
3245 &[rel_path("")],
3246 "Only root entry should exist when scanning is disabled"
3247 );
3248
3249 assert!(tree.entry_for_path(rel_path("a")).is_none());
3250 assert!(tree.entry_for_path(rel_path("a/b")).is_none());
3251 assert!(tree.entry_for_path(rel_path("a/b/c")).is_none());
3252 assert!(
3253 tree.entry_for_path(rel_path("a/b/c/deep_file.txt"))
3254 .is_none()
3255 );
3256 });
3257
3258 tree.read_with(cx, |tree, _| {
3259 tree.as_local()
3260 .unwrap()
3261 .refresh_entries_for_paths(vec![rel_path("a/b/c/deep_file.txt").into()])
3262 })
3263 .recv()
3264 .await;
3265
3266 tree.read_with(cx, |tree, _| {
3267 assert_eq!(
3268 tree.entries(true, 0)
3269 .map(|e| e.path.as_ref())
3270 .collect::<Vec<_>>(),
3271 &[
3272 rel_path(""),
3273 rel_path("a"),
3274 rel_path("a/b"),
3275 rel_path("a/b/c"),
3276 rel_path("a/b/c/deep_file.txt"),
3277 rel_path("a/b/c/sibling.txt"),
3278 rel_path("a/b/d"),
3279 ],
3280 "All ancestors should be created when refreshing a deeply nested path"
3281 );
3282 });
3283}
3284
3285#[gpui::test]
3286async fn test_single_file_worktree_deleted(cx: &mut TestAppContext) {
3287 init_test(cx);
3288 let fs = FakeFs::new(cx.background_executor.clone());
3289
3290 fs.insert_tree(
3291 "/root",
3292 json!({
3293 "test.txt": "content",
3294 }),
3295 )
3296 .await;
3297
3298 let tree = Worktree::local(
3299 Path::new("/root/test.txt"),
3300 true,
3301 fs.clone(),
3302 Default::default(),
3303 true,
3304 WorktreeId::from_proto(0),
3305 &mut cx.to_async(),
3306 )
3307 .await
3308 .unwrap();
3309
3310 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3311 .await;
3312
3313 tree.read_with(cx, |tree, _| {
3314 assert!(tree.is_single_file(), "Should be a single-file worktree");
3315 assert_eq!(tree.abs_path().as_ref(), Path::new("/root/test.txt"));
3316 });
3317
3318 // Delete the file
3319 fs.remove_file(Path::new("/root/test.txt"), Default::default())
3320 .await
3321 .unwrap();
3322
3323 // Subscribe to worktree events
3324 let deleted_event_received = Rc::new(Cell::new(false));
3325 let _subscription = cx.update({
3326 let deleted_event_received = deleted_event_received.clone();
3327 |cx| {
3328 cx.subscribe(&tree, move |_, event, _| {
3329 if matches!(event, Event::Deleted) {
3330 deleted_event_received.set(true);
3331 }
3332 })
3333 }
3334 });
3335
3336 // Trigger filesystem events - the scanner should detect the file is gone immediately
3337 // and emit a Deleted event
3338 cx.background_executor.run_until_parked();
3339 cx.background_executor
3340 .advance_clock(std::time::Duration::from_secs(1));
3341 cx.background_executor.run_until_parked();
3342
3343 assert!(
3344 deleted_event_received.get(),
3345 "Should receive Deleted event when single-file worktree root is deleted"
3346 );
3347}