1mod worktree_settings;
2
3use anyhow::Result;
4use encoding_rs;
5use fs::{FakeFs, Fs, RealFs, RemoveOptions};
6use git::{DOT_GIT, GITIGNORE, REPO_EXCLUDE};
7use gpui::{AppContext as _, BackgroundExecutor, BorrowAppContext, Context, Task, TestAppContext};
8use parking_lot::Mutex;
9use postage::stream::Stream;
10use pretty_assertions::assert_eq;
11use rand::prelude::*;
12use rpc::{AnyProtoClient, NoopProtoClient, proto};
13use worktree::{Entry, EntryKind, Event, PathChange, Worktree, WorktreeModelHandle};
14
15use serde_json::json;
16use settings::{SettingsStore, WorktreeId};
17use std::{
18 cell::Cell,
19 env,
20 fmt::Write,
21 mem,
22 path::{Path, PathBuf},
23 rc::Rc,
24 sync::Arc,
25};
26use util::{
27 ResultExt, path,
28 paths::PathStyle,
29 rel_path::{RelPath, rel_path},
30 test::TempTree,
31};
32
/// Verifies basic worktree traversal order and gitignore filtering:
/// `entries(false, _)` skips ignored entries ("a/b" is ignored via the
/// root `.gitignore`), while `entries(true, _)` includes them.
#[gpui::test]
async fn test_traversal(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "a/b\n",
            "a": {
                "b": "",
                "c": "",
            }
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    // Wait for the initial background scan to finish before asserting.
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        // Without ignored entries: "a/b" is filtered out.
        assert_eq!(
            tree.entries(false, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                rel_path(""),
                rel_path(".gitignore"),
                rel_path("a"),
                rel_path("a/c"),
            ]
        );
        // With ignored entries: "a/b" appears, in sorted traversal order.
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                rel_path(""),
                rel_path(".gitignore"),
                rel_path("a"),
                rel_path("a/b"),
                rel_path("a/c"),
            ]
        );
    })
}
89
/// Verifies that symlinks forming a cycle (each pointing back up to "..")
/// are listed as entries but not followed infinitely during the scan, and
/// that renaming one of them is picked up by the fs-event-driven rescan.
#[gpui::test(iterations = 10)]
async fn test_circular_symlinks(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "lib": {
                "a": {
                    "a.txt": ""
                },
                "b": {
                    "b.txt": ""
                }
            }
        }),
    )
    .await;
    // Both symlinks point at their parent, creating traversal cycles.
    fs.create_symlink("/root/lib/a/lib".as_ref(), "..".into())
        .await
        .unwrap();
    fs.create_symlink("/root/lib/b/lib".as_ref(), "..".into())
        .await
        .unwrap();

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        // The symlink entries themselves are present, but their targets
        // are not re-expanded (no infinite recursion).
        assert_eq!(
            tree.entries(false, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                rel_path(""),
                rel_path("lib"),
                rel_path("lib/a"),
                rel_path("lib/a/a.txt"),
                rel_path("lib/a/lib"),
                rel_path("lib/b"),
                rel_path("lib/b/b.txt"),
                rel_path("lib/b/lib"),
            ]
        );
    });

    // Rename one of the circular symlinks and let the watcher catch up.
    fs.rename(
        Path::new("/root/lib/a/lib"),
        Path::new("/root/lib/a/lib-2"),
        Default::default(),
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                rel_path(""),
                rel_path("lib"),
                rel_path("lib/a"),
                rel_path("lib/a/a.txt"),
                rel_path("lib/a/lib-2"),
                rel_path("lib/b"),
                rel_path("lib/b/b.txt"),
                rel_path("lib/b/lib"),
            ]
        );
    });
}
174
/// Verifies handling of symlinks whose targets lie outside the worktree
/// root: they are surfaced as `is_external` entries, left unloaded
/// (`EntryKind::UnloadedDir`) by default, and expanded lazily one level at
/// a time via `refresh_entries_for_paths`, emitting `PathChange::Loaded`
/// events for each newly loaded entry.
#[gpui::test]
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "dir1": {
                "deps": {
                    // symlinks here
                },
                "src": {
                    "a.rs": "",
                    "b.rs": "",
                },
            },
            "dir2": {
                "src": {
                    "c.rs": "",
                    "d.rs": "",
                }
            },
            "dir3": {
                "deps": {},
                "src": {
                    "e.rs": "",
                    "f.rs": "",
                },
            }
        }),
    )
    .await;

    // These symlinks point to directories outside of the worktree's root, dir1.
    fs.create_symlink("/root/dir1/deps/dep-dir2".as_ref(), "../../dir2".into())
        .await
        .unwrap();
    fs.create_symlink("/root/dir1/deps/dep-dir3".as_ref(), "../../dir3".into())
        .await
        .unwrap();

    let tree = Worktree::local(
        Path::new("/root/dir1"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Record (path, change) pairs from every UpdatedEntries event so we can
    // assert exactly which entries were reported as loaded.
    let tree_updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |_, cx| {
        let tree_updates = tree_updates.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedEntries(update) = event {
                tree_updates.lock().extend(
                    update
                        .iter()
                        .map(|(path, _, change)| (path.clone(), *change)),
                );
            }
        })
        .detach();
    });

    // The symlinked directories are not scanned by default.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );

        assert_eq!(
            tree.entry_for_path(rel_path("deps/dep-dir2")).unwrap().kind,
            EntryKind::UnloadedDir
        );
    });

    // Expand one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3").into()])
    })
    .recv()
    .await;

    // The expanded directory's contents are loaded. Subdirectories are
    // not scanned yet.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("deps/dep-dir3/deps"), true),
                (rel_path("deps/dep-dir3/src"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );
    });
    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (rel_path("deps/dep-dir3").into(), PathChange::Loaded),
            (rel_path("deps/dep-dir3/deps").into(), PathChange::Loaded),
            (rel_path("deps/dep-dir3/src").into(), PathChange::Loaded)
        ]
    );

    // Expand a subdirectory of one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("deps/dep-dir3/src").into()])
    })
    .recv()
    .await;

    // The expanded subdirectory's contents are loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("deps"), false),
                (rel_path("deps/dep-dir2"), true),
                (rel_path("deps/dep-dir3"), true),
                (rel_path("deps/dep-dir3/deps"), true),
                (rel_path("deps/dep-dir3/src"), true),
                (rel_path("deps/dep-dir3/src/e.rs"), true),
                (rel_path("deps/dep-dir3/src/f.rs"), true),
                (rel_path("src"), false),
                (rel_path("src/a.rs"), false),
                (rel_path("src/b.rs"), false),
            ]
        );
    });

    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (rel_path("deps/dep-dir3/src").into(), PathChange::Loaded),
            (
                rel_path("deps/dep-dir3/src/e.rs").into(),
                PathChange::Loaded
            ),
            (
                rel_path("deps/dep-dir3/src/f.rs").into(),
                PathChange::Loaded
            )
        ]
    );
}
353
/// Verifies that a case-only rename ("aaa.rs" -> "AAA.rs") is detected on
/// macOS, whose default filesystem (APFS/HFS+) is case-insensitive but
/// case-preserving — a naive comparison could miss the change entirely.
/// Uses the real filesystem, hence macOS-only and `allow_parking`.
#[cfg(target_os = "macos")]
#[gpui::test]
async fn test_renaming_case_only(cx: &mut TestAppContext) {
    cx.executor().allow_parking();
    init_test(cx);

    const OLD_NAME: &str = "aaa.rs";
    const NEW_NAME: &str = "AAA.rs";

    let fs = Arc::new(RealFs::new(None, cx.executor()));
    let temp_root = TempTree::new(json!({
        OLD_NAME: "",
    }));

    let tree = Worktree::local(
        temp_root.path(),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![rel_path(""), rel_path(OLD_NAME)]
        );
    });

    // overwrite/ignore_if_exists accommodate the case-insensitive fs, where
    // the destination "already exists" as the source under another case.
    fs.rename(
        &temp_root.path().join(OLD_NAME),
        &temp_root.path().join(NEW_NAME),
        fs::RenameOptions {
            overwrite: true,
            ignore_if_exists: true,
            create_parents: false,
        },
    )
    .await
    .unwrap();

    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![rel_path(""), rel_path(NEW_NAME)]
        );
    });
}
414
/// Verifies that a `Rescan` event on the worktree root reconciles state
/// that went stale while fs events were suppressed: changes made with
/// events paused (and the buffered events discarded) are only reflected
/// after the explicit rescan.
#[gpui::test]
async fn test_root_rescan_reconciles_stale_state(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "old.txt": "",
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![rel_path(""), rel_path("old.txt")]
        );
    });

    // Mutate the filesystem while events are paused, then drop the buffered
    // events so the worktree never hears about these changes directly.
    fs.pause_events();
    fs.remove_file(Path::new("/root/old.txt"), RemoveOptions::default())
        .await
        .unwrap();
    fs.insert_file(Path::new("/root/new.txt"), Vec::new()).await;
    assert_eq!(fs.buffered_event_count(), 2);
    fs.clear_buffered_events();

    // The worktree's view is now stale.
    tree.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path(rel_path("old.txt")).is_some());
        assert!(tree.entry_for_path(rel_path("new.txt")).is_none());
    });

    // A rescan event on the root should force a full reconciliation.
    fs.emit_fs_event("/root", Some(fs::PathEventKind::Rescan));
    fs.unpause_events_and_flush();
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path(rel_path("old.txt")).is_none());
        assert!(tree.entry_for_path(rel_path("new.txt")).is_some());
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![rel_path(""), rel_path("new.txt")]
        );
    });
}
479
/// Verifies the change events emitted for a `Rescan` of a subtree whose
/// contents changed silently (events paused and discarded): surviving
/// descendants are reported as `Updated` (even if unchanged), new files as
/// `Added`, and deleted ones as `Removed`. Entries outside the rescanned
/// subtree ("other.txt") are untouched.
#[gpui::test]
async fn test_subtree_rescan_reports_unchanged_descendants_as_updated(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "child.txt": "",
                "nested": {
                    "grandchild.txt": "",
                },
                "remove": {
                    "removed.txt": "",
                }
            },
            "other.txt": "",
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Collect (path, change) pairs, filtering out the sentinel entry that
    // flush_fs_events uses internally.
    let tree_updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |_, cx| {
        let tree_updates = tree_updates.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedEntries(update) = event {
                tree_updates.lock().extend(
                    update
                        .iter()
                        .filter(|(path, _, _)| path.as_ref() != rel_path("fs-event-sentinel"))
                        .map(|(path, _, change)| (path.clone(), *change)),
                );
            }
        })
        .detach();
    });
    // Make changes under /root/dir with events paused, then discard the
    // buffered events so only the explicit rescan below reveals them.
    fs.pause_events();
    fs.insert_file("/root/dir/new.txt", b"new content".to_vec())
        .await;
    fs.remove_dir(
        "/root/dir/remove".as_ref(),
        RemoveOptions {
            recursive: true,
            ignore_if_not_exists: false,
        },
    )
    .await
    .unwrap();
    fs.clear_buffered_events();
    fs.unpause_events_and_flush();

    fs.emit_fs_event("/root/dir", Some(fs::PathEventKind::Rescan));
    tree.flush_fs_events(cx).await;

    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (rel_path("dir").into(), PathChange::Updated),
            (rel_path("dir/child.txt").into(), PathChange::Updated),
            (rel_path("dir/nested").into(), PathChange::Updated),
            (
                rel_path("dir/nested/grandchild.txt").into(),
                PathChange::Updated
            ),
            (rel_path("dir/new.txt").into(), PathChange::Added),
            (rel_path("dir/remove").into(), PathChange::Removed),
            (
                rel_path("dir/remove/removed.txt").into(),
                PathChange::Removed
            ),
        ]
    );

    tree.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path(rel_path("other.txt")).is_some());
    });
}
572
/// Verifies lazy expansion of gitignored directories: opening a file deep
/// inside an unexpanded ignored directory loads exactly the ancestor
/// directories needed (counted via `read_dir_call_count`), and changes
/// inside still-unloaded ignored directories trigger no scanning work.
#[gpui::test]
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "one": {
                "node_modules": {
                    "a": {
                        "a1.js": "a1",
                        "a2.js": "a2",
                    },
                    "b": {
                        "b1.js": "b1",
                        "b2.js": "b2",
                    },
                    "c": {
                        "c1.js": "c1",
                        "c2.js": "c2",
                    }
                },
            },
            "two": {
                "x.js": "",
                "y.js": "",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // After the initial scan, the ignored directory is present but its
    // children have not been listed.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );
    });

    // Open a file that is nested inside of a gitignored directory that
    // has not yet been expanded.
    let prev_read_dir_count = fs.read_dir_call_count();
    let loaded = tree
        .update(cx, |tree, cx| {
            tree.load_file(rel_path("one/node_modules/b/b1.js"), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, _| {
        // node_modules and node_modules/b are now expanded; sibling dirs
        // a and c are listed but not expanded themselves.
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("one/node_modules/a"), true),
                (rel_path("one/node_modules/b"), true),
                (rel_path("one/node_modules/b/b1.js"), true),
                (rel_path("one/node_modules/b/b2.js"), true),
                (rel_path("one/node_modules/c"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );

        assert_eq!(
            loaded.file.path.as_ref(),
            rel_path("one/node_modules/b/b1.js")
        );

        // Only the newly-expanded directories are scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
    });

    // Open another file in a different subdirectory of the same
    // gitignored directory.
    let prev_read_dir_count = fs.read_dir_call_count();
    let loaded = tree
        .update(cx, |tree, cx| {
            tree.load_file(rel_path("one/node_modules/a/a2.js"), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("one"), false),
                (rel_path("one/node_modules"), true),
                (rel_path("one/node_modules/a"), true),
                (rel_path("one/node_modules/a/a1.js"), true),
                (rel_path("one/node_modules/a/a2.js"), true),
                (rel_path("one/node_modules/b"), true),
                (rel_path("one/node_modules/b/b1.js"), true),
                (rel_path("one/node_modules/b/b2.js"), true),
                (rel_path("one/node_modules/c"), true),
                (rel_path("two"), false),
                (rel_path("two/x.js"), false),
                (rel_path("two/y.js"), false),
            ]
        );

        assert_eq!(
            loaded.file.path.as_ref(),
            rel_path("one/node_modules/a/a2.js")
        );

        // Only the newly-expanded directory is scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
    });

    let path = PathBuf::from("/root/one/node_modules/c/lib");

    // No work happens when files and directories change within an unloaded directory.
    let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
    // When we open a directory, we check each ancestor whether it's a git
    // repository. That means we have an fs.metadata call per ancestor that we
    // need to subtract here.
    let ancestors = path.ancestors().count();

    fs.create_dir(path.as_ref()).await.unwrap();
    cx.executor().run_until_parked();

    assert_eq!(
        fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count - ancestors,
        0
    );
}
736
/// Verifies that when a `.gitignore` change un-ignores a directory tree,
/// the previously-unloaded portions are scanned (each exactly once) and a
/// newly-ignored subdirectory ("e") stays unexpanded, while already-loaded
/// directories are not re-read.
#[gpui::test]
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "a": {
                "a.js": "",
            },
            "b": {
                "b.js": "",
            },
            "node_modules": {
                "c": {
                    "c.js": "",
                },
                "d": {
                    "d.js": "",
                    "e": {
                        "e1.js": "",
                        "e2.js": "",
                    },
                    "f": {
                        "f1.js": "",
                        "f2.js": "",
                    }
                },
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Open a file within the gitignored directory, forcing some of its
    // subdirectories to be read, but not all.
    let read_dir_count_1 = fs.read_dir_call_count();
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![rel_path("node_modules/d/d.js").into()])
    })
    .recv()
    .await;

    // Those subdirectories are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("a"), false),
                (rel_path("a/a.js"), false),
                (rel_path("b"), false),
                (rel_path("b/b.js"), false),
                (rel_path("node_modules"), true),
                (rel_path("node_modules/c"), true),
                (rel_path("node_modules/d"), true),
                (rel_path("node_modules/d/d.js"), true),
                (rel_path("node_modules/d/e"), true),
                (rel_path("node_modules/d/f"), true),
            ]
        );
    });
    // Exactly two directories were read: node_modules and node_modules/d.
    let read_dir_count_2 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);

    // Update the gitignore so that node_modules is no longer ignored,
    // but a subdirectory is ignored
    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // All of the directories that are no longer ignored are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("a"), false),
                (rel_path("a/a.js"), false),
                (rel_path("b"), false),
                (rel_path("b/b.js"), false),
                // This directory is no longer ignored
                (rel_path("node_modules"), false),
                (rel_path("node_modules/c"), false),
                (rel_path("node_modules/c/c.js"), false),
                (rel_path("node_modules/d"), false),
                (rel_path("node_modules/d/d.js"), false),
                // This subdirectory is now ignored
                (rel_path("node_modules/d/e"), true),
                (rel_path("node_modules/d/f"), false),
                (rel_path("node_modules/d/f/f1.js"), false),
                (rel_path("node_modules/d/f/f2.js"), false),
            ]
        );
    });

    // Each of the newly-loaded directories is scanned only once.
    let read_dir_count_3 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
}
861
/// Verifies that `write_file` creates entries in both tracked and
/// gitignored directories, and that the resulting entries carry the
/// correct `is_ignored` flag. Uses the real filesystem via `TempTree`.
#[gpui::test]
async fn test_write_file(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".git": {},
        ".gitignore": "ignored-dir\n",
        "tracked-dir": {},
        "ignored-dir": {}
    }));

    let worktree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Ensure the global fs watcher is initialized on non-macOS platforms.
    #[cfg(not(target_os = "macos"))]
    fs::fs_watcher::global(|_| {}).unwrap();

    cx.read(|cx| worktree.read(cx).as_local().unwrap().scan_complete())
        .await;
    worktree.flush_fs_events(cx).await;

    worktree
        .update(cx, |tree, cx| {
            tree.write_file(
                rel_path("tracked-dir/file.txt").into(),
                "hello".into(),
                Default::default(),
                encoding_rs::UTF_8,
                false,
                cx,
            )
        })
        .await
        .unwrap();
    worktree
        .update(cx, |tree, cx| {
            tree.write_file(
                rel_path("ignored-dir/file.txt").into(),
                "world".into(),
                Default::default(),
                encoding_rs::UTF_8,
                false,
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |tree, _| {
        let tracked = tree
            .entry_for_path(rel_path("tracked-dir/file.txt"))
            .unwrap();
        let ignored = tree
            .entry_for_path(rel_path("ignored-dir/file.txt"))
            .unwrap();
        assert!(!tracked.is_ignored);
        assert!(ignored.is_ignored);
    });
}
929
/// Verifies that `file_scan_inclusions` globs force matching files to be
/// indexed even when they live inside gitignored directories
/// (node_modules, per the .gitignore) that would otherwise stay unloaded.
#[gpui::test]
async fn test_file_scan_inclusions(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules\ntop_level.txt\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
            "package.json": "//package.json"
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        "top_level.txt": "top level file",
        ".DS_Store": "",
    }));
    // Configure inclusions before the worktree is created so the initial
    // scan already honors them.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions = Some(vec![
                    "node_modules/**/package.json".to_string(),
                    "**/.DS_Store".to_string(),
                ]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        // Assert that file_scan_inclusions overrides file_scan_exclusions.
        check_worktree_entries(
            tree,
            &[],
            &["target", "node_modules"],
            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
            &[
                "node_modules/prettier/package.json",
                ".DS_Store",
                "node_modules/.DS_Store",
                "src/.DS_Store",
            ],
        )
    });
}
1002
1003#[gpui::test]
1004async fn test_file_scan_exclusions_overrules_inclusions(cx: &mut TestAppContext) {
1005 init_test(cx);
1006 cx.executor().allow_parking();
1007 let dir = TempTree::new(json!({
1008 ".gitignore": "**/target\n/node_modules\n",
1009 "target": {
1010 "index": "blah2"
1011 },
1012 "node_modules": {
1013 ".DS_Store": "",
1014 "prettier": {
1015 "package.json": "{}",
1016 },
1017 },
1018 "src": {
1019 ".DS_Store": "",
1020 "foo": {
1021 "foo.rs": "mod another;\n",
1022 "another.rs": "// another",
1023 },
1024 },
1025 ".DS_Store": "",
1026 }));
1027
1028 cx.update(|cx| {
1029 cx.update_global::<SettingsStore, _>(|store, cx| {
1030 store.update_user_settings(cx, |settings| {
1031 settings.project.worktree.file_scan_exclusions =
1032 Some(vec!["**/.DS_Store".to_string()]);
1033 settings.project.worktree.file_scan_inclusions =
1034 Some(vec!["**/.DS_Store".to_string()]);
1035 });
1036 });
1037 });
1038
1039 let tree = Worktree::local(
1040 dir.path(),
1041 true,
1042 Arc::new(RealFs::new(None, cx.executor())),
1043 Default::default(),
1044 true,
1045 WorktreeId::from_proto(0),
1046 &mut cx.to_async(),
1047 )
1048 .await
1049 .unwrap();
1050 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1051 .await;
1052 tree.flush_fs_events(cx).await;
1053 tree.read_with(cx, |tree, _| {
1054 // Assert that file_scan_inclusions overrides file_scan_exclusions.
1055 check_worktree_entries(
1056 tree,
1057 &[".DS_Store, src/.DS_Store"],
1058 &["target", "node_modules"],
1059 &["src/foo/another.rs", "src/foo/foo.rs", ".gitignore"],
1060 &[],
1061 )
1062 });
1063}
1064
/// Verifies that changing `file_scan_inclusions` at runtime re-indexes the
/// worktree: entries gain `is_always_included` when the glob is added and
/// lose it when the setting is cleared again.
#[gpui::test]
async fn test_file_scan_inclusions_reindexes_on_setting_change(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules/\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
        },
        ".DS_Store": "",
    }));

    // Initially include everything under the gitignored node_modules.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions =
                    Some(vec!["node_modules/**".to_string()]);
            });
        });
    });
    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("node_modules"))
                .is_some_and(|f| f.is_always_included)
        );
        assert!(
            tree.entry_for_path(rel_path("node_modules/prettier/package.json"))
                .is_some_and(|f| f.is_always_included)
        );
    });

    // Clear the inclusions; the same entries should lose the flag.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![]);
                settings.project.worktree.file_scan_inclusions = Some(vec![]);
            });
        });
    });
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("node_modules"))
                .is_some_and(|f| !f.is_always_included)
        );
        assert!(
            tree.entry_for_path(rel_path("node_modules/prettier/package.json"))
                .is_some_and(|f| !f.is_always_included)
        );
    });
}
1148
/// Verifies that `file_scan_exclusions` globs prevent matching paths from
/// being indexed, and that changing the setting at runtime re-indexes the
/// worktree with the new exclusion set.
#[gpui::test]
async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n/node_modules\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        ".DS_Store": "",
    }));
    // Exclude src/foo and all .DS_Store files from the initial scan.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions =
                    Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "node_modules/.DS_Store",
                "src/.DS_Store",
                ".DS_Store",
            ],
            &["target", "node_modules"],
            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
            &[],
        )
    });

    // Swap the exclusions: now only node_modules contents are excluded,
    // so the previously-excluded paths should become visible.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions =
                    Some(vec!["**/node_modules/**".to_string()]);
            });
        });
    });
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                "node_modules/prettier/package.json",
                "node_modules/.DS_Store",
                "node_modules",
            ],
            &["target"],
            &[
                ".gitignore",
                "src/lib.rs",
                "src/bar/bar.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/.DS_Store",
                ".DS_Store",
            ],
            &[],
        )
    });
}
1248
/// Verifies the `is_hidden` flag: by default, dotfiles and dot-directories
/// (and their contents) are hidden; after setting `hidden_files` to a
/// custom glob, only entries matching that glob are hidden and dotfiles
/// become visible.
#[gpui::test]
async fn test_hidden_files(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".gitignore": "**/target\n",
        ".hidden_file": "content",
        ".hidden_dir": {
            "nested.rs": "code",
        },
        "src": {
            "visible.rs": "code",
        },
        "logs": {
            "app.log": "logs",
            "debug.log": "logs",
        },
        "visible.txt": "content",
    }));

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    // Default behavior: dot-prefixed entries (and their children) are hidden.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_hidden))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), true),
                (rel_path(".hidden_dir"), true),
                (rel_path(".hidden_dir/nested.rs"), true),
                (rel_path(".hidden_file"), true),
                (rel_path("logs"), false),
                (rel_path("logs/app.log"), false),
                (rel_path("logs/debug.log"), false),
                (rel_path("src"), false),
                (rel_path("src/visible.rs"), false),
                (rel_path("visible.txt"), false),
            ]
        );
    });

    // Override hidden_files with a custom glob for *.log files.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.hidden_files = Some(vec!["**/*.log".to_string()]);
            });
        });
    });
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Now only the .log files are hidden; the setting replaces (rather than
    // extends) the default dotfile behavior.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_hidden))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path(".hidden_dir"), false),
                (rel_path(".hidden_dir/nested.rs"), false),
                (rel_path(".hidden_file"), false),
                (rel_path("logs"), false),
                (rel_path("logs/app.log"), true),
                (rel_path("logs/debug.log"), true),
                (rel_path("src"), false),
                (rel_path("src/visible.rs"), false),
                (rel_path("visible.txt"), false),
            ]
        );
    });
}
1336
// Verifies `file_scan_exclusions` against a real filesystem: excluded paths
// (`.git`, `node_modules`, `build_output`) never appear in the worktree, while
// gitignored paths (`target`, `test_output`) appear but are marked ignored.
// Files created later — including inside excluded directories — must be
// classified the same way once their fs events are processed.
#[gpui::test]
async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
    init_test(cx);
    // RealFs blocks, so the executor must be allowed to park.
    cx.executor().allow_parking();
    let dir = TempTree::new(json!({
        ".git": {
            "HEAD": "ref: refs/heads/main\n",
            "foo": "bar",
        },
        ".gitignore": "**/target\n/node_modules\ntest_output\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        ".DS_Store": "",
    }));
    // Configure exclusions before the worktree is created; note that
    // `build_output` does not exist on disk yet.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(vec![
                    "**/.git".to_string(),
                    "node_modules/".to_string(),
                    "build_output".to_string(),
                ]);
            });
        });
    });

    let tree = Worktree::local(
        dir.path(),
        true,
        Arc::new(RealFs::new(None, cx.executor())),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    // Initial scan: `.git` and `node_modules` contents are excluded, `target`
    // is present but gitignored, and the `src` tree is tracked.
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                ".git/HEAD",
                ".git/foo",
                "node_modules",
                "node_modules/.DS_Store",
                "node_modules/prettier",
                "node_modules/prettier/package.json",
            ],
            &["target"],
            &[
                ".DS_Store",
                "src/.DS_Store",
                "src/lib.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/bar/bar.rs",
                ".gitignore",
            ],
            &[],
        )
    });

    // Create one directory matching the exclusions (`build_output`) and one
    // matching the .gitignore (`test_output`), after the initial scan.
    let new_excluded_dir = dir.path().join("build_output");
    let new_ignored_dir = dir.path().join("test_output");
    std::fs::create_dir_all(&new_excluded_dir)
        .unwrap_or_else(|e| panic!("Failed to create a {new_excluded_dir:?} directory: {e}"));
    std::fs::create_dir_all(&new_ignored_dir)
        .unwrap_or_else(|e| panic!("Failed to create a {new_ignored_dir:?} directory: {e}"));
    let node_modules_dir = dir.path().join("node_modules");
    let dot_git_dir = dir.path().join(".git");
    let src_dir = dir.path().join("src");
    for existing_dir in [&node_modules_dir, &dot_git_dir, &src_dir] {
        assert!(
            existing_dir.is_dir(),
            "Expect {existing_dir:?} to be present in the FS already"
        );
    }

    // Write a `new_file` into every one of these directories, covering the
    // excluded, ignored, and tracked cases at once.
    for directory_for_new_file in [
        new_excluded_dir,
        new_ignored_dir,
        node_modules_dir,
        dot_git_dir,
        src_dir,
    ] {
        std::fs::write(directory_for_new_file.join("new_file"), "new file contents")
            .unwrap_or_else(|e| {
                panic!("Failed to create in {directory_for_new_file:?} a new file: {e}")
            });
    }
    tree.flush_fs_events(cx).await;

    // Each `new_file` inherits the classification of its parent directory:
    // excluded under excluded dirs, ignored under `test_output`, tracked under `src`.
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                ".git/HEAD",
                ".git/foo",
                ".git/new_file",
                "node_modules",
                "node_modules/.DS_Store",
                "node_modules/prettier",
                "node_modules/prettier/package.json",
                "node_modules/new_file",
                "build_output",
                "build_output/new_file",
                "test_output/new_file",
            ],
            &["target", "test_output"],
            &[
                ".DS_Store",
                "src/.DS_Store",
                "src/lib.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/bar/bar.rs",
                "src/new_file",
                ".gitignore",
            ],
            &[],
        )
    });
}
1481
1482#[gpui::test]
1483async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
1484 init_test(cx);
1485 cx.executor().allow_parking();
1486 let dir = TempTree::new(json!({
1487 ".git": {
1488 "HEAD": "ref: refs/heads/main\n",
1489 "foo": "foo contents",
1490 },
1491 }));
1492 let dot_git_worktree_dir = dir.path().join(".git");
1493
1494 let tree = Worktree::local(
1495 dot_git_worktree_dir.clone(),
1496 true,
1497 Arc::new(RealFs::new(None, cx.executor())),
1498 Default::default(),
1499 true,
1500 WorktreeId::from_proto(0),
1501 &mut cx.to_async(),
1502 )
1503 .await
1504 .unwrap();
1505 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1506 .await;
1507 tree.flush_fs_events(cx).await;
1508 tree.read_with(cx, |tree, _| {
1509 check_worktree_entries(tree, &[], &["HEAD", "foo"], &[], &[])
1510 });
1511
1512 std::fs::write(dot_git_worktree_dir.join("new_file"), "new file contents")
1513 .unwrap_or_else(|e| panic!("Failed to create in {dot_git_worktree_dir:?} a new file: {e}"));
1514 tree.flush_fs_events(cx).await;
1515 tree.read_with(cx, |tree, _| {
1516 check_worktree_entries(tree, &[], &["HEAD", "foo", "new_file"], &[], &[])
1517 });
1518}
1519
// Creates a directory through the worktree API while the initial scan may
// still be in progress (multiple iterations shake out races). A second
// snapshot, kept in sync solely by applying the updates emitted via
// `observe_updates`, must end up identical to the worktree's own snapshot.
#[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "b": {},
            "c": {},
            "d": {},
        }),
    )
    .await;

    let tree = Worktree::local(
        "/root".as_ref(),
        true,
        fs,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Mirror every emitted update into `snapshot1` as it arrives.
    let snapshot1 = tree.update(cx, |tree, cx| {
        let tree = tree.as_local_mut().unwrap();
        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
        tree.observe_updates(0, cx, {
            let snapshot = snapshot.clone();
            let settings = tree.settings();
            move |update| {
                snapshot
                    .lock()
                    .apply_remote_update(update, &settings.file_scan_inclusions);
                async { true }
            }
        });
        snapshot
    });

    // Create `a/e` — note `a` does not exist yet, and the scan may still be running.
    let entry = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry(rel_path("a/e").into(), true, None, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_dir());

    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entry_for_path(rel_path("a/e")).unwrap().kind,
            EntryKind::Dir
        );
    });

    // The update-driven mirror must match the worktree's final snapshot exactly.
    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
    assert_eq!(
        snapshot1.lock().entries(true, 0).collect::<Vec<_>>(),
        snapshot2.entries(true, 0).collect::<Vec<_>>()
    );
}
1588
// `create_entry` creates any missing parent directories of the requested path
// (like `mkdir -p`). Verified first against the fake filesystem, then against
// the real filesystem, and finally for the cases where only the leaf is new
// ("smallest change") and where the entire directory chain is new
// ("largest change").
#[gpui::test]
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let fs_fake = FakeFs::new(cx.background_executor.clone());
    fs_fake
        .insert_tree(
            "/root",
            json!({
                "a": {},
            }),
        )
        .await;

    let tree_fake = Worktree::local(
        "/root".as_ref(),
        true,
        fs_fake,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Only `a` exists; `b` and `c` must be created implicitly.
    let entry = tree_fake
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/d.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_fake.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/d.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("a/b/c")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("a/b")).unwrap().is_dir());
    });

    // Repeat the same scenario on the real filesystem.
    let fs_real = Arc::new(RealFs::new(None, cx.executor()));
    let temp_root = TempTree::new(json!({
        "a": {}
    }));

    let tree_real = Worktree::local(
        temp_root.path(),
        true,
        fs_real,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/d.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/d.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("a/b/c")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("a/b")).unwrap().is_dir());
    });

    // Test smallest change
    // All parents already exist — only the leaf file is new.
    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("a/b/c/e.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("a/b/c/e.txt"))
                .unwrap()
                .is_file()
        );
    });

    // Test largest change
    // None of the parents exist — the full `d/e/f` chain must be created.
    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut().unwrap().create_entry(
                rel_path("d/e/f/g.txt").into(),
                false,
                None,
                cx,
            )
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(
            tree.entry_for_path(rel_path("d/e/f/g.txt"))
                .unwrap()
                .is_file()
        );
        assert!(tree.entry_for_path(rel_path("d/e/f")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("d/e")).unwrap().is_dir());
        assert!(tree.entry_for_path(rel_path("d")).unwrap().is_dir());
    });
}
1738
// Tests the behavior of our worktree refresh when a file in a gitignored directory
// is created. Once a gitignored directory has been expanded (its contents
// loaded), creating a new file inside it must not collapse it back to
// `UnloadedDir` or drop its previously-loaded siblings.
#[gpui::test]
async fn test_create_file_in_expanded_gitignored_dir(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "ignored_dir\n",
            "ignored_dir": {
                "existing_file.txt": "existing content",
                "another_file.txt": "another content",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // After the initial scan, the gitignored directory is present but its
    // contents have not been loaded.
    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(ignored_dir.kind, EntryKind::UnloadedDir);
    });

    // Loading a file inside the directory forces it to be expanded.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("ignored_dir/existing_file.txt"), cx)
    })
    .await
    .unwrap();

    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(ignored_dir.kind, EntryKind::Dir);

        assert!(
            tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
                .is_some()
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
                .is_some()
        );
    });

    // Now create a new file in the expanded, gitignored directory.
    let entry = tree
        .update(cx, |tree, cx| {
            tree.create_entry(rel_path("ignored_dir/new_file.txt").into(), false, None, cx)
        })
        .await
        .unwrap();
    assert!(entry.into_included().is_some());

    cx.executor().run_until_parked();

    // The directory must remain loaded and all three files must be visible.
    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert!(ignored_dir.is_ignored);
        assert_eq!(
            ignored_dir.kind,
            EntryKind::Dir,
            "ignored_dir should still be loaded, not UnloadedDir"
        );

        assert!(
            tree.entry_for_path(rel_path("ignored_dir/existing_file.txt"))
                .is_some(),
            "existing_file.txt should still be visible"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/another_file.txt"))
                .is_some(),
            "another_file.txt should still be visible"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/new_file.txt"))
                .is_some(),
            "new_file.txt should be visible"
        );
    });
}
1835
// Tests the behavior of our worktree refresh when a directory modification for a gitignored directory
// is triggered. After a gitignored directory has been expanded, a plain
// "changed" fs event on the directory itself must not unload it or drop its
// already-loaded children.
#[gpui::test]
async fn test_fs_event_for_gitignored_dir_does_not_lose_contents(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "ignored_dir\n",
            "ignored_dir": {
                "file1.txt": "content1",
                "file2.txt": "content2",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Load a file to expand the ignored directory
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("ignored_dir/file1.txt"), cx)
    })
    .await
    .unwrap();

    // Sanity check: the directory is loaded and both children are visible.
    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert_eq!(ignored_dir.kind, EntryKind::Dir);
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
                .is_some()
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
                .is_some()
        );
    });

    // Simulate a fs "changed" event targeting the directory itself.
    fs.emit_fs_event("/root/ignored_dir", Some(fs::PathEventKind::Changed));
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap();
        assert_eq!(
            ignored_dir.kind,
            EntryKind::Dir,
            "ignored_dir should still be loaded (Dir), not UnloadedDir"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file1.txt"))
                .is_some(),
            "file1.txt should still be visible after directory fs event"
        );
        assert!(
            tree.entry_for_path(rel_path("ignored_dir/file2.txt"))
                .is_some(),
            "file2.txt should still be visible after directory fs event"
        );
    });
}
1911
// Fuzz test: performs random worktree mutations while the initial scan is
// still running, recording every update emitted via `observe_updates`.
// Afterwards, each intermediate snapshot must converge to the final snapshot
// when the recorded updates (with a scan id at or past the snapshot's) are
// replayed onto it. `OPERATIONS` and `INITIAL_ENTRIES` env vars override the
// default workload sizes.
#[gpui::test(iterations = 100)]
async fn test_random_worktree_operations_during_initial_scan(
    cx: &mut TestAppContext,
    mut rng: StdRng,
) {
    init_test(cx);
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(5);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new(path!("/test"));
    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Baseline snapshot plus change-event verification and update recording.
    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    // Mutate the worktree while the scan races along, checking invariants and
    // occasionally capturing an intermediate snapshot.
    for _ in 0..operations {
        worktree
            .update(cx, |worktree, cx| {
                randomly_mutate_worktree(worktree, &mut rng, cx)
            })
            .await
            .log_err();
        worktree.read_with(cx, |tree, _| {
            tree.as_local().unwrap().snapshot().check_invariants(true)
        });

        if rng.random_bool(0.6) {
            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
        }
    }

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    cx.executor().run_until_parked();

    let final_snapshot = worktree.read_with(cx, |tree, _| {
        let tree = tree.as_local().unwrap();
        let snapshot = tree.snapshot();
        snapshot.check_invariants(true);
        snapshot
    });

    let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());

    // Replay the recorded updates onto every stored snapshot; each must end up
    // identical to the final snapshot.
    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
        let mut updated_snapshot = snapshot.clone();
        for update in updates.lock().iter() {
            if update.scan_id >= updated_snapshot.scan_id() as u64 {
                updated_snapshot
                    .apply_remote_update(update.clone(), &settings.file_scan_inclusions);
            }
        }

        assert_eq!(
            updated_snapshot.entries(true, 0).collect::<Vec<_>>(),
            final_snapshot.entries(true, 0).collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }
}
2006
// Fuzz test: after the initial scan completes, mutates the filesystem and the
// worktree randomly while fs events are paused and flushed in random-sized
// batches. Verifies that (1) a brand-new worktree scanning the final
// filesystem state matches the incrementally-maintained one, and (2) replaying
// the recorded updates onto intermediate snapshots reproduces the final state.
// `OPERATIONS` and `INITIAL_ENTRIES` env vars override the workload sizes.
#[gpui::test(iterations = 100)]
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
    init_test(cx);
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(40);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new(path!("/test"));
    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Record every emitted update and verify change events as they happen.
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    // Pause fs event delivery so that events can be flushed in random batches,
    // simulating delayed/coalesced notifications.
    fs.as_fake().pause_events();
    let mut snapshots = Vec::new();
    let mut mutations_len = operations;
    while mutations_len > 1 {
        if rng.random_bool(0.2) {
            worktree
                .update(cx, |worktree, cx| {
                    randomly_mutate_worktree(worktree, &mut rng, cx)
                })
                .await
                .log_err();
        } else {
            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
        }

        let buffered_event_count = fs.as_fake().buffered_event_count();
        if buffered_event_count > 0 && rng.random_bool(0.3) {
            let len = rng.random_range(0..=buffered_event_count);
            log::info!("flushing {} events", len);
            fs.as_fake().flush_events(len);
        } else {
            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
            mutations_len -= 1;
        }

        cx.executor().run_until_parked();
        if rng.random_bool(0.2) {
            log::info!("storing snapshot {}", snapshots.len());
            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
            snapshots.push(snapshot);
        }
    }

    // Deliver all remaining events and let the scanner settle.
    log::info!("quiescing");
    fs.as_fake().flush_events(usize::MAX);
    cx.executor().run_until_parked();

    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    snapshot.check_invariants(true);
    let expanded_paths = snapshot
        .expanded_entries()
        .map(|e| e.path.clone())
        .collect::<Vec<_>>();

    // A fresh worktree scanning the same filesystem (with the same directories
    // expanded) must produce the same entries.
    {
        let new_worktree = Worktree::local(
            root_dir,
            true,
            fs.clone(),
            Default::default(),
            true,
            WorktreeId::from_proto(0),
            &mut cx.to_async(),
        )
        .await
        .unwrap();
        new_worktree
            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
            .await;
        new_worktree
            .update(cx, |tree, _| {
                tree.as_local_mut()
                    .unwrap()
                    .refresh_entries_for_paths(expanded_paths)
            })
            .recv()
            .await;
        let new_snapshot =
            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        assert_eq!(
            snapshot.entries_without_ids(true),
            new_snapshot.entries_without_ids(true)
        );
    }

    let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());

    // Replaying the recorded updates onto every stored snapshot must
    // reproduce the final state (modulo pending-dir kinds, normalized below).
    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
        for update in updates.lock().iter() {
            if update.scan_id >= prev_snapshot.scan_id() as u64 {
                prev_snapshot.apply_remote_update(update.clone(), &settings.file_scan_inclusions);
            }
        }

        assert_eq!(
            prev_snapshot
                .entries(true, 0)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            snapshot
                .entries(true, 0)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }

    // Normalize all directory kinds to `Dir` so that transient kinds don't
    // cause spurious comparison failures.
    fn ignore_pending_dir(entry: &Entry) -> Entry {
        let mut entry = entry.clone();
        if entry.kind.is_dir() {
            entry.kind = EntryKind::Dir
        }
        entry
    }
}
2159
// The worktree's `UpdatedEntries` event can be used to follow along with
// all changes to the worktree's snapshot.
//
// This subscribes to the worktree's own events and incrementally applies each
// reported change to a locally-maintained, path-sorted copy of the entry list,
// asserting after every batch that the copy matches the tree's actual entries.
fn check_worktree_change_events(tree: &mut Worktree, cx: &mut Context<Worktree>) {
    // Start from the current, fully-materialized entry list (sorted by path).
    let mut entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
    cx.subscribe(&cx.entity(), move |tree, _, event, _| {
        if let Event::UpdatedEntries(changes) = event {
            for (path, _, change_type) in changes.iter() {
                let entry = tree.entry_for_path(path).cloned();
                // Whether or not the path is currently present, `ix` is the
                // position at which an entry for it belongs.
                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
                    Ok(ix) | Err(ix) => ix,
                };
                match change_type {
                    PathChange::Added => entries.insert(ix, entry.unwrap()),
                    PathChange::Removed => drop(entries.remove(ix)),
                    PathChange::Updated => {
                        // An update must refer to an entry we already track.
                        let entry = entry.unwrap();
                        let existing_entry = entries.get_mut(ix).unwrap();
                        assert_eq!(existing_entry.path, entry.path);
                        *existing_entry = entry;
                    }
                    PathChange::AddedOrUpdated | PathChange::Loaded => {
                        // Upsert: replace in place if present, insert otherwise.
                        let entry = entry.unwrap();
                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
                            *entries.get_mut(ix).unwrap() = entry;
                        } else {
                            entries.insert(ix, entry);
                        }
                    }
                }
            }

            // After applying the whole batch, the mirror must agree with the tree.
            let new_entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
        }
    })
    .detach();
}
2197
// Applies one random operation to the worktree through its public API,
// returning a task that resolves when the operation completes:
// - ~34% of the time, deletes a randomly-chosen entry (never the root);
// - otherwise, if the chosen entry is a directory, creates a random child in
//   it (a directory 30% of the time, else a file);
// - otherwise, overwrites the chosen file with empty contents.
fn randomly_mutate_worktree(
    worktree: &mut Worktree,
    rng: &mut impl Rng,
    cx: &mut Context<Worktree>,
) -> Task<Result<()>> {
    log::info!("mutating worktree");
    let worktree = worktree.as_local_mut().unwrap();
    let snapshot = worktree.snapshot();
    let entry = snapshot.entries(false, 0).choose(rng).unwrap();

    match rng.random_range(0_u32..100) {
        // Delete — guarded so the worktree root itself is never removed.
        0..=33 if entry.path.as_ref() != RelPath::empty() => {
            log::info!("deleting entry {:?} ({})", entry.path, entry.id.to_usize());
            let task = worktree
                .delete_entry(entry.id, false, cx)
                .unwrap_or_else(|| Task::ready(Ok(None)));

            cx.background_spawn(async move {
                task.await?;
                Ok(())
            })
        }
        _ => {
            if entry.is_dir() {
                // Create a randomly-named child under the chosen directory.
                let child_path = entry.path.join(rel_path(&random_filename(rng)));
                let is_dir = rng.random_bool(0.3);
                log::info!(
                    "creating {} at {:?}",
                    if is_dir { "dir" } else { "file" },
                    child_path,
                );
                let task = worktree.create_entry(child_path, is_dir, None, cx);
                cx.background_spawn(async move {
                    task.await?;
                    Ok(())
                })
            } else {
                // Overwrite the chosen file with empty UTF-8 contents.
                log::info!(
                    "overwriting file {:?} ({})",
                    &entry.path,
                    entry.id.to_usize()
                );
                let task = worktree.write_file(
                    entry.path.clone(),
                    "".into(),
                    Default::default(),
                    encoding_rs::UTF_8,
                    false,
                    cx,
                );
                cx.background_spawn(async move {
                    task.await?;
                    Ok(())
                })
            }
        }
    }
}
2256
// Applies one random mutation directly to the fake filesystem under
// `root_path`, bypassing the worktree entirely:
// - with probability `insertion_probability` (or always when the tree is
//   empty apart from the root), creates a new file or directory;
// - with probability 5%, writes a random `.gitignore` into some directory;
// - otherwise renames or deletes a randomly-chosen existing file/directory.
async fn randomly_mutate_fs(
    fs: &Arc<dyn Fs>,
    root_path: &Path,
    insertion_probability: f64,
    rng: &mut impl Rng,
) {
    log::info!("mutating fs");
    // Partition the existing paths under `root_path` into files and dirs.
    // Note: `dirs` always contains `root_path` itself.
    let mut files = Vec::new();
    let mut dirs = Vec::new();
    for path in fs.as_fake().paths(false) {
        if path.starts_with(root_path) {
            if fs.is_file(&path).await {
                files.push(path);
            } else {
                dirs.push(path);
            }
        }
    }

    if (files.is_empty() && dirs.len() == 1) || rng.random_bool(insertion_probability) {
        // Insertion: create a randomly-named file or dir under a random dir.
        let path = dirs.choose(rng).unwrap();
        let new_path = path.join(random_filename(rng));

        if rng.random() {
            log::info!(
                "creating dir {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_dir(&new_path).await.unwrap();
        } else {
            log::info!(
                "creating file {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_file(&new_path, Default::default()).await.unwrap();
        }
    } else if rng.random_bool(0.05) {
        // Write a `.gitignore` into a random directory, ignoring a random
        // subset of the files and subdirectories beneath it.
        let ignore_dir_path = dirs.choose(rng).unwrap();
        let ignore_path = ignore_dir_path.join(GITIGNORE);

        let subdirs = dirs
            .iter()
            .filter(|d| d.starts_with(ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let subfiles = files
            .iter()
            .filter(|d| d.starts_with(ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let files_to_ignore = {
            let len = rng.random_range(0..=subfiles.len());
            subfiles.choose_multiple(rng, len)
        };
        // NOTE(review): the dir range is exclusive (`0..len`) while the file
        // range is inclusive (`0..=len`), so not every subdirectory can be
        // ignored at once — presumably to avoid ignoring the directory that
        // contains the `.gitignore` itself. TODO confirm this is intentional.
        let dirs_to_ignore = {
            let len = rng.random_range(0..subdirs.len());
            subdirs.choose_multiple(rng, len)
        };

        // Each ignored path is written relative to the `.gitignore`'s dir.
        let mut ignore_contents = String::new();
        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
            writeln!(
                ignore_contents,
                "{}",
                path_to_ignore
                    .strip_prefix(ignore_dir_path)
                    .unwrap()
                    .to_str()
                    .unwrap()
            )
            .unwrap();
        }
        log::info!(
            "creating gitignore {:?} with contents:\n{}",
            ignore_path.strip_prefix(root_path).unwrap(),
            ignore_contents
        );
        fs.save(
            &ignore_path,
            &ignore_contents.as_str().into(),
            Default::default(),
        )
        .await
        .unwrap();
    } else {
        // Rename or delete an existing path. `dirs[1..]` excludes the root,
        // so the root directory itself is never moved or removed.
        let old_path = {
            let file_path = files.choose(rng);
            let dir_path = dirs[1..].choose(rng);
            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
        };

        let is_rename = rng.random();
        if is_rename {
            // Pick a destination dir that isn't inside the path being moved.
            let new_path_parent = dirs
                .iter()
                .filter(|d| !d.starts_with(old_path))
                .choose(rng)
                .unwrap();

            // 30% of the time (when safe), remove the destination dir first
            // and move the old path into its place.
            let overwrite_existing_dir =
                !old_path.starts_with(new_path_parent) && rng.random_bool(0.3);
            let new_path = if overwrite_existing_dir {
                fs.remove_dir(
                    new_path_parent,
                    RemoveOptions {
                        recursive: true,
                        ignore_if_not_exists: true,
                    },
                )
                .await
                .unwrap();
                new_path_parent.to_path_buf()
            } else {
                new_path_parent.join(random_filename(rng))
            };

            log::info!(
                "renaming {:?} to {}{:?}",
                old_path.strip_prefix(root_path).unwrap(),
                if overwrite_existing_dir {
                    "overwrite "
                } else {
                    ""
                },
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.rename(
                old_path,
                &new_path,
                fs::RenameOptions {
                    overwrite: true,
                    ignore_if_exists: true,
                    create_parents: false,
                },
            )
            .await
            .unwrap();
        } else if fs.is_file(old_path).await {
            log::info!(
                "deleting file {:?}",
                old_path.strip_prefix(root_path).unwrap()
            );
            fs.remove_file(old_path, Default::default()).await.unwrap();
        } else {
            log::info!(
                "deleting dir {:?}",
                old_path.strip_prefix(root_path).unwrap()
            );
            fs.remove_dir(
                old_path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await
            .unwrap();
        }
    }
}
2417
2418fn random_filename(rng: &mut impl Rng) -> String {
2419 (0..6)
2420 .map(|_| rng.sample(rand::distr::Alphanumeric))
2421 .map(char::from)
2422 .collect()
2423}
2424
2425#[gpui::test]
2426async fn test_private_single_file_worktree(cx: &mut TestAppContext) {
2427 init_test(cx);
2428 let fs = FakeFs::new(cx.background_executor.clone());
2429 fs.insert_tree("/", json!({".env": "PRIVATE=secret\n"}))
2430 .await;
2431 let tree = Worktree::local(
2432 Path::new("/.env"),
2433 true,
2434 fs.clone(),
2435 Default::default(),
2436 true,
2437 WorktreeId::from_proto(0),
2438 &mut cx.to_async(),
2439 )
2440 .await
2441 .unwrap();
2442 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2443 .await;
2444 tree.read_with(cx, |tree, _| {
2445 let entry = tree.entry_for_path(rel_path("")).unwrap();
2446 assert!(entry.is_private);
2447 });
2448}
2449
// A worktree rooted below a repository's root (`/root/subproject`, with the
// `.git` directory at `/root`) should still discover that repository, and a
// re-scan triggered by touching the worktree root must not change the result.
#[gpui::test]
async fn test_repository_above_root(executor: BackgroundExecutor, cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            "subproject": {
                "a.txt": "A"
            }
        }),
    )
    .await;
    let worktree = Worktree::local(
        path!("/root/subproject").as_ref(),
        true,
        fs.clone(),
        Arc::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();
    // The repository rooted at `/root` — above the worktree root — is found.
    let repos = worktree.update(cx, |worktree, _| {
        worktree.as_local().unwrap().repositories()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root")).into()]);

    // Touch the worktree root to force a re-scan; the repository list must be stable.
    fs.touch_path(path!("/root/subproject")).await;
    worktree
        .update(cx, |worktree, _| {
            worktree.as_local().unwrap().scan_complete()
        })
        .await;
    cx.run_until_parked();

    let repos = worktree.update(cx, |worktree, _| {
        worktree.as_local().unwrap().repositories()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root")).into()]);
}
2500
2501#[gpui::test]
2502async fn test_global_gitignore(executor: BackgroundExecutor, cx: &mut TestAppContext) {
2503 init_test(cx);
2504
2505 let home = paths::home_dir();
2506 let fs = FakeFs::new(executor);
2507 fs.insert_tree(
2508 home,
2509 json!({
2510 ".config": {
2511 "git": {
2512 "ignore": "foo\n/bar\nbaz\n"
2513 }
2514 },
2515 "project": {
2516 ".git": {},
2517 ".gitignore": "!baz",
2518 "foo": "",
2519 "bar": "",
2520 "sub": {
2521 "bar": "",
2522 },
2523 "subrepo": {
2524 ".git": {},
2525 "bar": ""
2526 },
2527 "baz": ""
2528 }
2529 }),
2530 )
2531 .await;
2532 let worktree = Worktree::local(
2533 home.join("project"),
2534 true,
2535 fs.clone(),
2536 Arc::default(),
2537 true,
2538 WorktreeId::from_proto(0),
2539 &mut cx.to_async(),
2540 )
2541 .await
2542 .unwrap();
2543 worktree
2544 .update(cx, |worktree, _| {
2545 worktree.as_local().unwrap().scan_complete()
2546 })
2547 .await;
2548 cx.run_until_parked();
2549
2550 // .gitignore overrides excludesFile, and anchored paths in excludesFile are resolved
2551 // relative to the nearest containing repository
2552 worktree.update(cx, |worktree, _cx| {
2553 check_worktree_entries(
2554 worktree,
2555 &[],
2556 &["foo", "bar", "subrepo/bar"],
2557 &["sub/bar", "baz"],
2558 &[],
2559 );
2560 });
2561
2562 // Ignore statuses are updated when excludesFile changes
2563 fs.write(
2564 &home.join(".config").join("git").join("ignore"),
2565 "/bar\nbaz\n".as_bytes(),
2566 )
2567 .await
2568 .unwrap();
2569 worktree
2570 .update(cx, |worktree, _| {
2571 worktree.as_local().unwrap().scan_complete()
2572 })
2573 .await;
2574 cx.run_until_parked();
2575
2576 worktree.update(cx, |worktree, _cx| {
2577 check_worktree_entries(
2578 worktree,
2579 &[],
2580 &["bar", "subrepo/bar"],
2581 &["foo", "sub/bar", "baz"],
2582 &[],
2583 );
2584 });
2585
2586 // Statuses are updated when .git added/removed
2587 fs.remove_dir(
2588 &home.join("project").join("subrepo").join(".git"),
2589 RemoveOptions {
2590 recursive: true,
2591 ..Default::default()
2592 },
2593 )
2594 .await
2595 .unwrap();
2596 worktree
2597 .update(cx, |worktree, _| {
2598 worktree.as_local().unwrap().scan_complete()
2599 })
2600 .await;
2601 cx.run_until_parked();
2602
2603 worktree.update(cx, |worktree, _cx| {
2604 check_worktree_entries(
2605 worktree,
2606 &[],
2607 &["bar"],
2608 &["foo", "sub/bar", "baz", "subrepo/bar"],
2609 &[],
2610 );
2611 });
2612}
2613
2614#[gpui::test]
2615async fn test_repo_exclude(executor: BackgroundExecutor, cx: &mut TestAppContext) {
2616 init_test(cx);
2617
2618 let fs = FakeFs::new(executor);
2619 let project_dir = Path::new(path!("/project"));
2620 fs.insert_tree(
2621 project_dir,
2622 json!({
2623 ".git": {
2624 "info": {
2625 "exclude": ".env.*"
2626 }
2627 },
2628 ".env.example": "secret=xxxx",
2629 ".env.local": "secret=1234",
2630 ".gitignore": "!.env.example",
2631 "README.md": "# Repo Exclude",
2632 "src": {
2633 "main.rs": "fn main() {}",
2634 },
2635 }),
2636 )
2637 .await;
2638
2639 let worktree = Worktree::local(
2640 project_dir,
2641 true,
2642 fs.clone(),
2643 Default::default(),
2644 true,
2645 WorktreeId::from_proto(0),
2646 &mut cx.to_async(),
2647 )
2648 .await
2649 .unwrap();
2650 worktree
2651 .update(cx, |worktree, _| {
2652 worktree.as_local().unwrap().scan_complete()
2653 })
2654 .await;
2655 cx.run_until_parked();
2656
2657 // .gitignore overrides .git/info/exclude
2658 worktree.update(cx, |worktree, _cx| {
2659 let expected_excluded_paths = [];
2660 let expected_ignored_paths = [".env.local"];
2661 let expected_tracked_paths = [".env.example", "README.md", "src/main.rs"];
2662 let expected_included_paths = [];
2663
2664 check_worktree_entries(
2665 worktree,
2666 &expected_excluded_paths,
2667 &expected_ignored_paths,
2668 &expected_tracked_paths,
2669 &expected_included_paths,
2670 );
2671 });
2672
2673 // Ignore statuses are updated when .git/info/exclude file changes
2674 fs.write(
2675 &project_dir.join(DOT_GIT).join(REPO_EXCLUDE),
2676 ".env.example".as_bytes(),
2677 )
2678 .await
2679 .unwrap();
2680 worktree
2681 .update(cx, |worktree, _| {
2682 worktree.as_local().unwrap().scan_complete()
2683 })
2684 .await;
2685 cx.run_until_parked();
2686
2687 worktree.update(cx, |worktree, _cx| {
2688 let expected_excluded_paths = [];
2689 let expected_ignored_paths = [];
2690 let expected_tracked_paths = [".env.example", ".env.local", "README.md", "src/main.rs"];
2691 let expected_included_paths = [];
2692
2693 check_worktree_entries(
2694 worktree,
2695 &expected_excluded_paths,
2696 &expected_ignored_paths,
2697 &expected_tracked_paths,
2698 &expected_included_paths,
2699 );
2700 });
2701}
2702
2703#[track_caller]
2704fn check_worktree_entries(
2705 tree: &Worktree,
2706 expected_excluded_paths: &[&str],
2707 expected_ignored_paths: &[&str],
2708 expected_tracked_paths: &[&str],
2709 expected_included_paths: &[&str],
2710) {
2711 for path in expected_excluded_paths {
2712 let entry = tree.entry_for_path(rel_path(path));
2713 assert!(
2714 entry.is_none(),
2715 "expected path '{path}' to be excluded, but got entry: {entry:?}",
2716 );
2717 }
2718 for path in expected_ignored_paths {
2719 let entry = tree
2720 .entry_for_path(rel_path(path))
2721 .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
2722 assert!(
2723 entry.is_ignored,
2724 "expected path '{path}' to be ignored, but got entry: {entry:?}",
2725 );
2726 }
2727 for path in expected_tracked_paths {
2728 let entry = tree
2729 .entry_for_path(rel_path(path))
2730 .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
2731 assert!(
2732 !entry.is_ignored || entry.is_always_included,
2733 "expected path '{path}' to be tracked, but got entry: {entry:?}",
2734 );
2735 }
2736 for path in expected_included_paths {
2737 let entry = tree
2738 .entry_for_path(rel_path(path))
2739 .unwrap_or_else(|| panic!("Missing entry for expected included path '{path}'"));
2740 assert!(
2741 entry.is_always_included,
2742 "expected path '{path}' to always be included, but got entry: {entry:?}",
2743 );
2744 }
2745}
2746
2747#[gpui::test]
2748async fn test_root_repo_common_dir(executor: BackgroundExecutor, cx: &mut TestAppContext) {
2749 init_test(cx);
2750
2751 use git::repository::Worktree as GitWorktree;
2752
2753 let fs = FakeFs::new(executor);
2754
2755 // Set up a main repo and a linked worktree pointing back to it.
2756 fs.insert_tree(
2757 path!("/main_repo"),
2758 json!({
2759 ".git": {},
2760 "file.txt": "content",
2761 }),
2762 )
2763 .await;
2764 fs.add_linked_worktree_for_repo(
2765 Path::new(path!("/main_repo/.git")),
2766 false,
2767 GitWorktree {
2768 path: PathBuf::from(path!("/linked_worktree")),
2769 ref_name: Some("refs/heads/feature".into()),
2770 sha: "abc123".into(),
2771 is_main: false,
2772 },
2773 )
2774 .await;
2775 fs.write(
2776 path!("/linked_worktree/file.txt").as_ref(),
2777 "content".as_bytes(),
2778 )
2779 .await
2780 .unwrap();
2781
2782 let tree = Worktree::local(
2783 path!("/linked_worktree").as_ref(),
2784 true,
2785 fs.clone(),
2786 Arc::default(),
2787 true,
2788 WorktreeId::from_proto(0),
2789 &mut cx.to_async(),
2790 )
2791 .await
2792 .unwrap();
2793 tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
2794 .await;
2795 cx.run_until_parked();
2796
2797 // For a linked worktree, root_repo_common_dir should point to the
2798 // main repo's .git, not the worktree-specific git directory.
2799 tree.read_with(cx, |tree, _| {
2800 assert_eq!(
2801 tree.snapshot().root_repo_common_dir().map(|p| p.as_ref()),
2802 Some(Path::new(path!("/main_repo/.git"))),
2803 );
2804 });
2805
2806 let event_count: Rc<Cell<usize>> = Rc::new(Cell::new(0));
2807 tree.update(cx, {
2808 let event_count = event_count.clone();
2809 |_, cx| {
2810 cx.subscribe(&cx.entity(), move |_, _, event, _| {
2811 if matches!(event, Event::UpdatedRootRepoCommonDir { .. }) {
2812 event_count.set(event_count.get() + 1);
2813 }
2814 })
2815 .detach();
2816 }
2817 });
2818
2819 // Remove .git — root_repo_common_dir should become None.
2820 fs.remove_file(
2821 &PathBuf::from(path!("/linked_worktree/.git")),
2822 Default::default(),
2823 )
2824 .await
2825 .unwrap();
2826 tree.flush_fs_events(cx).await;
2827
2828 tree.read_with(cx, |tree, _| {
2829 assert_eq!(tree.snapshot().root_repo_common_dir(), None);
2830 });
2831 assert_eq!(
2832 event_count.get(),
2833 1,
2834 "should have emitted UpdatedRootRepoCommonDir on removal"
2835 );
2836}
2837
2838#[gpui::test]
2839async fn test_linked_worktree_git_file_event_does_not_panic(
2840 executor: BackgroundExecutor,
2841 cx: &mut TestAppContext,
2842) {
2843 // Regression test: in a linked worktree, `.git` is a file (containing
2844 // "gitdir: ..."), not a directory. When the background scanner receives
2845 // a filesystem event for a path inside the main repo's `.git` directory
2846 // (which it watches via the commondir), the ancestor-walking code in
2847 // `process_events` calls `is_git_dir` on each ancestor. If `is_git_dir`
2848 // treats `.git` files the same as `.git` directories, it incorrectly
2849 // identifies the gitfile as a git dir, adds it to `dot_git_abs_paths`,
2850 // and `update_git_repositories` panics because the path is outside the
2851 // worktree root.
2852 init_test(cx);
2853
2854 use git::repository::Worktree as GitWorktree;
2855
2856 let fs = FakeFs::new(executor);
2857
2858 fs.insert_tree(
2859 path!("/main_repo"),
2860 json!({
2861 ".git": {},
2862 "file.txt": "content",
2863 }),
2864 )
2865 .await;
2866 fs.add_linked_worktree_for_repo(
2867 Path::new(path!("/main_repo/.git")),
2868 false,
2869 GitWorktree {
2870 path: PathBuf::from(path!("/linked_worktree")),
2871 ref_name: Some("refs/heads/feature".into()),
2872 sha: "abc123".into(),
2873 is_main: false,
2874 },
2875 )
2876 .await;
2877 fs.write(
2878 path!("/linked_worktree/file.txt").as_ref(),
2879 "content".as_bytes(),
2880 )
2881 .await
2882 .unwrap();
2883
2884 let tree = Worktree::local(
2885 path!("/linked_worktree").as_ref(),
2886 true,
2887 fs.clone(),
2888 Arc::default(),
2889 true,
2890 WorktreeId::from_proto(0),
2891 &mut cx.to_async(),
2892 )
2893 .await
2894 .unwrap();
2895 tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
2896 .await;
2897 cx.run_until_parked();
2898
2899 // Trigger a filesystem event inside the main repo's .git directory
2900 // (which the linked worktree scanner watches via the commondir). This
2901 // uses the sentinel-file helper to ensure the event goes through the
2902 // real watcher path, exactly as it would in production.
2903 tree.flush_fs_events_in_root_git_repository(cx).await;
2904
2905 // The worktree should still be intact.
2906 tree.read_with(cx, |tree, _| {
2907 assert_eq!(
2908 tree.snapshot().root_repo_common_dir().map(|p| p.as_ref()),
2909 Some(Path::new(path!("/main_repo/.git"))),
2910 );
2911 });
2912}
2913
2914fn init_test(cx: &mut gpui::TestAppContext) {
2915 zlog::init_test();
2916
2917 cx.update(|cx| {
2918 let settings_store = SettingsStore::test(cx);
2919 cx.set_global(settings_store);
2920 });
2921}
2922
#[gpui::test]
async fn test_load_file_encoding(cx: &mut TestAppContext) {
    // Verifies that `Worktree::load_file` decodes files written in a variety
    // of byte encodings into the expected text, and that bytes which look
    // binary produce an error rather than garbage text.
    init_test(cx);

    // One fixture per file: the raw bytes written to disk and the text that
    // `load_file` is expected to decode them into (`expected_text` is unused
    // for failure cases).
    struct TestCase {
        name: &'static str,
        bytes: Vec<u8>,
        expected_text: &'static str,
    }

    // --- Success Cases ---
    let success_cases = vec![
        TestCase {
            name: "utf8.txt",
            bytes: "こんにちは".as_bytes().to_vec(),
            expected_text: "こんにちは",
        },
        // Shift_JIS bytes for "こんにちは"
        TestCase {
            name: "sjis.txt",
            bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
            expected_text: "こんにちは",
        },
        // EUC-JP bytes for "こんにちは"
        TestCase {
            name: "eucjp.txt",
            bytes: vec![0xa4, 0xb3, 0xa4, 0xf3, 0xa4, 0xcb, 0xa4, 0xc1, 0xa4, 0xcf],
            expected_text: "こんにちは",
        },
        // ISO-2022-JP bytes for "こんにちは" (with escape-sequence shifts)
        TestCase {
            name: "iso2022jp.txt",
            bytes: vec![
                0x1b, 0x24, 0x42, 0x24, 0x33, 0x24, 0x73, 0x24, 0x4b, 0x24, 0x41, 0x24, 0x4f, 0x1b,
                0x28, 0x42,
            ],
            expected_text: "こんにちは",
        },
        // Windows-1252: 0xE9 decodes to "é"
        TestCase {
            name: "win1252.txt",
            bytes: vec![0x43, 0x61, 0x66, 0xe9],
            expected_text: "Café",
        },
        // GBK-encoded Chinese text
        TestCase {
            name: "gbk.txt",
            bytes: vec![
                0xbd, 0xf1, 0xcc, 0xec, 0xcc, 0xec, 0xc6, 0xf8, 0xb2, 0xbb, 0xb4, 0xed,
            ],
            expected_text: "今天天气不错",
        },
        // UTF-16LE with BOM
        TestCase {
            name: "utf16le_bom.txt",
            bytes: vec![
                0xFF, 0xFE, // BOM
                0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F, 0x30,
            ],
            expected_text: "こんにちは",
        },
        // UTF-16BE with BOM
        TestCase {
            name: "utf16be_bom.txt",
            bytes: vec![
                0xFE, 0xFF, // BOM
                0x30, 0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F,
            ],
            expected_text: "こんにちは",
        },
        // UTF-16LE without BOM (ASCII only)
        // This relies on the "null byte heuristic" we implemented.
        // "ABC" -> 41 00 42 00 43 00
        TestCase {
            name: "utf16le_ascii_no_bom.txt",
            bytes: vec![0x41, 0x00, 0x42, 0x00, 0x43, 0x00],
            expected_text: "ABC",
        },
    ];

    // --- Failure Cases ---
    let failure_cases = vec![
        // Binary File (Should be detected by heuristic and return Error)
        // Contains random bytes and mixed nulls that don't match UTF-16 patterns
        TestCase {
            name: "binary.bin",
            bytes: vec![0x00, 0xFF, 0x12, 0x00, 0x99, 0x88, 0x77, 0x66, 0x00],
            expected_text: "", // Not used
        },
    ];

    // Use a platform-appropriate absolute root for the fake filesystem.
    let root_path = if cfg!(windows) {
        Path::new("C:\\root")
    } else {
        Path::new("/root")
    };

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.create_dir(root_path).await.unwrap();

    // Write every fixture (success and failure) into the fake filesystem.
    for case in success_cases.iter().chain(failure_cases.iter()) {
        let path = root_path.join(case.name);
        fs.write(&path, &case.bytes).await.unwrap();
    }

    let tree = Worktree::local(
        root_path,
        true,
        fs,
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Helper: build an `Arc<RelPath>` for a bare file name using the local
    // platform's path style.
    let rel_path = |name: &str| {
        RelPath::new(&Path::new(name), PathStyle::local())
            .unwrap()
            .into_arc()
    };

    // Run Success Tests
    for case in success_cases {
        let loaded = tree
            .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx))
            .await;
        if let Err(e) = &loaded {
            panic!("Failed to load success case '{}': {:?}", case.name, e);
        }
        let loaded = loaded.unwrap();
        assert_eq!(
            loaded.text, case.expected_text,
            "Encoding mismatch for file: {}",
            case.name
        );
    }

    // Run Failure Tests
    for case in failure_cases {
        let loaded = tree
            .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx))
            .await;
        assert!(
            loaded.is_err(),
            "Failure case '{}' unexpectedly succeeded! It should have been detected as binary.",
            case.name
        );
        let err_msg = loaded.unwrap_err().to_string();
        println!("Got expected error for {}: {}", case.name, err_msg);
    }
}
3074
#[gpui::test]
async fn test_write_file_encoding(cx: &mut gpui::TestAppContext) {
    // Verifies that `Worktree::write_file` encodes text into the requested
    // target encoding, emitting a BOM only when `has_bom` is set, by checking
    // the exact bytes that land on disk.
    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Use a platform-appropriate absolute root for the fake filesystem.
    let root_path = if cfg!(windows) {
        Path::new("C:\\root")
    } else {
        Path::new("/root")
    };
    fs.create_dir(root_path).await.unwrap();

    let worktree = Worktree::local(
        root_path,
        true,
        fs.clone(),
        Default::default(),
        true,
        WorktreeId::from_proto(0),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Define test case structure
    // Each case: the text to write, the target encoding, whether a BOM is
    // requested, and the exact bytes expected on disk afterwards.
    struct TestCase {
        name: &'static str,
        text: &'static str,
        encoding: &'static encoding_rs::Encoding,
        has_bom: bool,
        expected_bytes: Vec<u8>,
    }

    let cases = vec![
        // Shift_JIS with Japanese
        TestCase {
            name: "Shift_JIS with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::SHIFT_JIS,
            has_bom: false,
            expected_bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
        },
        // UTF-8 No BOM
        TestCase {
            name: "UTF-8 No BOM",
            text: "AB",
            encoding: encoding_rs::UTF_8,
            has_bom: false,
            expected_bytes: vec![0x41, 0x42],
        },
        // UTF-8 with BOM
        TestCase {
            name: "UTF-8 with BOM",
            text: "AB",
            encoding: encoding_rs::UTF_8,
            has_bom: true,
            expected_bytes: vec![0xEF, 0xBB, 0xBF, 0x41, 0x42],
        },
        // UTF-16LE No BOM with Japanese
        // NOTE: This passes thanks to the manual encoding fix implemented in `write_file`.
        TestCase {
            name: "UTF-16LE No BOM with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::UTF_16LE,
            has_bom: false,
            expected_bytes: vec![0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f, 0x30],
        },
        // UTF-16LE with BOM
        TestCase {
            name: "UTF-16LE with BOM",
            text: "A",
            encoding: encoding_rs::UTF_16LE,
            has_bom: true,
            expected_bytes: vec![0xFF, 0xFE, 0x41, 0x00],
        },
        // UTF-16BE No BOM with Japanese
        // NOTE: This passes thanks to the manual encoding fix.
        TestCase {
            name: "UTF-16BE No BOM with Japanese",
            text: "こんにちは",
            encoding: encoding_rs::UTF_16BE,
            has_bom: false,
            expected_bytes: vec![0x30, 0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f],
        },
        // UTF-16BE with BOM
        TestCase {
            name: "UTF-16BE with BOM",
            text: "A",
            encoding: encoding_rs::UTF_16BE,
            has_bom: true,
            expected_bytes: vec![0xFE, 0xFF, 0x00, 0x41],
        },
    ];

    for (i, case) in cases.into_iter().enumerate() {
        // Each case writes to its own numbered file so results don't collide.
        let file_name = format!("test_{}.txt", i);
        let path: Arc<Path> = Path::new(&file_name).into();
        let file_path = root_path.join(&file_name);

        // Seed an empty file at the target path before writing.
        fs.insert_file(&file_path, "".into()).await;

        let rel_path = RelPath::new(&path, PathStyle::local()).unwrap().into_arc();
        let text = text::Rope::from(case.text);

        let task = worktree.update(cx, |wt, cx| {
            wt.write_file(
                rel_path,
                text,
                text::LineEnding::Unix,
                case.encoding,
                case.has_bom,
                cx,
            )
        });

        if let Err(e) = task.await {
            panic!("Unexpected error in case '{}': {:?}", case.name, e);
        }

        // Compare the raw bytes on disk against the expected encoded output.
        let bytes = fs.load_bytes(&file_path).await.unwrap();

        assert_eq!(
            bytes, case.expected_bytes,
            "case '{}' mismatch. Expected {:?}, but got {:?}",
            case.name, case.expected_bytes, bytes
        );
    }
}
3203
3204#[gpui::test]
3205async fn test_refresh_entries_for_paths_creates_ancestors(cx: &mut TestAppContext) {
3206 init_test(cx);
3207 let fs = FakeFs::new(cx.background_executor.clone());
3208 fs.insert_tree(
3209 "/root",
3210 json!({
3211 "a": {
3212 "b": {
3213 "c": {
3214 "deep_file.txt": "content",
3215 "sibling.txt": "content"
3216 },
3217 "d": {
3218 "under_sibling_dir.txt": "content"
3219 }
3220 }
3221 }
3222 }),
3223 )
3224 .await;
3225
3226 let tree = Worktree::local(
3227 Path::new("/root"),
3228 true,
3229 fs.clone(),
3230 Default::default(),
3231 false, // Disable scanning so the initial scan doesn't discover any entries
3232 WorktreeId::from_proto(0),
3233 &mut cx.to_async(),
3234 )
3235 .await
3236 .unwrap();
3237
3238 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3239 .await;
3240
3241 tree.read_with(cx, |tree, _| {
3242 assert_eq!(
3243 tree.entries(true, 0)
3244 .map(|e| e.path.as_ref())
3245 .collect::<Vec<_>>(),
3246 &[rel_path("")],
3247 "Only root entry should exist when scanning is disabled"
3248 );
3249
3250 assert!(tree.entry_for_path(rel_path("a")).is_none());
3251 assert!(tree.entry_for_path(rel_path("a/b")).is_none());
3252 assert!(tree.entry_for_path(rel_path("a/b/c")).is_none());
3253 assert!(
3254 tree.entry_for_path(rel_path("a/b/c/deep_file.txt"))
3255 .is_none()
3256 );
3257 });
3258
3259 tree.read_with(cx, |tree, _| {
3260 tree.as_local()
3261 .unwrap()
3262 .refresh_entries_for_paths(vec![rel_path("a/b/c/deep_file.txt").into()])
3263 })
3264 .recv()
3265 .await;
3266
3267 tree.read_with(cx, |tree, _| {
3268 assert_eq!(
3269 tree.entries(true, 0)
3270 .map(|e| e.path.as_ref())
3271 .collect::<Vec<_>>(),
3272 &[
3273 rel_path(""),
3274 rel_path("a"),
3275 rel_path("a/b"),
3276 rel_path("a/b/c"),
3277 rel_path("a/b/c/deep_file.txt"),
3278 rel_path("a/b/c/sibling.txt"),
3279 rel_path("a/b/d"),
3280 ],
3281 "All ancestors should be created when refreshing a deeply nested path"
3282 );
3283 });
3284}
3285
3286#[gpui::test]
3287async fn test_single_file_worktree_deleted(cx: &mut TestAppContext) {
3288 init_test(cx);
3289 let fs = FakeFs::new(cx.background_executor.clone());
3290
3291 fs.insert_tree(
3292 "/root",
3293 json!({
3294 "test.txt": "content",
3295 }),
3296 )
3297 .await;
3298
3299 let tree = Worktree::local(
3300 Path::new("/root/test.txt"),
3301 true,
3302 fs.clone(),
3303 Default::default(),
3304 true,
3305 WorktreeId::from_proto(0),
3306 &mut cx.to_async(),
3307 )
3308 .await
3309 .unwrap();
3310
3311 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3312 .await;
3313
3314 tree.read_with(cx, |tree, _| {
3315 assert!(tree.is_single_file(), "Should be a single-file worktree");
3316 assert_eq!(tree.abs_path().as_ref(), Path::new("/root/test.txt"));
3317 });
3318
3319 // Delete the file
3320 fs.remove_file(Path::new("/root/test.txt"), Default::default())
3321 .await
3322 .unwrap();
3323
3324 // Subscribe to worktree events
3325 let deleted_event_received = Rc::new(Cell::new(false));
3326 let _subscription = cx.update({
3327 let deleted_event_received = deleted_event_received.clone();
3328 |cx| {
3329 cx.subscribe(&tree, move |_, event, _| {
3330 if matches!(event, Event::Deleted) {
3331 deleted_event_received.set(true);
3332 }
3333 })
3334 }
3335 });
3336
3337 // Trigger filesystem events - the scanner should detect the file is gone immediately
3338 // and emit a Deleted event
3339 cx.background_executor.run_until_parked();
3340 cx.background_executor
3341 .advance_clock(std::time::Duration::from_secs(1));
3342 cx.background_executor.run_until_parked();
3343
3344 assert!(
3345 deleted_event_received.get(),
3346 "Should receive Deleted event when single-file worktree root is deleted"
3347 );
3348}
3349
3350#[gpui::test]
3351async fn test_remote_worktree_without_git_emits_root_repo_event_after_first_update(
3352 cx: &mut TestAppContext,
3353) {
3354 cx.update(|cx| {
3355 let store = SettingsStore::test(cx);
3356 cx.set_global(store);
3357 });
3358
3359 let client = AnyProtoClient::new(NoopProtoClient::new());
3360
3361 let worktree = cx.update(|cx| {
3362 Worktree::remote(
3363 1,
3364 clock::ReplicaId::new(1),
3365 proto::WorktreeMetadata {
3366 id: 1,
3367 root_name: "project".to_string(),
3368 visible: true,
3369 abs_path: "/home/user/project".to_string(),
3370 root_repo_common_dir: None,
3371 },
3372 client,
3373 PathStyle::Posix,
3374 cx,
3375 )
3376 });
3377
3378 let events: Arc<std::sync::Mutex<Vec<&'static str>>> =
3379 Arc::new(std::sync::Mutex::new(Vec::new()));
3380 let events_clone = events.clone();
3381 cx.update(|cx| {
3382 cx.subscribe(&worktree, move |_, event, _cx| {
3383 if matches!(event, Event::UpdatedRootRepoCommonDir { .. }) {
3384 events_clone
3385 .lock()
3386 .unwrap()
3387 .push("UpdatedRootRepoCommonDir");
3388 }
3389 if matches!(event, Event::UpdatedEntries(_)) {
3390 events_clone.lock().unwrap().push("UpdatedEntries");
3391 }
3392 })
3393 .detach();
3394 });
3395
3396 // Send an update with entries but no repo info (plain directory).
3397 worktree.update(cx, |worktree, _cx| {
3398 worktree
3399 .as_remote()
3400 .unwrap()
3401 .update_from_remote(proto::UpdateWorktree {
3402 project_id: 1,
3403 worktree_id: 1,
3404 abs_path: "/home/user/project".to_string(),
3405 root_name: "project".to_string(),
3406 updated_entries: vec![proto::Entry {
3407 id: 1,
3408 is_dir: true,
3409 path: "".to_string(),
3410 inode: 1,
3411 mtime: Some(proto::Timestamp {
3412 seconds: 0,
3413 nanos: 0,
3414 }),
3415 is_ignored: false,
3416 is_hidden: false,
3417 is_external: false,
3418 is_fifo: false,
3419 size: None,
3420 canonical_path: None,
3421 }],
3422 removed_entries: vec![],
3423 scan_id: 1,
3424 is_last_update: true,
3425 updated_repositories: vec![],
3426 removed_repositories: vec![],
3427 root_repo_common_dir: None,
3428 });
3429 });
3430
3431 cx.run_until_parked();
3432
3433 let fired = events.lock().unwrap();
3434 assert!(
3435 fired.contains(&"UpdatedEntries"),
3436 "UpdatedEntries should fire after remote update"
3437 );
3438 assert!(
3439 fired.contains(&"UpdatedRootRepoCommonDir"),
3440 "UpdatedRootRepoCommonDir should fire after first remote update even when \
3441 root_repo_common_dir is None, to signal that repo state is now known"
3442 );
3443}
3444
3445#[gpui::test]
3446async fn test_remote_worktree_with_git_emits_root_repo_event_when_repo_info_arrives(
3447 cx: &mut TestAppContext,
3448) {
3449 cx.update(|cx| {
3450 let store = SettingsStore::test(cx);
3451 cx.set_global(store);
3452 });
3453
3454 let client = AnyProtoClient::new(NoopProtoClient::new());
3455
3456 let worktree = cx.update(|cx| {
3457 Worktree::remote(
3458 1,
3459 clock::ReplicaId::new(1),
3460 proto::WorktreeMetadata {
3461 id: 1,
3462 root_name: "project".to_string(),
3463 visible: true,
3464 abs_path: "/home/user/project".to_string(),
3465 root_repo_common_dir: None,
3466 },
3467 client,
3468 PathStyle::Posix,
3469 cx,
3470 )
3471 });
3472
3473 let events: Arc<std::sync::Mutex<Vec<&'static str>>> =
3474 Arc::new(std::sync::Mutex::new(Vec::new()));
3475 let events_clone = events.clone();
3476 cx.update(|cx| {
3477 cx.subscribe(&worktree, move |_, event, _cx| {
3478 if matches!(event, Event::UpdatedRootRepoCommonDir { .. }) {
3479 events_clone
3480 .lock()
3481 .unwrap()
3482 .push("UpdatedRootRepoCommonDir");
3483 }
3484 })
3485 .detach();
3486 });
3487
3488 // Send an update where repo info arrives (None -> Some).
3489 worktree.update(cx, |worktree, _cx| {
3490 worktree
3491 .as_remote()
3492 .unwrap()
3493 .update_from_remote(proto::UpdateWorktree {
3494 project_id: 1,
3495 worktree_id: 1,
3496 abs_path: "/home/user/project".to_string(),
3497 root_name: "project".to_string(),
3498 updated_entries: vec![proto::Entry {
3499 id: 1,
3500 is_dir: true,
3501 path: "".to_string(),
3502 inode: 1,
3503 mtime: Some(proto::Timestamp {
3504 seconds: 0,
3505 nanos: 0,
3506 }),
3507 is_ignored: false,
3508 is_hidden: false,
3509 is_external: false,
3510 is_fifo: false,
3511 size: None,
3512 canonical_path: None,
3513 }],
3514 removed_entries: vec![],
3515 scan_id: 1,
3516 is_last_update: true,
3517 updated_repositories: vec![],
3518 removed_repositories: vec![],
3519 root_repo_common_dir: Some("/home/user/project/.git".to_string()),
3520 });
3521 });
3522
3523 cx.run_until_parked();
3524
3525 let fired = events.lock().unwrap();
3526 assert!(
3527 fired.contains(&"UpdatedRootRepoCommonDir"),
3528 "UpdatedRootRepoCommonDir should fire when repo info arrives (None -> Some)"
3529 );
3530 assert_eq!(
3531 fired
3532 .iter()
3533 .filter(|e| **e == "UpdatedRootRepoCommonDir")
3534 .count(),
3535 1,
3536 "should fire exactly once, not duplicate"
3537 );
3538}