1use crate::{
2 project_settings::ProjectSettings,
3 worktree::{Event, Snapshot, WorktreeModelHandle},
4 Entry, EntryKind, PathChange, Project, Worktree,
5};
6use anyhow::Result;
7use client::Client;
8use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
9use git::GITIGNORE;
10use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
11use parking_lot::Mutex;
12use postage::stream::Stream;
13use pretty_assertions::assert_eq;
14use rand::prelude::*;
15use serde_json::json;
16use settings::SettingsStore;
17use std::{
18 env,
19 fmt::Write,
20 mem,
21 path::{Path, PathBuf},
22 sync::Arc,
23};
24use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
25
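// Verifies that `entries(false)` skips gitignored entries while `entries(true)`
// includes them.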
26#[gpui::test]
27async fn test_traversal(cx: &mut TestAppContext) {
28 init_test(cx);
29 let fs = FakeFs::new(cx.background());
30 fs.insert_tree(
31 "/root",
32 json!({
33 ".gitignore": "a/b\n",
34 "a": {
35 "b": "",
36 "c": "",
37 }
38 }),
39 )
40 .await;
41
42 let tree = Worktree::local(
43 build_client(cx),
44 Path::new("/root"),
45 true,
46 fs,
47 Default::default(),
48 &mut cx.to_async(),
49 )
50 .await
51 .unwrap();
52 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
53 .await;
54
55 tree.read_with(cx, |tree, _| {
56 assert_eq!(
57 tree.entries(false)
58 .map(|entry| entry.path.as_ref())
59 .collect::<Vec<_>>(),
60 vec![
61 Path::new(""),
62 Path::new(".gitignore"),
63 Path::new("a"),
64 Path::new("a/c"),
65 ]
66 );
67 assert_eq!(
68 tree.entries(true)
69 .map(|entry| entry.path.as_ref())
70 .collect::<Vec<_>>(),
71 vec![
72 Path::new(""),
73 Path::new(".gitignore"),
74 Path::new("a"),
75 Path::new("a/b"),
76 Path::new("a/c"),
77 ]
78 );
79 })
80}
81
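// Verifies `descendent_entries` with and without directories and ignored entries,
// both before and after a gitignored directory is expanded.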
82#[gpui::test]
83async fn test_descendent_entries(cx: &mut TestAppContext) {
84 init_test(cx);
85 let fs = FakeFs::new(cx.background());
86 fs.insert_tree(
87 "/root",
88 json!({
89 "a": "",
90 "b": {
91 "c": {
92 "d": ""
93 },
94 "e": {}
95 },
96 "f": "",
97 "g": {
98 "h": {}
99 },
100 "i": {
101 "j": {
102 "k": ""
103 },
                "l": {}
107 },
108 ".gitignore": "i/j\n",
109 }),
110 )
111 .await;
112
113 let tree = Worktree::local(
114 build_client(cx),
115 Path::new("/root"),
116 true,
117 fs,
118 Default::default(),
119 &mut cx.to_async(),
120 )
121 .await
122 .unwrap();
123 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
124 .await;
125
126 tree.read_with(cx, |tree, _| {
127 assert_eq!(
128 tree.descendent_entries(false, false, Path::new("b"))
129 .map(|entry| entry.path.as_ref())
130 .collect::<Vec<_>>(),
131 vec![Path::new("b/c/d"),]
132 );
133 assert_eq!(
134 tree.descendent_entries(true, false, Path::new("b"))
135 .map(|entry| entry.path.as_ref())
136 .collect::<Vec<_>>(),
137 vec![
138 Path::new("b"),
139 Path::new("b/c"),
140 Path::new("b/c/d"),
141 Path::new("b/e"),
142 ]
143 );
144
145 assert_eq!(
146 tree.descendent_entries(false, false, Path::new("g"))
147 .map(|entry| entry.path.as_ref())
148 .collect::<Vec<_>>(),
149 Vec::<PathBuf>::new()
150 );
151 assert_eq!(
152 tree.descendent_entries(true, false, Path::new("g"))
153 .map(|entry| entry.path.as_ref())
154 .collect::<Vec<_>>(),
155 vec![Path::new("g"), Path::new("g/h"),]
156 );
157 });
158
159 // Expand gitignored directory.
160 tree.read_with(cx, |tree, _| {
161 tree.as_local()
162 .unwrap()
163 .refresh_entries_for_paths(vec![Path::new("i/j").into()])
164 })
165 .recv()
166 .await;
167
168 tree.read_with(cx, |tree, _| {
169 assert_eq!(
170 tree.descendent_entries(false, false, Path::new("i"))
171 .map(|entry| entry.path.as_ref())
172 .collect::<Vec<_>>(),
173 Vec::<PathBuf>::new()
174 );
175 assert_eq!(
176 tree.descendent_entries(false, true, Path::new("i"))
177 .map(|entry| entry.path.as_ref())
178 .collect::<Vec<_>>(),
179 vec![Path::new("i/j/k")]
180 );
181 assert_eq!(
182 tree.descendent_entries(true, false, Path::new("i"))
183 .map(|entry| entry.path.as_ref())
184 .collect::<Vec<_>>(),
185 vec![Path::new("i"), Path::new("i/l"),]
186 );
187 })
188}
189
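// Verifies that symlinks forming a cycle are listed as entries without being
// followed endlessly, and that renaming one of them is picked up on rescan.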
190#[gpui::test(iterations = 10)]
191async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
192 init_test(cx);
193 let fs = FakeFs::new(cx.background());
194 fs.insert_tree(
195 "/root",
196 json!({
197 "lib": {
198 "a": {
199 "a.txt": ""
200 },
201 "b": {
202 "b.txt": ""
203 }
204 }
205 }),
206 )
207 .await;
208 fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
209 fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
210
211 let tree = Worktree::local(
212 build_client(cx),
213 Path::new("/root"),
214 true,
215 fs.clone(),
216 Default::default(),
217 &mut cx.to_async(),
218 )
219 .await
220 .unwrap();
221
222 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
223 .await;
224
225 tree.read_with(cx, |tree, _| {
226 assert_eq!(
227 tree.entries(false)
228 .map(|entry| entry.path.as_ref())
229 .collect::<Vec<_>>(),
230 vec![
231 Path::new(""),
232 Path::new("lib"),
233 Path::new("lib/a"),
234 Path::new("lib/a/a.txt"),
235 Path::new("lib/a/lib"),
236 Path::new("lib/b"),
237 Path::new("lib/b/b.txt"),
238 Path::new("lib/b/lib"),
239 ]
240 );
241 });
242
243 fs.rename(
244 Path::new("/root/lib/a/lib"),
245 Path::new("/root/lib/a/lib-2"),
246 Default::default(),
247 )
248 .await
249 .unwrap();
250 executor.run_until_parked();
251 tree.read_with(cx, |tree, _| {
252 assert_eq!(
253 tree.entries(false)
254 .map(|entry| entry.path.as_ref())
255 .collect::<Vec<_>>(),
256 vec![
257 Path::new(""),
258 Path::new("lib"),
259 Path::new("lib/a"),
260 Path::new("lib/a/a.txt"),
261 Path::new("lib/a/lib-2"),
262 Path::new("lib/b"),
263 Path::new("lib/b/b.txt"),
264 Path::new("lib/b/lib"),
265 ]
266 );
267 });
268}
269
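// Verifies that symlinks targeting directories outside the worktree root are
// marked as external, left unloaded by default, and scanned lazily when expanded.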
270#[gpui::test]
271async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
272 init_test(cx);
273 let fs = FakeFs::new(cx.background());
274 fs.insert_tree(
275 "/root",
276 json!({
277 "dir1": {
278 "deps": {
                    // symlinks to dir2 and dir3 are inserted here below
280 },
281 "src": {
282 "a.rs": "",
283 "b.rs": "",
284 },
285 },
286 "dir2": {
287 "src": {
288 "c.rs": "",
289 "d.rs": "",
290 }
291 },
292 "dir3": {
293 "deps": {},
294 "src": {
295 "e.rs": "",
296 "f.rs": "",
297 },
298 }
299 }),
300 )
301 .await;
302
303 // These symlinks point to directories outside of the worktree's root, dir1.
304 fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
305 .await;
306 fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
307 .await;
308
309 let tree = Worktree::local(
310 build_client(cx),
311 Path::new("/root/dir1"),
312 true,
313 fs.clone(),
314 Default::default(),
315 &mut cx.to_async(),
316 )
317 .await
318 .unwrap();
319
320 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
321 .await;
322
323 let tree_updates = Arc::new(Mutex::new(Vec::new()));
324 tree.update(cx, |_, cx| {
325 let tree_updates = tree_updates.clone();
326 cx.subscribe(&tree, move |_, _, event, _| {
327 if let Event::UpdatedEntries(update) = event {
328 tree_updates.lock().extend(
329 update
330 .iter()
331 .map(|(path, _, change)| (path.clone(), *change)),
332 );
333 }
334 })
335 .detach();
336 });
337
338 // The symlinked directories are not scanned by default.
339 tree.read_with(cx, |tree, _| {
340 assert_eq!(
341 tree.entries(true)
342 .map(|entry| (entry.path.as_ref(), entry.is_external))
343 .collect::<Vec<_>>(),
344 vec![
345 (Path::new(""), false),
346 (Path::new("deps"), false),
347 (Path::new("deps/dep-dir2"), true),
348 (Path::new("deps/dep-dir3"), true),
349 (Path::new("src"), false),
350 (Path::new("src/a.rs"), false),
351 (Path::new("src/b.rs"), false),
352 ]
353 );
354
355 assert_eq!(
356 tree.entry_for_path("deps/dep-dir2").unwrap().kind,
357 EntryKind::UnloadedDir
358 );
359 });
360
361 // Expand one of the symlinked directories.
362 tree.read_with(cx, |tree, _| {
363 tree.as_local()
364 .unwrap()
365 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
366 })
367 .recv()
368 .await;
369
370 // The expanded directory's contents are loaded. Subdirectories are
371 // not scanned yet.
372 tree.read_with(cx, |tree, _| {
373 assert_eq!(
374 tree.entries(true)
375 .map(|entry| (entry.path.as_ref(), entry.is_external))
376 .collect::<Vec<_>>(),
377 vec![
378 (Path::new(""), false),
379 (Path::new("deps"), false),
380 (Path::new("deps/dep-dir2"), true),
381 (Path::new("deps/dep-dir3"), true),
382 (Path::new("deps/dep-dir3/deps"), true),
383 (Path::new("deps/dep-dir3/src"), true),
384 (Path::new("src"), false),
385 (Path::new("src/a.rs"), false),
386 (Path::new("src/b.rs"), false),
387 ]
388 );
389 });
390 assert_eq!(
391 mem::take(&mut *tree_updates.lock()),
392 &[
393 (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
394 (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
395 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
396 ]
397 );
398
399 // Expand a subdirectory of one of the symlinked directories.
400 tree.read_with(cx, |tree, _| {
401 tree.as_local()
402 .unwrap()
403 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
404 })
405 .recv()
406 .await;
407
408 // The expanded subdirectory's contents are loaded.
409 tree.read_with(cx, |tree, _| {
410 assert_eq!(
411 tree.entries(true)
412 .map(|entry| (entry.path.as_ref(), entry.is_external))
413 .collect::<Vec<_>>(),
414 vec![
415 (Path::new(""), false),
416 (Path::new("deps"), false),
417 (Path::new("deps/dep-dir2"), true),
418 (Path::new("deps/dep-dir3"), true),
419 (Path::new("deps/dep-dir3/deps"), true),
420 (Path::new("deps/dep-dir3/src"), true),
421 (Path::new("deps/dep-dir3/src/e.rs"), true),
422 (Path::new("deps/dep-dir3/src/f.rs"), true),
423 (Path::new("src"), false),
424 (Path::new("src/a.rs"), false),
425 (Path::new("src/b.rs"), false),
426 ]
427 );
428 });
429
430 assert_eq!(
431 mem::take(&mut *tree_updates.lock()),
432 &[
433 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
434 (
435 Path::new("deps/dep-dir3/src/e.rs").into(),
436 PathChange::Loaded
437 ),
438 (
439 Path::new("deps/dep-dir3/src/f.rs").into(),
440 PathChange::Loaded
441 )
442 ]
443 );
444}
445
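// Verifies that opening a buffer deep inside an unexpanded gitignored directory
// loads only the directories needed to reach it, and that changes within
// still-unloaded directories trigger no filesystem work.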
446#[gpui::test]
447async fn test_open_gitignored_files(cx: &mut TestAppContext) {
448 init_test(cx);
449 let fs = FakeFs::new(cx.background());
450 fs.insert_tree(
451 "/root",
452 json!({
453 ".gitignore": "node_modules\n",
454 "one": {
455 "node_modules": {
456 "a": {
457 "a1.js": "a1",
458 "a2.js": "a2",
459 },
460 "b": {
461 "b1.js": "b1",
462 "b2.js": "b2",
463 },
464 "c": {
465 "c1.js": "c1",
466 "c2.js": "c2",
467 }
468 },
469 },
470 "two": {
471 "x.js": "",
472 "y.js": "",
473 },
474 }),
475 )
476 .await;
477
478 let tree = Worktree::local(
479 build_client(cx),
480 Path::new("/root"),
481 true,
482 fs.clone(),
483 Default::default(),
484 &mut cx.to_async(),
485 )
486 .await
487 .unwrap();
488
489 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
490 .await;
491
492 tree.read_with(cx, |tree, _| {
493 assert_eq!(
494 tree.entries(true)
495 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
496 .collect::<Vec<_>>(),
497 vec![
498 (Path::new(""), false),
499 (Path::new(".gitignore"), false),
500 (Path::new("one"), false),
501 (Path::new("one/node_modules"), true),
502 (Path::new("two"), false),
503 (Path::new("two/x.js"), false),
504 (Path::new("two/y.js"), false),
505 ]
506 );
507 });
508
509 // Open a file that is nested inside of a gitignored directory that
510 // has not yet been expanded.
511 let prev_read_dir_count = fs.read_dir_call_count();
512 let buffer = tree
513 .update(cx, |tree, cx| {
514 tree.as_local_mut()
515 .unwrap()
516 .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
517 })
518 .await
519 .unwrap();
520
521 tree.read_with(cx, |tree, cx| {
522 assert_eq!(
523 tree.entries(true)
524 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
525 .collect::<Vec<_>>(),
526 vec![
527 (Path::new(""), false),
528 (Path::new(".gitignore"), false),
529 (Path::new("one"), false),
530 (Path::new("one/node_modules"), true),
531 (Path::new("one/node_modules/a"), true),
532 (Path::new("one/node_modules/b"), true),
533 (Path::new("one/node_modules/b/b1.js"), true),
534 (Path::new("one/node_modules/b/b2.js"), true),
535 (Path::new("one/node_modules/c"), true),
536 (Path::new("two"), false),
537 (Path::new("two/x.js"), false),
538 (Path::new("two/y.js"), false),
539 ]
540 );
541
542 assert_eq!(
543 buffer.read(cx).file().unwrap().path().as_ref(),
544 Path::new("one/node_modules/b/b1.js")
545 );
546
547 // Only the newly-expanded directories are scanned.
548 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
549 });
550
551 // Open another file in a different subdirectory of the same
552 // gitignored directory.
553 let prev_read_dir_count = fs.read_dir_call_count();
554 let buffer = tree
555 .update(cx, |tree, cx| {
556 tree.as_local_mut()
557 .unwrap()
558 .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
559 })
560 .await
561 .unwrap();
562
563 tree.read_with(cx, |tree, cx| {
564 assert_eq!(
565 tree.entries(true)
566 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
567 .collect::<Vec<_>>(),
568 vec![
569 (Path::new(""), false),
570 (Path::new(".gitignore"), false),
571 (Path::new("one"), false),
572 (Path::new("one/node_modules"), true),
573 (Path::new("one/node_modules/a"), true),
574 (Path::new("one/node_modules/a/a1.js"), true),
575 (Path::new("one/node_modules/a/a2.js"), true),
576 (Path::new("one/node_modules/b"), true),
577 (Path::new("one/node_modules/b/b1.js"), true),
578 (Path::new("one/node_modules/b/b2.js"), true),
579 (Path::new("one/node_modules/c"), true),
580 (Path::new("two"), false),
581 (Path::new("two/x.js"), false),
582 (Path::new("two/y.js"), false),
583 ]
584 );
585
586 assert_eq!(
587 buffer.read(cx).file().unwrap().path().as_ref(),
588 Path::new("one/node_modules/a/a2.js")
589 );
590
591 // Only the newly-expanded directory is scanned.
592 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
593 });
594
595 // No work happens when files and directories change within an unloaded directory.
596 let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
597 fs.create_dir("/root/one/node_modules/c/lib".as_ref())
598 .await
599 .unwrap();
600 cx.foreground().run_until_parked();
601 assert_eq!(
602 fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
603 0
604 );
605}
606
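// Verifies that when the .gitignore changes so that a previously ignored
// directory is no longer ignored, its unloaded subdirectories are scanned
// exactly once.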
607#[gpui::test]
608async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
609 init_test(cx);
610 let fs = FakeFs::new(cx.background());
611 fs.insert_tree(
612 "/root",
613 json!({
614 ".gitignore": "node_modules\n",
615 "a": {
616 "a.js": "",
617 },
618 "b": {
619 "b.js": "",
620 },
621 "node_modules": {
622 "c": {
623 "c.js": "",
624 },
625 "d": {
626 "d.js": "",
627 "e": {
628 "e1.js": "",
629 "e2.js": "",
630 },
631 "f": {
632 "f1.js": "",
633 "f2.js": "",
634 }
635 },
636 },
637 }),
638 )
639 .await;
640
641 let tree = Worktree::local(
642 build_client(cx),
643 Path::new("/root"),
644 true,
645 fs.clone(),
646 Default::default(),
647 &mut cx.to_async(),
648 )
649 .await
650 .unwrap();
651
652 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
653 .await;
654
    // Refresh the path of a file within the gitignored directory, forcing some
    // of its subdirectories to be read, but not all.
657 let read_dir_count_1 = fs.read_dir_call_count();
658 tree.read_with(cx, |tree, _| {
659 tree.as_local()
660 .unwrap()
661 .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
662 })
663 .recv()
664 .await;
665
666 // Those subdirectories are now loaded.
667 tree.read_with(cx, |tree, _| {
668 assert_eq!(
669 tree.entries(true)
670 .map(|e| (e.path.as_ref(), e.is_ignored))
671 .collect::<Vec<_>>(),
672 &[
673 (Path::new(""), false),
674 (Path::new(".gitignore"), false),
675 (Path::new("a"), false),
676 (Path::new("a/a.js"), false),
677 (Path::new("b"), false),
678 (Path::new("b/b.js"), false),
679 (Path::new("node_modules"), true),
680 (Path::new("node_modules/c"), true),
681 (Path::new("node_modules/d"), true),
682 (Path::new("node_modules/d/d.js"), true),
683 (Path::new("node_modules/d/e"), true),
684 (Path::new("node_modules/d/f"), true),
685 ]
686 );
687 });
688 let read_dir_count_2 = fs.read_dir_call_count();
689 assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
690
691 // Update the gitignore so that node_modules is no longer ignored,
    // but one of its subdirectories is ignored.
693 fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
694 .await
695 .unwrap();
696 cx.foreground().run_until_parked();
697
698 // All of the directories that are no longer ignored are now loaded.
699 tree.read_with(cx, |tree, _| {
700 assert_eq!(
701 tree.entries(true)
702 .map(|e| (e.path.as_ref(), e.is_ignored))
703 .collect::<Vec<_>>(),
704 &[
705 (Path::new(""), false),
706 (Path::new(".gitignore"), false),
707 (Path::new("a"), false),
708 (Path::new("a/a.js"), false),
709 (Path::new("b"), false),
710 (Path::new("b/b.js"), false),
711 // This directory is no longer ignored
712 (Path::new("node_modules"), false),
713 (Path::new("node_modules/c"), false),
714 (Path::new("node_modules/c/c.js"), false),
715 (Path::new("node_modules/d"), false),
716 (Path::new("node_modules/d/d.js"), false),
717 // This subdirectory is now ignored
718 (Path::new("node_modules/d/e"), true),
719 (Path::new("node_modules/d/f"), false),
720 (Path::new("node_modules/d/f/f1.js"), false),
721 (Path::new("node_modules/d/f/f2.js"), false),
722 ]
723 );
724 });
725
726 // Each of the newly-loaded directories is scanned only once.
727 let read_dir_count_3 = fs.read_dir_call_count();
728 assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
729}
730
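// Verifies ignore statuses coming from both an ancestor .gitignore and the
// worktree's own .gitignore, including for files created after the initial scan.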
731#[gpui::test(iterations = 10)]
732async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
733 init_test(cx);
734 let fs = FakeFs::new(cx.background());
735 fs.insert_tree(
736 "/root",
737 json!({
738 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
739 "tree": {
740 ".git": {},
741 ".gitignore": "ignored-dir\n",
742 "tracked-dir": {
743 "tracked-file1": "",
744 "ancestor-ignored-file1": "",
745 },
746 "ignored-dir": {
747 "ignored-file1": ""
748 }
749 }
750 }),
751 )
752 .await;
753
754 let tree = Worktree::local(
755 build_client(cx),
756 "/root/tree".as_ref(),
757 true,
758 fs.clone(),
759 Default::default(),
760 &mut cx.to_async(),
761 )
762 .await
763 .unwrap();
764 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
765 .await;
766
767 tree.read_with(cx, |tree, _| {
768 tree.as_local()
769 .unwrap()
770 .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
771 })
772 .recv()
773 .await;
774
775 cx.read(|cx| {
776 let tree = tree.read(cx);
777 assert!(
778 !tree
779 .entry_for_path("tracked-dir/tracked-file1")
780 .unwrap()
781 .is_ignored
782 );
783 assert!(
784 tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
785 .unwrap()
786 .is_ignored
787 );
788 assert!(
789 tree.entry_for_path("ignored-dir/ignored-file1")
790 .unwrap()
791 .is_ignored
792 );
793 });
794
795 fs.create_file(
796 "/root/tree/tracked-dir/tracked-file2".as_ref(),
797 Default::default(),
798 )
799 .await
800 .unwrap();
801 fs.create_file(
802 "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
803 Default::default(),
804 )
805 .await
806 .unwrap();
807 fs.create_file(
808 "/root/tree/ignored-dir/ignored-file2".as_ref(),
809 Default::default(),
810 )
811 .await
812 .unwrap();
813
814 cx.foreground().run_until_parked();
815 cx.read(|cx| {
816 let tree = tree.read(cx);
817 assert!(
818 !tree
819 .entry_for_path("tracked-dir/tracked-file2")
820 .unwrap()
821 .is_ignored
822 );
823 assert!(
824 tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
825 .unwrap()
826 .is_ignored
827 );
828 assert!(
829 tree.entry_for_path("ignored-dir/ignored-file2")
830 .unwrap()
831 .is_ignored
832 );
833 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
834 });
835}
836
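// Verifies that files written through the worktree are given entries with the
// correct ignore status, using the real filesystem.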
837#[gpui::test]
838async fn test_write_file(cx: &mut TestAppContext) {
839 init_test(cx);
840 let dir = temp_tree(json!({
841 ".git": {},
842 ".gitignore": "ignored-dir\n",
843 "tracked-dir": {},
844 "ignored-dir": {}
845 }));
846
847 let tree = Worktree::local(
848 build_client(cx),
849 dir.path(),
850 true,
851 Arc::new(RealFs),
852 Default::default(),
853 &mut cx.to_async(),
854 )
855 .await
856 .unwrap();
857 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
858 .await;
859 tree.flush_fs_events(cx).await;
860
861 tree.update(cx, |tree, cx| {
862 tree.as_local().unwrap().write_file(
863 Path::new("tracked-dir/file.txt"),
864 "hello".into(),
865 Default::default(),
866 cx,
867 )
868 })
869 .await
870 .unwrap();
871 tree.update(cx, |tree, cx| {
872 tree.as_local().unwrap().write_file(
873 Path::new("ignored-dir/file.txt"),
874 "world".into(),
875 Default::default(),
876 cx,
877 )
878 })
879 .await
880 .unwrap();
881
882 tree.read_with(cx, |tree, _| {
883 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
884 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
885 assert!(!tracked.is_ignored);
886 assert!(ignored.is_ignored);
887 });
888}
889
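// Verifies that the `file_scan_exclusions` setting excludes matching paths from
// the worktree, and that changing the setting rescans accordingly.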
890#[gpui::test]
891async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
892 init_test(cx);
893 let dir = temp_tree(json!({
894 ".gitignore": "**/target\n/node_modules\n",
895 "target": {
896 "index": "blah2"
897 },
898 "node_modules": {
899 ".DS_Store": "",
900 "prettier": {
901 "package.json": "{}",
902 },
903 },
904 "src": {
905 ".DS_Store": "",
906 "foo": {
907 "foo.rs": "mod another;\n",
908 "another.rs": "// another",
909 },
910 "bar": {
911 "bar.rs": "// bar",
912 },
913 "lib.rs": "mod foo;\nmod bar;\n",
914 },
915 ".DS_Store": "",
916 }));
917 cx.update(|cx| {
918 cx.update_global::<SettingsStore, _, _>(|store, cx| {
919 store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
920 project_settings.file_scan_exclusions =
921 Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
922 });
923 });
924 });
925
926 let tree = Worktree::local(
927 build_client(cx),
928 dir.path(),
929 true,
930 Arc::new(RealFs),
931 Default::default(),
932 &mut cx.to_async(),
933 )
934 .await
935 .unwrap();
936 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
937 .await;
938 tree.flush_fs_events(cx).await;
939 tree.read_with(cx, |tree, _| {
940 check_worktree_entries(
941 tree,
942 &[
943 "src/foo/foo.rs",
944 "src/foo/another.rs",
945 "node_modules/.DS_Store",
946 "src/.DS_Store",
947 ".DS_Store",
948 ],
949 &["target/index", "node_modules/prettier/package.json"],
950 &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
951 )
952 });
953
954 cx.update(|cx| {
955 cx.update_global::<SettingsStore, _, _>(|store, cx| {
956 store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
957 project_settings.file_scan_exclusions =
958 Some(vec!["**/node_modules/**".to_string()]);
959 });
960 });
961 });
962 tree.flush_fs_events(cx).await;
963 cx.foreground().run_until_parked();
964 tree.read_with(cx, |tree, _| {
965 check_worktree_entries(
966 tree,
967 &[
968 "node_modules/prettier/package.json",
969 "node_modules/.DS_Store",
970 ],
971 &["target/index"],
972 &[
973 ".gitignore",
974 "src/lib.rs",
975 "src/bar/bar.rs",
976 "src/foo/foo.rs",
977 "src/foo/another.rs",
978 "src/.DS_Store",
979 ".DS_Store",
980 ],
981 )
982 });
983}
984
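// Verifies that creating a directory entry while the initial scan is still in
// progress produces updates that keep an observing snapshot in sync.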
985#[gpui::test(iterations = 30)]
986async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
987 init_test(cx);
988 let fs = FakeFs::new(cx.background());
989 fs.insert_tree(
990 "/root",
991 json!({
992 "b": {},
993 "c": {},
994 "d": {},
995 }),
996 )
997 .await;
998
999 let tree = Worktree::local(
1000 build_client(cx),
1001 "/root".as_ref(),
1002 true,
1003 fs,
1004 Default::default(),
1005 &mut cx.to_async(),
1006 )
1007 .await
1008 .unwrap();
1009
1010 let snapshot1 = tree.update(cx, |tree, cx| {
1011 let tree = tree.as_local_mut().unwrap();
1012 let snapshot = Arc::new(Mutex::new(tree.snapshot()));
1013 let _ = tree.observe_updates(0, cx, {
1014 let snapshot = snapshot.clone();
1015 move |update| {
1016 snapshot.lock().apply_remote_update(update).unwrap();
1017 async { true }
1018 }
1019 });
1020 snapshot
1021 });
1022
1023 let entry = tree
1024 .update(cx, |tree, cx| {
1025 tree.as_local_mut()
1026 .unwrap()
1027 .create_entry("a/e".as_ref(), true, cx)
1028 })
1029 .await
1030 .unwrap();
1031 assert!(entry.is_dir());
1032
1033 cx.foreground().run_until_parked();
1034 tree.read_with(cx, |tree, _| {
1035 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
1036 });
1037
1038 let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
1039 assert_eq!(
1040 snapshot1.lock().entries(true).collect::<Vec<_>>(),
1041 snapshot2.entries(true).collect::<Vec<_>>()
1042 );
1043}
1044
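// Verifies that `create_entry` creates any missing parent directories, on both
// the fake and the real filesystem.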
1045#[gpui::test]
1046async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
1047 init_test(cx);
1048 let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
1049
1050 let fs_fake = FakeFs::new(cx.background());
1051 fs_fake
1052 .insert_tree(
1053 "/root",
1054 json!({
1055 "a": {},
1056 }),
1057 )
1058 .await;
1059
1060 let tree_fake = Worktree::local(
1061 client_fake,
1062 "/root".as_ref(),
1063 true,
1064 fs_fake,
1065 Default::default(),
1066 &mut cx.to_async(),
1067 )
1068 .await
1069 .unwrap();
1070
1071 let entry = tree_fake
1072 .update(cx, |tree, cx| {
1073 tree.as_local_mut()
1074 .unwrap()
1075 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1076 })
1077 .await
1078 .unwrap();
1079 assert!(entry.is_file());
1080
1081 cx.foreground().run_until_parked();
1082 tree_fake.read_with(cx, |tree, _| {
1083 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1084 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1085 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1086 });
1087
1088 let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
1089
1090 let fs_real = Arc::new(RealFs);
1091 let temp_root = temp_tree(json!({
1092 "a": {}
1093 }));
1094
1095 let tree_real = Worktree::local(
1096 client_real,
1097 temp_root.path(),
1098 true,
1099 fs_real,
1100 Default::default(),
1101 &mut cx.to_async(),
1102 )
1103 .await
1104 .unwrap();
1105
1106 let entry = tree_real
1107 .update(cx, |tree, cx| {
1108 tree.as_local_mut()
1109 .unwrap()
1110 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1111 })
1112 .await
1113 .unwrap();
1114 assert!(entry.is_file());
1115
1116 cx.foreground().run_until_parked();
1117 tree_real.read_with(cx, |tree, _| {
1118 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1119 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1120 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1121 });
1122
    // Smallest change: create a file inside a directory tree that already exists.
1124 let entry = tree_real
1125 .update(cx, |tree, cx| {
1126 tree.as_local_mut()
1127 .unwrap()
1128 .create_entry("a/b/c/e.txt".as_ref(), false, cx)
1129 })
1130 .await
1131 .unwrap();
1132 assert!(entry.is_file());
1133
1134 cx.foreground().run_until_parked();
1135 tree_real.read_with(cx, |tree, _| {
1136 assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
1137 });
1138
    // Largest change: create a file whose parent directories don't exist yet.
1140 let entry = tree_real
1141 .update(cx, |tree, cx| {
1142 tree.as_local_mut()
1143 .unwrap()
1144 .create_entry("d/e/f/g.txt".as_ref(), false, cx)
1145 })
1146 .await
1147 .unwrap();
1148 assert!(entry.is_file());
1149
1150 cx.foreground().run_until_parked();
1151 tree_real.read_with(cx, |tree, _| {
1152 assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
1153 assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
1154 assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
1155 assert!(tree.entry_for_path("d/").unwrap().is_dir());
1156 });
1157}
1158
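// Randomized test: mutates the worktree while the initial scan is in progress,
// then checks that replaying the observed updates onto earlier snapshots
// reproduces the final state.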
1159#[gpui::test(iterations = 100)]
1160async fn test_random_worktree_operations_during_initial_scan(
1161 cx: &mut TestAppContext,
1162 mut rng: StdRng,
1163) {
1164 init_test(cx);
1165 let operations = env::var("OPERATIONS")
1166 .map(|o| o.parse().unwrap())
1167 .unwrap_or(5);
1168 let initial_entries = env::var("INITIAL_ENTRIES")
1169 .map(|o| o.parse().unwrap())
1170 .unwrap_or(20);
1171
1172 let root_dir = Path::new("/test");
1173 let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
1174 fs.as_fake().insert_tree(root_dir, json!({})).await;
1175 for _ in 0..initial_entries {
1176 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1177 }
1178 log::info!("generated initial tree");
1179
1180 let worktree = Worktree::local(
1181 build_client(cx),
1182 root_dir,
1183 true,
1184 fs.clone(),
1185 Default::default(),
1186 &mut cx.to_async(),
1187 )
1188 .await
1189 .unwrap();
1190
1191 let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
1192 let updates = Arc::new(Mutex::new(Vec::new()));
1193 worktree.update(cx, |tree, cx| {
1194 check_worktree_change_events(tree, cx);
1195
1196 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1197 let updates = updates.clone();
1198 move |update| {
1199 updates.lock().push(update);
1200 async { true }
1201 }
1202 });
1203 });
1204
1205 for _ in 0..operations {
1206 worktree
1207 .update(cx, |worktree, cx| {
1208 randomly_mutate_worktree(worktree, &mut rng, cx)
1209 })
1210 .await
1211 .log_err();
1212 worktree.read_with(cx, |tree, _| {
1213 tree.as_local().unwrap().snapshot().check_invariants(true)
1214 });
1215
1216 if rng.gen_bool(0.6) {
1217 snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
1218 }
1219 }
1220
1221 worktree
1222 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1223 .await;
1224
1225 cx.foreground().run_until_parked();
1226
1227 let final_snapshot = worktree.read_with(cx, |tree, _| {
1228 let tree = tree.as_local().unwrap();
1229 let snapshot = tree.snapshot();
1230 snapshot.check_invariants(true);
1231 snapshot
1232 });
1233
1234 for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
1235 let mut updated_snapshot = snapshot.clone();
1236 for update in updates.lock().iter() {
1237 if update.scan_id >= updated_snapshot.scan_id() as u64 {
1238 updated_snapshot
1239 .apply_remote_update(update.clone())
1240 .unwrap();
1241 }
1242 }
1243
1244 assert_eq!(
1245 updated_snapshot.entries(true).collect::<Vec<_>>(),
1246 final_snapshot.entries(true).collect::<Vec<_>>(),
1247 "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
1248 );
1249 }
1250}
1251
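// Randomized test: interleaves worktree and filesystem mutations with batched
// fs events, then checks snapshot invariants, compares against a fresh scan,
// and replays updates onto stored snapshots.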
1252#[gpui::test(iterations = 100)]
1253async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
1254 init_test(cx);
1255 let operations = env::var("OPERATIONS")
1256 .map(|o| o.parse().unwrap())
1257 .unwrap_or(40);
1258 let initial_entries = env::var("INITIAL_ENTRIES")
1259 .map(|o| o.parse().unwrap())
1260 .unwrap_or(20);
1261
1262 let root_dir = Path::new("/test");
1263 let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
1264 fs.as_fake().insert_tree(root_dir, json!({})).await;
1265 for _ in 0..initial_entries {
1266 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1267 }
1268 log::info!("generated initial tree");
1269
1270 let worktree = Worktree::local(
1271 build_client(cx),
1272 root_dir,
1273 true,
1274 fs.clone(),
1275 Default::default(),
1276 &mut cx.to_async(),
1277 )
1278 .await
1279 .unwrap();
1280
1281 let updates = Arc::new(Mutex::new(Vec::new()));
1282 worktree.update(cx, |tree, cx| {
1283 check_worktree_change_events(tree, cx);
1284
1285 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1286 let updates = updates.clone();
1287 move |update| {
1288 updates.lock().push(update);
1289 async { true }
1290 }
1291 });
1292 });
1293
1294 worktree
1295 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1296 .await;
1297
1298 fs.as_fake().pause_events();
1299 let mut snapshots = Vec::new();
1300 let mut mutations_len = operations;
1301 while mutations_len > 1 {
1302 if rng.gen_bool(0.2) {
1303 worktree
1304 .update(cx, |worktree, cx| {
1305 randomly_mutate_worktree(worktree, &mut rng, cx)
1306 })
1307 .await
1308 .log_err();
1309 } else {
1310 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1311 }
1312
1313 let buffered_event_count = fs.as_fake().buffered_event_count();
1314 if buffered_event_count > 0 && rng.gen_bool(0.3) {
1315 let len = rng.gen_range(0..=buffered_event_count);
1316 log::info!("flushing {} events", len);
1317 fs.as_fake().flush_events(len);
1318 } else {
1319 randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
1320 mutations_len -= 1;
1321 }
1322
1323 cx.foreground().run_until_parked();
1324 if rng.gen_bool(0.2) {
1325 log::info!("storing snapshot {}", snapshots.len());
1326 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1327 snapshots.push(snapshot);
1328 }
1329 }
1330
1331 log::info!("quiescing");
1332 fs.as_fake().flush_events(usize::MAX);
1333 cx.foreground().run_until_parked();
1334
1335 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1336 snapshot.check_invariants(true);
1337 let expanded_paths = snapshot
1338 .expanded_entries()
1339 .map(|e| e.path.clone())
1340 .collect::<Vec<_>>();
1341
1342 {
1343 let new_worktree = Worktree::local(
1344 build_client(cx),
1345 root_dir,
1346 true,
1347 fs.clone(),
1348 Default::default(),
1349 &mut cx.to_async(),
1350 )
1351 .await
1352 .unwrap();
1353 new_worktree
1354 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1355 .await;
1356 new_worktree
1357 .update(cx, |tree, _| {
1358 tree.as_local_mut()
1359 .unwrap()
1360 .refresh_entries_for_paths(expanded_paths)
1361 })
1362 .recv()
1363 .await;
1364 let new_snapshot =
1365 new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1366 assert_eq!(
1367 snapshot.entries_without_ids(true),
1368 new_snapshot.entries_without_ids(true)
1369 );
1370 }
1371
1372 for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
1373 for update in updates.lock().iter() {
1374 if update.scan_id >= prev_snapshot.scan_id() as u64 {
1375 prev_snapshot.apply_remote_update(update.clone()).unwrap();
1376 }
1377 }
1378
1379 assert_eq!(
1380 prev_snapshot
1381 .entries(true)
1382 .map(ignore_pending_dir)
1383 .collect::<Vec<_>>(),
1384 snapshot
1385 .entries(true)
1386 .map(ignore_pending_dir)
1387 .collect::<Vec<_>>(),
1388 "wrong updates after snapshot {i}: {updates:#?}",
1389 );
1390 }
1391
1392 fn ignore_pending_dir(entry: &Entry) -> Entry {
1393 let mut entry = entry.clone();
1394 if entry.kind.is_dir() {
1395 entry.kind = EntryKind::Dir
1396 }
1397 entry
1398 }
1399}
1400
1401// The worktree's `UpdatedEntries` event can be used to follow along with
1402// all changes to the worktree's snapshot.
1403fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
1404 let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
1405 cx.subscribe(&cx.handle(), move |tree, _, event, _| {
1406 if let Event::UpdatedEntries(changes) = event {
1407 for (path, _, change_type) in changes.iter() {
1408 let entry = tree.entry_for_path(&path).cloned();
1409 let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
1410 Ok(ix) | Err(ix) => ix,
1411 };
1412 match change_type {
1413 PathChange::Added => entries.insert(ix, entry.unwrap()),
1414 PathChange::Removed => drop(entries.remove(ix)),
1415 PathChange::Updated => {
1416 let entry = entry.unwrap();
1417 let existing_entry = entries.get_mut(ix).unwrap();
1418 assert_eq!(existing_entry.path, entry.path);
1419 *existing_entry = entry;
1420 }
1421 PathChange::AddedOrUpdated | PathChange::Loaded => {
1422 let entry = entry.unwrap();
1423 if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
1424 *entries.get_mut(ix).unwrap() = entry;
1425 } else {
1426 entries.insert(ix, entry);
1427 }
1428 }
1429 }
1430 }
1431
1432 let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
1433 assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
1434 }
1435 })
1436 .detach();
1437}
1438
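// Applies a random mutation to the worktree: deleting, renaming, creating, or
// overwriting an entry.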
1439fn randomly_mutate_worktree(
1440 worktree: &mut Worktree,
1441 rng: &mut impl Rng,
1442 cx: &mut ModelContext<Worktree>,
1443) -> Task<Result<()>> {
1444 log::info!("mutating worktree");
1445 let worktree = worktree.as_local_mut().unwrap();
1446 let snapshot = worktree.snapshot();
1447 let entry = snapshot.entries(false).choose(rng).unwrap();
1448
1449 match rng.gen_range(0_u32..100) {
1450 0..=33 if entry.path.as_ref() != Path::new("") => {
1451 log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
1452 worktree.delete_entry(entry.id, cx).unwrap()
1453 }
1454 ..=66 if entry.path.as_ref() != Path::new("") => {
1455 let other_entry = snapshot.entries(false).choose(rng).unwrap();
1456 let new_parent_path = if other_entry.is_dir() {
1457 other_entry.path.clone()
1458 } else {
1459 other_entry.path.parent().unwrap().into()
1460 };
1461 let mut new_path = new_parent_path.join(random_filename(rng));
1462 if new_path.starts_with(&entry.path) {
1463 new_path = random_filename(rng).into();
1464 }
1465
1466 log::info!(
1467 "renaming entry {:?} ({}) to {:?}",
1468 entry.path,
1469 entry.id.0,
1470 new_path
1471 );
1472 let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
1473 cx.foreground().spawn(async move {
1474 task.await?;
1475 Ok(())
1476 })
1477 }
1478 _ => {
1479 let task = if entry.is_dir() {
1480 let child_path = entry.path.join(random_filename(rng));
1481 let is_dir = rng.gen_bool(0.3);
1482 log::info!(
1483 "creating {} at {:?}",
1484 if is_dir { "dir" } else { "file" },
1485 child_path,
1486 );
1487 worktree.create_entry(child_path, is_dir, cx)
1488 } else {
1489 log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
1490 worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
1491 };
1492 cx.foreground().spawn(async move {
1493 task.await?;
1494 Ok(())
1495 })
1496 }
1497 }
1498}
1499
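// Applies a random mutation to the fake filesystem beneath `root_path`:
// creating files or directories, writing a .gitignore, renaming, or deleting
// entries.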
1500async fn randomly_mutate_fs(
1501 fs: &Arc<dyn Fs>,
1502 root_path: &Path,
1503 insertion_probability: f64,
1504 rng: &mut impl Rng,
1505) {
1506 log::info!("mutating fs");
1507 let mut files = Vec::new();
1508 let mut dirs = Vec::new();
1509 for path in fs.as_fake().paths(false) {
1510 if path.starts_with(root_path) {
1511 if fs.is_file(&path).await {
1512 files.push(path);
1513 } else {
1514 dirs.push(path);
1515 }
1516 }
1517 }
1518
1519 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
1520 let path = dirs.choose(rng).unwrap();
1521 let new_path = path.join(random_filename(rng));
1522
1523 if rng.gen() {
1524 log::info!(
1525 "creating dir {:?}",
1526 new_path.strip_prefix(root_path).unwrap()
1527 );
1528 fs.create_dir(&new_path).await.unwrap();
1529 } else {
1530 log::info!(
1531 "creating file {:?}",
1532 new_path.strip_prefix(root_path).unwrap()
1533 );
1534 fs.create_file(&new_path, Default::default()).await.unwrap();
1535 }
1536 } else if rng.gen_bool(0.05) {
1537 let ignore_dir_path = dirs.choose(rng).unwrap();
1538 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
1539
1540 let subdirs = dirs
1541 .iter()
1542 .filter(|d| d.starts_with(&ignore_dir_path))
1543 .cloned()
1544 .collect::<Vec<_>>();
1545 let subfiles = files
1546 .iter()
1547 .filter(|d| d.starts_with(&ignore_dir_path))
1548 .cloned()
1549 .collect::<Vec<_>>();
1550 let files_to_ignore = {
1551 let len = rng.gen_range(0..=subfiles.len());
1552 subfiles.choose_multiple(rng, len)
1553 };
1554 let dirs_to_ignore = {
1555 let len = rng.gen_range(0..subdirs.len());
1556 subdirs.choose_multiple(rng, len)
1557 };
1558
1559 let mut ignore_contents = String::new();
1560 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
1561 writeln!(
1562 ignore_contents,
1563 "{}",
1564 path_to_ignore
1565 .strip_prefix(&ignore_dir_path)
1566 .unwrap()
1567 .to_str()
1568 .unwrap()
1569 )
1570 .unwrap();
1571 }
1572 log::info!(
1573 "creating gitignore {:?} with contents:\n{}",
1574 ignore_path.strip_prefix(&root_path).unwrap(),
1575 ignore_contents
1576 );
1577 fs.save(
1578 &ignore_path,
1579 &ignore_contents.as_str().into(),
1580 Default::default(),
1581 )
1582 .await
1583 .unwrap();
1584 } else {
1585 let old_path = {
1586 let file_path = files.choose(rng);
1587 let dir_path = dirs[1..].choose(rng);
1588 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
1589 };
1590
1591 let is_rename = rng.gen();
1592 if is_rename {
1593 let new_path_parent = dirs
1594 .iter()
1595 .filter(|d| !d.starts_with(old_path))
1596 .choose(rng)
1597 .unwrap();
1598
1599 let overwrite_existing_dir =
1600 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
1601 let new_path = if overwrite_existing_dir {
1602 fs.remove_dir(
1603 &new_path_parent,
1604 RemoveOptions {
1605 recursive: true,
1606 ignore_if_not_exists: true,
1607 },
1608 )
1609 .await
1610 .unwrap();
1611 new_path_parent.to_path_buf()
1612 } else {
1613 new_path_parent.join(random_filename(rng))
1614 };
1615
1616 log::info!(
1617 "renaming {:?} to {}{:?}",
1618 old_path.strip_prefix(&root_path).unwrap(),
1619 if overwrite_existing_dir {
1620 "overwrite "
1621 } else {
1622 ""
1623 },
1624 new_path.strip_prefix(&root_path).unwrap()
1625 );
1626 fs.rename(
1627 &old_path,
1628 &new_path,
1629 fs::RenameOptions {
1630 overwrite: true,
1631 ignore_if_exists: true,
1632 },
1633 )
1634 .await
1635 .unwrap();
1636 } else if fs.is_file(&old_path).await {
1637 log::info!(
1638 "deleting file {:?}",
1639 old_path.strip_prefix(&root_path).unwrap()
1640 );
1641 fs.remove_file(old_path, Default::default()).await.unwrap();
1642 } else {
1643 log::info!(
1644 "deleting dir {:?}",
1645 old_path.strip_prefix(&root_path).unwrap()
1646 );
1647 fs.remove_dir(
1648 &old_path,
1649 RemoveOptions {
1650 recursive: true,
1651 ignore_if_not_exists: true,
1652 },
1653 )
1654 .await
1655 .unwrap();
1656 }
1657 }
1658}
1659
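// Generates a random six-character alphanumeric file name.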
1660fn random_filename(rng: &mut impl Rng) -> String {
1661 (0..6)
1662 .map(|_| rng.sample(rand::distributions::Alphanumeric))
1663 .map(char::from)
1664 .collect()
1665}
1666
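// Verifies that git file statuses are preserved when a repository's work
// directory is renamed.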
1667#[gpui::test]
1668async fn test_rename_work_directory(cx: &mut TestAppContext) {
1669 init_test(cx);
1670 let root = temp_tree(json!({
1671 "projects": {
1672 "project1": {
1673 "a": "",
1674 "b": "",
1675 }
1676 },
1677
1678 }));
1679 let root_path = root.path();
1680
1681 let tree = Worktree::local(
1682 build_client(cx),
1683 root_path,
1684 true,
1685 Arc::new(RealFs),
1686 Default::default(),
1687 &mut cx.to_async(),
1688 )
1689 .await
1690 .unwrap();
1691
1692 let repo = git_init(&root_path.join("projects/project1"));
1693 git_add("a", &repo);
1694 git_commit("init", &repo);
1695 std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
1696
1697 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1698 .await;
1699
1700 tree.flush_fs_events(cx).await;
1701
1702 cx.read(|cx| {
1703 let tree = tree.read(cx);
1704 let (work_dir, _) = tree.repositories().next().unwrap();
1705 assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
1706 assert_eq!(
1707 tree.status_for_file(Path::new("projects/project1/a")),
1708 Some(GitFileStatus::Modified)
1709 );
1710 assert_eq!(
1711 tree.status_for_file(Path::new("projects/project1/b")),
1712 Some(GitFileStatus::Added)
1713 );
1714 });
1715
1716 std::fs::rename(
1717 root_path.join("projects/project1"),
1718 root_path.join("projects/project2"),
1719 )
1720 .ok();
1721 tree.flush_fs_events(cx).await;
1722
1723 cx.read(|cx| {
1724 let tree = tree.read(cx);
1725 let (work_dir, _) = tree.repositories().next().unwrap();
1726 assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
1727 assert_eq!(
1728 tree.status_for_file(Path::new("projects/project2/a")),
1729 Some(GitFileStatus::Modified)
1730 );
1731 assert_eq!(
1732 tree.status_for_file(Path::new("projects/project2/b")),
1733 Some(GitFileStatus::Added)
1734 );
1735 });
1736}
1737
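// Verifies that paths resolve to the innermost containing git repository, that
// repository update events are emitted, and that deleting a .git directory
// removes the mapping.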
1738#[gpui::test]
1739async fn test_git_repository_for_path(cx: &mut TestAppContext) {
1740 init_test(cx);
1741 let root = temp_tree(json!({
1742 "c.txt": "",
1743 "dir1": {
1744 ".git": {},
1745 "deps": {
1746 "dep1": {
1747 ".git": {},
1748 "src": {
1749 "a.txt": ""
1750 }
1751 }
1752 },
1753 "src": {
1754 "b.txt": ""
1755 }
1756 },
1757 }));
1758
1759 let tree = Worktree::local(
1760 build_client(cx),
1761 root.path(),
1762 true,
1763 Arc::new(RealFs),
1764 Default::default(),
1765 &mut cx.to_async(),
1766 )
1767 .await
1768 .unwrap();
1769
1770 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1771 .await;
1772 tree.flush_fs_events(cx).await;
1773
1774 tree.read_with(cx, |tree, _cx| {
1775 let tree = tree.as_local().unwrap();
1776
1777 assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
1778
1779 let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
1780 assert_eq!(
1781 entry
1782 .work_directory(tree)
1783 .map(|directory| directory.as_ref().to_owned()),
1784 Some(Path::new("dir1").to_owned())
1785 );
1786
1787 let entry = tree
1788 .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
1789 .unwrap();
1790 assert_eq!(
1791 entry
1792 .work_directory(tree)
1793 .map(|directory| directory.as_ref().to_owned()),
1794 Some(Path::new("dir1/deps/dep1").to_owned())
1795 );
1796
1797 let entries = tree.files(false, 0);
1798
1799 let paths_with_repos = tree
1800 .entries_with_repositories(entries)
1801 .map(|(entry, repo)| {
1802 (
1803 entry.path.as_ref(),
1804 repo.and_then(|repo| {
1805 repo.work_directory(&tree)
1806 .map(|work_directory| work_directory.0.to_path_buf())
1807 }),
1808 )
1809 })
1810 .collect::<Vec<_>>();
1811
1812 assert_eq!(
1813 paths_with_repos,
1814 &[
1815 (Path::new("c.txt"), None),
1816 (
1817 Path::new("dir1/deps/dep1/src/a.txt"),
1818 Some(Path::new("dir1/deps/dep1").into())
1819 ),
1820 (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
1821 ]
1822 );
1823 });
1824
1825 let repo_update_events = Arc::new(Mutex::new(vec![]));
1826 tree.update(cx, |_, cx| {
1827 let repo_update_events = repo_update_events.clone();
1828 cx.subscribe(&tree, move |_, _, event, _| {
1829 if let Event::UpdatedGitRepositories(update) = event {
1830 repo_update_events.lock().push(update.clone());
1831 }
1832 })
1833 .detach();
1834 });
1835
1836 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
1837 tree.flush_fs_events(cx).await;
1838
1839 assert_eq!(
1840 repo_update_events.lock()[0]
1841 .iter()
1842 .map(|e| e.0.clone())
1843 .collect::<Vec<Arc<Path>>>(),
1844 vec![Path::new("dir1").into()]
1845 );
1846
1847 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
1848 tree.flush_fs_events(cx).await;
1849
1850 tree.read_with(cx, |tree, _cx| {
1851 let tree = tree.as_local().unwrap();
1852
1853 assert!(tree
1854 .repository_for_path("dir1/src/b.txt".as_ref())
1855 .is_none());
1856 });
1857}
1858
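// Verifies that git file statuses stay up to date across edits, commits,
// resets, stashes, ignore-rule changes, and directory renames.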
1859#[gpui::test]
1860async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
1861 init_test(cx);
1862 const IGNORE_RULE: &'static str = "**/target";
1863
1864 let root = temp_tree(json!({
1865 "project": {
1866 "a.txt": "a",
1867 "b.txt": "bb",
1868 "c": {
1869 "d": {
1870 "e.txt": "eee"
1871 }
1872 },
1873 "f.txt": "ffff",
1874 "target": {
1875 "build_file": "???"
1876 },
1877 ".gitignore": IGNORE_RULE
1878 },
1879
1880 }));
1881
1882 const A_TXT: &'static str = "a.txt";
1883 const B_TXT: &'static str = "b.txt";
1884 const E_TXT: &'static str = "c/d/e.txt";
1885 const F_TXT: &'static str = "f.txt";
1886 const DOTGITIGNORE: &'static str = ".gitignore";
1887 const BUILD_FILE: &'static str = "target/build_file";
1888 let project_path = Path::new("project");
1889
1890 // Set up git repository before creating the worktree.
1891 let work_dir = root.path().join("project");
1892 let mut repo = git_init(work_dir.as_path());
1893 repo.add_ignore_rule(IGNORE_RULE).unwrap();
1894 git_add(A_TXT, &repo);
1895 git_add(E_TXT, &repo);
1896 git_add(DOTGITIGNORE, &repo);
1897 git_commit("Initial commit", &repo);
1898
1899 let tree = Worktree::local(
1900 build_client(cx),
1901 root.path(),
1902 true,
1903 Arc::new(RealFs),
1904 Default::default(),
1905 &mut cx.to_async(),
1906 )
1907 .await
1908 .unwrap();
1909
1910 tree.flush_fs_events(cx).await;
1911 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1912 .await;
1913 deterministic.run_until_parked();
1914
1915 // Check that the right git state is observed on startup
1916 tree.read_with(cx, |tree, _cx| {
1917 let snapshot = tree.snapshot();
1918 assert_eq!(snapshot.repositories().count(), 1);
1919 let (dir, _) = snapshot.repositories().next().unwrap();
1920 assert_eq!(dir.as_ref(), Path::new("project"));
1921
1922 assert_eq!(
1923 snapshot.status_for_file(project_path.join(B_TXT)),
1924 Some(GitFileStatus::Added)
1925 );
1926 assert_eq!(
1927 snapshot.status_for_file(project_path.join(F_TXT)),
1928 Some(GitFileStatus::Added)
1929 );
1930 });
1931
1932 // Modify a file in the working copy.
1933 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
1934 tree.flush_fs_events(cx).await;
1935 deterministic.run_until_parked();
1936
1937 // The worktree detects that the file's git status has changed.
1938 tree.read_with(cx, |tree, _cx| {
1939 let snapshot = tree.snapshot();
1940 assert_eq!(
1941 snapshot.status_for_file(project_path.join(A_TXT)),
1942 Some(GitFileStatus::Modified)
1943 );
1944 });
1945
1946 // Create a commit in the git repository.
1947 git_add(A_TXT, &repo);
1948 git_add(B_TXT, &repo);
1949 git_commit("Committing modified and added", &repo);
1950 tree.flush_fs_events(cx).await;
1951 deterministic.run_until_parked();
1952
1953 // The worktree detects that the files' git status have changed.
1954 tree.read_with(cx, |tree, _cx| {
1955 let snapshot = tree.snapshot();
1956 assert_eq!(
1957 snapshot.status_for_file(project_path.join(F_TXT)),
1958 Some(GitFileStatus::Added)
1959 );
1960 assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
1961 assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
1962 });
1963
1964 // Modify files in the working copy and perform git operations on other files.
1965 git_reset(0, &repo);
1966 git_remove_index(Path::new(B_TXT), &repo);
1967 git_stash(&mut repo);
1968 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
1969 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
1970 tree.flush_fs_events(cx).await;
1971 deterministic.run_until_parked();
1972
1973 // Check that more complex repo changes are tracked
1974 tree.read_with(cx, |tree, _cx| {
1975 let snapshot = tree.snapshot();
1976
1977 assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
1978 assert_eq!(
1979 snapshot.status_for_file(project_path.join(B_TXT)),
1980 Some(GitFileStatus::Added)
1981 );
1982 assert_eq!(
1983 snapshot.status_for_file(project_path.join(E_TXT)),
1984 Some(GitFileStatus::Modified)
1985 );
1986 });
1987
1988 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
1989 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
1990 std::fs::write(
1991 work_dir.join(DOTGITIGNORE),
1992 [IGNORE_RULE, "f.txt"].join("\n"),
1993 )
1994 .unwrap();
1995
1996 git_add(Path::new(DOTGITIGNORE), &repo);
1997 git_commit("Committing modified git ignore", &repo);
1998
1999 tree.flush_fs_events(cx).await;
2000 deterministic.run_until_parked();
2001
2002 let mut renamed_dir_name = "first_directory/second_directory";
2003 const RENAMED_FILE: &'static str = "rf.txt";
2004
2005 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
2006 std::fs::write(
2007 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
2008 "new-contents",
2009 )
2010 .unwrap();
2011
2012 tree.flush_fs_events(cx).await;
2013 deterministic.run_until_parked();
2014
2015 tree.read_with(cx, |tree, _cx| {
2016 let snapshot = tree.snapshot();
2017 assert_eq!(
2018 snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
2019 Some(GitFileStatus::Added)
2020 );
2021 });
2022
2023 renamed_dir_name = "new_first_directory/second_directory";
2024
2025 std::fs::rename(
2026 work_dir.join("first_directory"),
2027 work_dir.join("new_first_directory"),
2028 )
2029 .unwrap();
2030
2031 tree.flush_fs_events(cx).await;
2032 deterministic.run_until_parked();
2033
2034 tree.read_with(cx, |tree, _cx| {
2035 let snapshot = tree.snapshot();
2036
2037 assert_eq!(
2038 snapshot.status_for_file(
2039 project_path
2040 .join(Path::new(renamed_dir_name))
2041 .join(RENAMED_FILE)
2042 ),
2043 Some(GitFileStatus::Added)
2044 );
2045 });
2046}
2047
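// Verifies that git file statuses propagate to ancestor directories, with
// conflict taking precedence over modified, and modified over added.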
2048#[gpui::test]
2049async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
2050 init_test(cx);
2051 let fs = FakeFs::new(cx.background());
2052 fs.insert_tree(
2053 "/root",
2054 json!({
2055 ".git": {},
2056 "a": {
2057 "b": {
2058 "c1.txt": "",
2059 "c2.txt": "",
2060 },
2061 "d": {
2062 "e1.txt": "",
2063 "e2.txt": "",
2064 "e3.txt": "",
2065 }
2066 },
2067 "f": {
2068 "no-status.txt": ""
2069 },
2070 "g": {
2071 "h1.txt": "",
2072 "h2.txt": ""
2073 },
2074
2075 }),
2076 )
2077 .await;
2078
2079 fs.set_status_for_repo_via_git_operation(
2080 &Path::new("/root/.git"),
2081 &[
2082 (Path::new("a/b/c1.txt"), GitFileStatus::Added),
2083 (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
2084 (Path::new("g/h2.txt"), GitFileStatus::Conflict),
2085 ],
2086 );
2087
2088 let tree = Worktree::local(
2089 build_client(cx),
2090 Path::new("/root"),
2091 true,
2092 fs.clone(),
2093 Default::default(),
2094 &mut cx.to_async(),
2095 )
2096 .await
2097 .unwrap();
2098
2099 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2100 .await;
2101
2102 cx.foreground().run_until_parked();
2103 let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
2104
2105 check_propagated_statuses(
2106 &snapshot,
2107 &[
2108 (Path::new(""), Some(GitFileStatus::Conflict)),
2109 (Path::new("a"), Some(GitFileStatus::Modified)),
2110 (Path::new("a/b"), Some(GitFileStatus::Added)),
2111 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
2112 (Path::new("a/b/c2.txt"), None),
2113 (Path::new("a/d"), Some(GitFileStatus::Modified)),
2114 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2115 (Path::new("f"), None),
2116 (Path::new("f/no-status.txt"), None),
2117 (Path::new("g"), Some(GitFileStatus::Conflict)),
2118 (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
2119 ],
2120 );
2121
2122 check_propagated_statuses(
2123 &snapshot,
2124 &[
2125 (Path::new("a/b"), Some(GitFileStatus::Added)),
2126 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
2127 (Path::new("a/b/c2.txt"), None),
2128 (Path::new("a/d"), Some(GitFileStatus::Modified)),
2129 (Path::new("a/d/e1.txt"), None),
2130 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2131 (Path::new("f"), None),
2132 (Path::new("f/no-status.txt"), None),
2133 (Path::new("g"), Some(GitFileStatus::Conflict)),
2134 ],
2135 );
2136
2137 check_propagated_statuses(
2138 &snapshot,
2139 &[
2140 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
2141 (Path::new("a/b/c2.txt"), None),
2142 (Path::new("a/d/e1.txt"), None),
2143 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2144 (Path::new("f/no-status.txt"), None),
2145 ],
2146 );
2147
2148 #[track_caller]
2149 fn check_propagated_statuses(
2150 snapshot: &Snapshot,
2151 expected_statuses: &[(&Path, Option<GitFileStatus>)],
2152 ) {
2153 let mut entries = expected_statuses
2154 .iter()
2155 .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
2156 .collect::<Vec<_>>();
2157 snapshot.propagate_git_statuses(&mut entries);
2158 assert_eq!(
2159 entries
2160 .iter()
2161 .map(|e| (e.path.as_ref(), e.git_status))
2162 .collect::<Vec<_>>(),
2163 expected_statuses
2164 );
2165 }
2166}
2167
2168fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
2169 let http_client = FakeHttpClient::with_404_response();
2170 cx.read(|cx| Client::new(http_client, cx))
2171}
2172
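// Helpers for driving a real git2 repository in the tests above.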
2173#[track_caller]
2174fn git_init(path: &Path) -> git2::Repository {
2175 git2::Repository::init(path).expect("Failed to initialize git repository")
2176}
2177
2178#[track_caller]
2179fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
2180 let path = path.as_ref();
2181 let mut index = repo.index().expect("Failed to get index");
    index.add_path(path).expect("Failed to add path to index");
2183 index.write().expect("Failed to write index");
2184}
2185
2186#[track_caller]
2187fn git_remove_index(path: &Path, repo: &git2::Repository) {
2188 let mut index = repo.index().expect("Failed to get index");
    index.remove_path(path).expect("Failed to remove path from index");
2190 index.write().expect("Failed to write index");
2191}
2192
2193#[track_caller]
2194fn git_commit(msg: &'static str, repo: &git2::Repository) {
2195 use git2::Signature;
2196
2197 let signature = Signature::now("test", "test@zed.dev").unwrap();
2198 let oid = repo.index().unwrap().write_tree().unwrap();
2199 let tree = repo.find_tree(oid).unwrap();
    if let Ok(head) = repo.head() {
2201 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
2202
2203 let parent_commit = parent_obj.as_commit().unwrap();
2204
2205 repo.commit(
2206 Some("HEAD"),
2207 &signature,
2208 &signature,
2209 msg,
2210 &tree,
2211 &[parent_commit],
2212 )
2213 .expect("Failed to commit with parent");
2214 } else {
2215 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
2216 .expect("Failed to commit");
2217 }
2218}
2219
2220#[track_caller]
2221fn git_stash(repo: &mut git2::Repository) {
2222 use git2::Signature;
2223
2224 let signature = Signature::now("test", "test@zed.dev").unwrap();
2225 repo.stash_save(&signature, "N/A", None)
2226 .expect("Failed to stash");
2227}
2228
2229#[track_caller]
2230fn git_reset(offset: usize, repo: &git2::Repository) {
2231 let head = repo.head().expect("Couldn't get repo head");
2232 let object = head.peel(git2::ObjectType::Commit).unwrap();
2233 let commit = object.as_commit().unwrap();
    let new_head = commit
        .parents()
        .nth(offset)
        .expect("Not enough history");
    repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
2243 .expect("Could not reset");
2244}
2245
2246#[allow(dead_code)]
2247#[track_caller]
2248fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
2249 repo.statuses(None)
2250 .unwrap()
2251 .iter()
2252 .map(|status| (status.path().unwrap().to_string(), status.status()))
2253 .collect()
2254}
2255
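// Asserts that the given paths are, respectively, excluded from, ignored in,
// and tracked by the worktree.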
2256#[track_caller]
2257fn check_worktree_entries(
2258 tree: &Worktree,
2259 expected_excluded_paths: &[&str],
2260 expected_ignored_paths: &[&str],
2261 expected_tracked_paths: &[&str],
2262) {
2263 for path in expected_excluded_paths {
2264 let entry = tree.entry_for_path(path);
2265 assert!(
2266 entry.is_none(),
2267 "expected path '{path}' to be excluded, but got entry: {entry:?}",
2268 );
2269 }
2270 for path in expected_ignored_paths {
2271 let entry = tree.entry_for_path(path).unwrap();
2272 assert!(
2273 entry.is_ignored,
2274 "expected path '{path}' to be ignored, but got entry: {entry:?}",
2275 );
2276 }
2277 for path in expected_tracked_paths {
2278 let entry = tree.entry_for_path(path).unwrap();
2279 assert!(
2280 !entry.is_ignored,
2281 "expected path '{path}' to be tracked, but got entry: {entry:?}",
2282 );
2283 }
2284}
2285
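// Sets up the global settings store and project settings used by these tests.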
2286fn init_test(cx: &mut gpui::TestAppContext) {
2287 cx.update(|cx| {
2288 cx.set_global(SettingsStore::test(cx));
2289 Project::init_settings(cx);
2290 });
2291}