1use crate::{
2 project_settings::ProjectSettings,
3 worktree::{Event, Snapshot, WorktreeModelHandle},
4 Entry, EntryKind, PathChange, Project, Worktree,
5};
6use anyhow::Result;
7use client::Client;
8use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
9use git::GITIGNORE;
10use gpui::{ModelContext, Task, TestAppContext};
11use parking_lot::Mutex;
12use postage::stream::Stream;
13use pretty_assertions::assert_eq;
14use rand::prelude::*;
15use serde_json::json;
16use settings::SettingsStore;
17use std::{
18 env,
19 fmt::Write,
20 mem,
21 path::{Path, PathBuf},
22 sync::Arc,
23};
24use text::BufferId;
25use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
26
27#[gpui::test]
28async fn test_traversal(cx: &mut TestAppContext) {
29 init_test(cx);
30 let fs = FakeFs::new(cx.background_executor.clone());
31 fs.insert_tree(
32 "/root",
33 json!({
34 ".gitignore": "a/b\n",
35 "a": {
36 "b": "",
37 "c": "",
38 }
39 }),
40 )
41 .await;
42
43 let tree = Worktree::local(
44 build_client(cx),
45 Path::new("/root"),
46 true,
47 fs,
48 Default::default(),
49 &mut cx.to_async(),
50 )
51 .await
52 .unwrap();
53 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
54 .await;
55
56 tree.read_with(cx, |tree, _| {
57 assert_eq!(
58 tree.entries(false)
59 .map(|entry| entry.path.as_ref())
60 .collect::<Vec<_>>(),
61 vec![
62 Path::new(""),
63 Path::new(".gitignore"),
64 Path::new("a"),
65 Path::new("a/c"),
66 ]
67 );
68 assert_eq!(
69 tree.entries(true)
70 .map(|entry| entry.path.as_ref())
71 .collect::<Vec<_>>(),
72 vec![
73 Path::new(""),
74 Path::new(".gitignore"),
75 Path::new("a"),
76 Path::new("a/b"),
77 Path::new("a/c"),
78 ]
79 );
80 })
81}
82
83#[gpui::test]
84async fn test_descendent_entries(cx: &mut TestAppContext) {
85 init_test(cx);
86 let fs = FakeFs::new(cx.background_executor.clone());
87 fs.insert_tree(
88 "/root",
89 json!({
90 "a": "",
91 "b": {
92 "c": {
93 "d": ""
94 },
95 "e": {}
96 },
97 "f": "",
98 "g": {
99 "h": {}
100 },
101 "i": {
102 "j": {
103 "k": ""
104 },
105 "l": {
106
107 }
108 },
109 ".gitignore": "i/j\n",
110 }),
111 )
112 .await;
113
114 let tree = Worktree::local(
115 build_client(cx),
116 Path::new("/root"),
117 true,
118 fs,
119 Default::default(),
120 &mut cx.to_async(),
121 )
122 .await
123 .unwrap();
124 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
125 .await;
126
127 tree.read_with(cx, |tree, _| {
128 assert_eq!(
129 tree.descendent_entries(false, false, Path::new("b"))
130 .map(|entry| entry.path.as_ref())
131 .collect::<Vec<_>>(),
132 vec![Path::new("b/c/d"),]
133 );
134 assert_eq!(
135 tree.descendent_entries(true, false, Path::new("b"))
136 .map(|entry| entry.path.as_ref())
137 .collect::<Vec<_>>(),
138 vec![
139 Path::new("b"),
140 Path::new("b/c"),
141 Path::new("b/c/d"),
142 Path::new("b/e"),
143 ]
144 );
145
146 assert_eq!(
147 tree.descendent_entries(false, false, Path::new("g"))
148 .map(|entry| entry.path.as_ref())
149 .collect::<Vec<_>>(),
150 Vec::<PathBuf>::new()
151 );
152 assert_eq!(
153 tree.descendent_entries(true, false, Path::new("g"))
154 .map(|entry| entry.path.as_ref())
155 .collect::<Vec<_>>(),
156 vec![Path::new("g"), Path::new("g/h"),]
157 );
158 });
159
160 // Expand gitignored directory.
161 tree.read_with(cx, |tree, _| {
162 tree.as_local()
163 .unwrap()
164 .refresh_entries_for_paths(vec![Path::new("i/j").into()])
165 })
166 .recv()
167 .await;
168
169 tree.read_with(cx, |tree, _| {
170 assert_eq!(
171 tree.descendent_entries(false, false, Path::new("i"))
172 .map(|entry| entry.path.as_ref())
173 .collect::<Vec<_>>(),
174 Vec::<PathBuf>::new()
175 );
176 assert_eq!(
177 tree.descendent_entries(false, true, Path::new("i"))
178 .map(|entry| entry.path.as_ref())
179 .collect::<Vec<_>>(),
180 vec![Path::new("i/j/k")]
181 );
182 assert_eq!(
183 tree.descendent_entries(true, false, Path::new("i"))
184 .map(|entry| entry.path.as_ref())
185 .collect::<Vec<_>>(),
186 vec![Path::new("i"), Path::new("i/l"),]
187 );
188 })
189}
190
191#[gpui::test(iterations = 10)]
192async fn test_circular_symlinks(cx: &mut TestAppContext) {
193 init_test(cx);
194 let fs = FakeFs::new(cx.background_executor.clone());
195 fs.insert_tree(
196 "/root",
197 json!({
198 "lib": {
199 "a": {
200 "a.txt": ""
201 },
202 "b": {
203 "b.txt": ""
204 }
205 }
206 }),
207 )
208 .await;
209 fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
210 fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
211
212 let tree = Worktree::local(
213 build_client(cx),
214 Path::new("/root"),
215 true,
216 fs.clone(),
217 Default::default(),
218 &mut cx.to_async(),
219 )
220 .await
221 .unwrap();
222
223 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
224 .await;
225
226 tree.read_with(cx, |tree, _| {
227 assert_eq!(
228 tree.entries(false)
229 .map(|entry| entry.path.as_ref())
230 .collect::<Vec<_>>(),
231 vec![
232 Path::new(""),
233 Path::new("lib"),
234 Path::new("lib/a"),
235 Path::new("lib/a/a.txt"),
236 Path::new("lib/a/lib"),
237 Path::new("lib/b"),
238 Path::new("lib/b/b.txt"),
239 Path::new("lib/b/lib"),
240 ]
241 );
242 });
243
244 fs.rename(
245 Path::new("/root/lib/a/lib"),
246 Path::new("/root/lib/a/lib-2"),
247 Default::default(),
248 )
249 .await
250 .unwrap();
251 cx.executor().run_until_parked();
252 tree.read_with(cx, |tree, _| {
253 assert_eq!(
254 tree.entries(false)
255 .map(|entry| entry.path.as_ref())
256 .collect::<Vec<_>>(),
257 vec![
258 Path::new(""),
259 Path::new("lib"),
260 Path::new("lib/a"),
261 Path::new("lib/a/a.txt"),
262 Path::new("lib/a/lib-2"),
263 Path::new("lib/b"),
264 Path::new("lib/b/b.txt"),
265 Path::new("lib/b/lib"),
266 ]
267 );
268 });
269}
270
271#[gpui::test]
272async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
273 init_test(cx);
274 let fs = FakeFs::new(cx.background_executor.clone());
275 fs.insert_tree(
276 "/root",
277 json!({
278 "dir1": {
279 "deps": {
280 // symlinks here
281 },
282 "src": {
283 "a.rs": "",
284 "b.rs": "",
285 },
286 },
287 "dir2": {
288 "src": {
289 "c.rs": "",
290 "d.rs": "",
291 }
292 },
293 "dir3": {
294 "deps": {},
295 "src": {
296 "e.rs": "",
297 "f.rs": "",
298 },
299 }
300 }),
301 )
302 .await;
303
304 // These symlinks point to directories outside of the worktree's root, dir1.
305 fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
306 .await;
307 fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
308 .await;
309
310 let tree = Worktree::local(
311 build_client(cx),
312 Path::new("/root/dir1"),
313 true,
314 fs.clone(),
315 Default::default(),
316 &mut cx.to_async(),
317 )
318 .await
319 .unwrap();
320
321 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
322 .await;
323
324 let tree_updates = Arc::new(Mutex::new(Vec::new()));
325 tree.update(cx, |_, cx| {
326 let tree_updates = tree_updates.clone();
327 cx.subscribe(&tree, move |_, _, event, _| {
328 if let Event::UpdatedEntries(update) = event {
329 tree_updates.lock().extend(
330 update
331 .iter()
332 .map(|(path, _, change)| (path.clone(), *change)),
333 );
334 }
335 })
336 .detach();
337 });
338
339 // The symlinked directories are not scanned by default.
340 tree.read_with(cx, |tree, _| {
341 assert_eq!(
342 tree.entries(true)
343 .map(|entry| (entry.path.as_ref(), entry.is_external))
344 .collect::<Vec<_>>(),
345 vec![
346 (Path::new(""), false),
347 (Path::new("deps"), false),
348 (Path::new("deps/dep-dir2"), true),
349 (Path::new("deps/dep-dir3"), true),
350 (Path::new("src"), false),
351 (Path::new("src/a.rs"), false),
352 (Path::new("src/b.rs"), false),
353 ]
354 );
355
356 assert_eq!(
357 tree.entry_for_path("deps/dep-dir2").unwrap().kind,
358 EntryKind::UnloadedDir
359 );
360 });
361
362 // Expand one of the symlinked directories.
363 tree.read_with(cx, |tree, _| {
364 tree.as_local()
365 .unwrap()
366 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
367 })
368 .recv()
369 .await;
370
371 // The expanded directory's contents are loaded. Subdirectories are
372 // not scanned yet.
373 tree.read_with(cx, |tree, _| {
374 assert_eq!(
375 tree.entries(true)
376 .map(|entry| (entry.path.as_ref(), entry.is_external))
377 .collect::<Vec<_>>(),
378 vec![
379 (Path::new(""), false),
380 (Path::new("deps"), false),
381 (Path::new("deps/dep-dir2"), true),
382 (Path::new("deps/dep-dir3"), true),
383 (Path::new("deps/dep-dir3/deps"), true),
384 (Path::new("deps/dep-dir3/src"), true),
385 (Path::new("src"), false),
386 (Path::new("src/a.rs"), false),
387 (Path::new("src/b.rs"), false),
388 ]
389 );
390 });
391 assert_eq!(
392 mem::take(&mut *tree_updates.lock()),
393 &[
394 (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
395 (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
396 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
397 ]
398 );
399
400 // Expand a subdirectory of one of the symlinked directories.
401 tree.read_with(cx, |tree, _| {
402 tree.as_local()
403 .unwrap()
404 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
405 })
406 .recv()
407 .await;
408
409 // The expanded subdirectory's contents are loaded.
410 tree.read_with(cx, |tree, _| {
411 assert_eq!(
412 tree.entries(true)
413 .map(|entry| (entry.path.as_ref(), entry.is_external))
414 .collect::<Vec<_>>(),
415 vec![
416 (Path::new(""), false),
417 (Path::new("deps"), false),
418 (Path::new("deps/dep-dir2"), true),
419 (Path::new("deps/dep-dir3"), true),
420 (Path::new("deps/dep-dir3/deps"), true),
421 (Path::new("deps/dep-dir3/src"), true),
422 (Path::new("deps/dep-dir3/src/e.rs"), true),
423 (Path::new("deps/dep-dir3/src/f.rs"), true),
424 (Path::new("src"), false),
425 (Path::new("src/a.rs"), false),
426 (Path::new("src/b.rs"), false),
427 ]
428 );
429 });
430
431 assert_eq!(
432 mem::take(&mut *tree_updates.lock()),
433 &[
434 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
435 (
436 Path::new("deps/dep-dir3/src/e.rs").into(),
437 PathChange::Loaded
438 ),
439 (
440 Path::new("deps/dep-dir3/src/f.rs").into(),
441 PathChange::Loaded
442 )
443 ]
444 );
445}
446
447#[cfg(target_os = "macos")]
448#[gpui::test]
449async fn test_renaming_case_only(cx: &mut TestAppContext) {
450 cx.executor().allow_parking();
451 init_test(cx);
452
453 const OLD_NAME: &str = "aaa.rs";
454 const NEW_NAME: &str = "AAA.rs";
455
456 let fs = Arc::new(RealFs);
457 let temp_root = temp_tree(json!({
458 OLD_NAME: "",
459 }));
460
461 let tree = Worktree::local(
462 build_client(cx),
463 temp_root.path(),
464 true,
465 fs.clone(),
466 Default::default(),
467 &mut cx.to_async(),
468 )
469 .await
470 .unwrap();
471
472 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
473 .await;
474 tree.read_with(cx, |tree, _| {
475 assert_eq!(
476 tree.entries(true)
477 .map(|entry| entry.path.as_ref())
478 .collect::<Vec<_>>(),
479 vec![Path::new(""), Path::new(OLD_NAME)]
480 );
481 });
482
483 fs.rename(
484 &temp_root.path().join(OLD_NAME),
485 &temp_root.path().join(NEW_NAME),
486 fs::RenameOptions {
487 overwrite: true,
488 ignore_if_exists: true,
489 },
490 )
491 .await
492 .unwrap();
493
494 tree.flush_fs_events(cx).await;
495
496 tree.read_with(cx, |tree, _| {
497 assert_eq!(
498 tree.entries(true)
499 .map(|entry| entry.path.as_ref())
500 .collect::<Vec<_>>(),
501 vec![Path::new(""), Path::new(NEW_NAME)]
502 );
503 });
504}
505
506#[gpui::test]
507async fn test_open_gitignored_files(cx: &mut TestAppContext) {
508 init_test(cx);
509 let fs = FakeFs::new(cx.background_executor.clone());
510 fs.insert_tree(
511 "/root",
512 json!({
513 ".gitignore": "node_modules\n",
514 "one": {
515 "node_modules": {
516 "a": {
517 "a1.js": "a1",
518 "a2.js": "a2",
519 },
520 "b": {
521 "b1.js": "b1",
522 "b2.js": "b2",
523 },
524 "c": {
525 "c1.js": "c1",
526 "c2.js": "c2",
527 }
528 },
529 },
530 "two": {
531 "x.js": "",
532 "y.js": "",
533 },
534 }),
535 )
536 .await;
537
538 let tree = Worktree::local(
539 build_client(cx),
540 Path::new("/root"),
541 true,
542 fs.clone(),
543 Default::default(),
544 &mut cx.to_async(),
545 )
546 .await
547 .unwrap();
548
549 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
550 .await;
551
552 tree.read_with(cx, |tree, _| {
553 assert_eq!(
554 tree.entries(true)
555 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
556 .collect::<Vec<_>>(),
557 vec![
558 (Path::new(""), false),
559 (Path::new(".gitignore"), false),
560 (Path::new("one"), false),
561 (Path::new("one/node_modules"), true),
562 (Path::new("two"), false),
563 (Path::new("two/x.js"), false),
564 (Path::new("two/y.js"), false),
565 ]
566 );
567 });
568
569 // Open a file that is nested inside of a gitignored directory that
570 // has not yet been expanded.
571 let prev_read_dir_count = fs.read_dir_call_count();
572 let buffer = tree
573 .update(cx, |tree, cx| {
574 tree.as_local_mut().unwrap().load_buffer(
575 BufferId::new(1).unwrap(),
576 "one/node_modules/b/b1.js".as_ref(),
577 cx,
578 )
579 })
580 .await
581 .unwrap();
582
583 tree.read_with(cx, |tree, cx| {
584 assert_eq!(
585 tree.entries(true)
586 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
587 .collect::<Vec<_>>(),
588 vec![
589 (Path::new(""), false),
590 (Path::new(".gitignore"), false),
591 (Path::new("one"), false),
592 (Path::new("one/node_modules"), true),
593 (Path::new("one/node_modules/a"), true),
594 (Path::new("one/node_modules/b"), true),
595 (Path::new("one/node_modules/b/b1.js"), true),
596 (Path::new("one/node_modules/b/b2.js"), true),
597 (Path::new("one/node_modules/c"), true),
598 (Path::new("two"), false),
599 (Path::new("two/x.js"), false),
600 (Path::new("two/y.js"), false),
601 ]
602 );
603
604 assert_eq!(
605 buffer.read(cx).file().unwrap().path().as_ref(),
606 Path::new("one/node_modules/b/b1.js")
607 );
608
609 // Only the newly-expanded directories are scanned.
610 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
611 });
612
613 // Open another file in a different subdirectory of the same
614 // gitignored directory.
615 let prev_read_dir_count = fs.read_dir_call_count();
616 let buffer = tree
617 .update(cx, |tree, cx| {
618 tree.as_local_mut().unwrap().load_buffer(
619 BufferId::new(1).unwrap(),
620 "one/node_modules/a/a2.js".as_ref(),
621 cx,
622 )
623 })
624 .await
625 .unwrap();
626
627 tree.read_with(cx, |tree, cx| {
628 assert_eq!(
629 tree.entries(true)
630 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
631 .collect::<Vec<_>>(),
632 vec![
633 (Path::new(""), false),
634 (Path::new(".gitignore"), false),
635 (Path::new("one"), false),
636 (Path::new("one/node_modules"), true),
637 (Path::new("one/node_modules/a"), true),
638 (Path::new("one/node_modules/a/a1.js"), true),
639 (Path::new("one/node_modules/a/a2.js"), true),
640 (Path::new("one/node_modules/b"), true),
641 (Path::new("one/node_modules/b/b1.js"), true),
642 (Path::new("one/node_modules/b/b2.js"), true),
643 (Path::new("one/node_modules/c"), true),
644 (Path::new("two"), false),
645 (Path::new("two/x.js"), false),
646 (Path::new("two/y.js"), false),
647 ]
648 );
649
650 assert_eq!(
651 buffer.read(cx).file().unwrap().path().as_ref(),
652 Path::new("one/node_modules/a/a2.js")
653 );
654
655 // Only the newly-expanded directory is scanned.
656 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
657 });
658
659 // No work happens when files and directories change within an unloaded directory.
660 let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
661 fs.create_dir("/root/one/node_modules/c/lib".as_ref())
662 .await
663 .unwrap();
664 cx.executor().run_until_parked();
665 assert_eq!(
666 fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
667 0
668 );
669}
670
671#[gpui::test]
672async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
673 init_test(cx);
674 let fs = FakeFs::new(cx.background_executor.clone());
675 fs.insert_tree(
676 "/root",
677 json!({
678 ".gitignore": "node_modules\n",
679 "a": {
680 "a.js": "",
681 },
682 "b": {
683 "b.js": "",
684 },
685 "node_modules": {
686 "c": {
687 "c.js": "",
688 },
689 "d": {
690 "d.js": "",
691 "e": {
692 "e1.js": "",
693 "e2.js": "",
694 },
695 "f": {
696 "f1.js": "",
697 "f2.js": "",
698 }
699 },
700 },
701 }),
702 )
703 .await;
704
705 let tree = Worktree::local(
706 build_client(cx),
707 Path::new("/root"),
708 true,
709 fs.clone(),
710 Default::default(),
711 &mut cx.to_async(),
712 )
713 .await
714 .unwrap();
715
716 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
717 .await;
718
719 // Open a file within the gitignored directory, forcing some of its
720 // subdirectories to be read, but not all.
721 let read_dir_count_1 = fs.read_dir_call_count();
722 tree.read_with(cx, |tree, _| {
723 tree.as_local()
724 .unwrap()
725 .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
726 })
727 .recv()
728 .await;
729
730 // Those subdirectories are now loaded.
731 tree.read_with(cx, |tree, _| {
732 assert_eq!(
733 tree.entries(true)
734 .map(|e| (e.path.as_ref(), e.is_ignored))
735 .collect::<Vec<_>>(),
736 &[
737 (Path::new(""), false),
738 (Path::new(".gitignore"), false),
739 (Path::new("a"), false),
740 (Path::new("a/a.js"), false),
741 (Path::new("b"), false),
742 (Path::new("b/b.js"), false),
743 (Path::new("node_modules"), true),
744 (Path::new("node_modules/c"), true),
745 (Path::new("node_modules/d"), true),
746 (Path::new("node_modules/d/d.js"), true),
747 (Path::new("node_modules/d/e"), true),
748 (Path::new("node_modules/d/f"), true),
749 ]
750 );
751 });
752 let read_dir_count_2 = fs.read_dir_call_count();
753 assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
754
755 // Update the gitignore so that node_modules is no longer ignored,
756 // but a subdirectory is ignored
757 fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
758 .await
759 .unwrap();
760 cx.executor().run_until_parked();
761
762 // All of the directories that are no longer ignored are now loaded.
763 tree.read_with(cx, |tree, _| {
764 assert_eq!(
765 tree.entries(true)
766 .map(|e| (e.path.as_ref(), e.is_ignored))
767 .collect::<Vec<_>>(),
768 &[
769 (Path::new(""), false),
770 (Path::new(".gitignore"), false),
771 (Path::new("a"), false),
772 (Path::new("a/a.js"), false),
773 (Path::new("b"), false),
774 (Path::new("b/b.js"), false),
775 // This directory is no longer ignored
776 (Path::new("node_modules"), false),
777 (Path::new("node_modules/c"), false),
778 (Path::new("node_modules/c/c.js"), false),
779 (Path::new("node_modules/d"), false),
780 (Path::new("node_modules/d/d.js"), false),
781 // This subdirectory is now ignored
782 (Path::new("node_modules/d/e"), true),
783 (Path::new("node_modules/d/f"), false),
784 (Path::new("node_modules/d/f/f1.js"), false),
785 (Path::new("node_modules/d/f/f2.js"), false),
786 ]
787 );
788 });
789
790 // Each of the newly-loaded directories is scanned only once.
791 let read_dir_count_3 = fs.read_dir_call_count();
792 assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
793}
794
795#[gpui::test(iterations = 10)]
796async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
797 init_test(cx);
798 cx.update(|cx| {
799 cx.update_global::<SettingsStore, _>(|store, cx| {
800 store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
801 project_settings.file_scan_exclusions = Some(Vec::new());
802 });
803 });
804 });
805 let fs = FakeFs::new(cx.background_executor.clone());
806 fs.insert_tree(
807 "/root",
808 json!({
809 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
810 "tree": {
811 ".git": {},
812 ".gitignore": "ignored-dir\n",
813 "tracked-dir": {
814 "tracked-file1": "",
815 "ancestor-ignored-file1": "",
816 },
817 "ignored-dir": {
818 "ignored-file1": ""
819 }
820 }
821 }),
822 )
823 .await;
824
825 let tree = Worktree::local(
826 build_client(cx),
827 "/root/tree".as_ref(),
828 true,
829 fs.clone(),
830 Default::default(),
831 &mut cx.to_async(),
832 )
833 .await
834 .unwrap();
835 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
836 .await;
837
838 tree.read_with(cx, |tree, _| {
839 tree.as_local()
840 .unwrap()
841 .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
842 })
843 .recv()
844 .await;
845
846 cx.read(|cx| {
847 let tree = tree.read(cx);
848 assert!(
849 !tree
850 .entry_for_path("tracked-dir/tracked-file1")
851 .unwrap()
852 .is_ignored
853 );
854 assert!(
855 tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
856 .unwrap()
857 .is_ignored
858 );
859 assert!(
860 tree.entry_for_path("ignored-dir/ignored-file1")
861 .unwrap()
862 .is_ignored
863 );
864 });
865
866 fs.create_file(
867 "/root/tree/tracked-dir/tracked-file2".as_ref(),
868 Default::default(),
869 )
870 .await
871 .unwrap();
872 fs.create_file(
873 "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
874 Default::default(),
875 )
876 .await
877 .unwrap();
878 fs.create_file(
879 "/root/tree/ignored-dir/ignored-file2".as_ref(),
880 Default::default(),
881 )
882 .await
883 .unwrap();
884
885 cx.executor().run_until_parked();
886 cx.read(|cx| {
887 let tree = tree.read(cx);
888 assert!(
889 !tree
890 .entry_for_path("tracked-dir/tracked-file2")
891 .unwrap()
892 .is_ignored
893 );
894 assert!(
895 tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
896 .unwrap()
897 .is_ignored
898 );
899 assert!(
900 tree.entry_for_path("ignored-dir/ignored-file2")
901 .unwrap()
902 .is_ignored
903 );
904 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
905 });
906}
907
908#[gpui::test]
909async fn test_write_file(cx: &mut TestAppContext) {
910 init_test(cx);
911 cx.executor().allow_parking();
912 let dir = temp_tree(json!({
913 ".git": {},
914 ".gitignore": "ignored-dir\n",
915 "tracked-dir": {},
916 "ignored-dir": {}
917 }));
918
919 let tree = Worktree::local(
920 build_client(cx),
921 dir.path(),
922 true,
923 Arc::new(RealFs),
924 Default::default(),
925 &mut cx.to_async(),
926 )
927 .await
928 .unwrap();
929 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
930 .await;
931 tree.flush_fs_events(cx).await;
932
933 tree.update(cx, |tree, cx| {
934 tree.as_local().unwrap().write_file(
935 Path::new("tracked-dir/file.txt"),
936 "hello".into(),
937 Default::default(),
938 cx,
939 )
940 })
941 .await
942 .unwrap();
943 tree.update(cx, |tree, cx| {
944 tree.as_local().unwrap().write_file(
945 Path::new("ignored-dir/file.txt"),
946 "world".into(),
947 Default::default(),
948 cx,
949 )
950 })
951 .await
952 .unwrap();
953
954 tree.read_with(cx, |tree, _| {
955 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
956 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
957 assert!(!tracked.is_ignored);
958 assert!(ignored.is_ignored);
959 });
960}
961
962#[gpui::test]
963async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
964 init_test(cx);
965 cx.executor().allow_parking();
966 let dir = temp_tree(json!({
967 ".gitignore": "**/target\n/node_modules\n",
968 "target": {
969 "index": "blah2"
970 },
971 "node_modules": {
972 ".DS_Store": "",
973 "prettier": {
974 "package.json": "{}",
975 },
976 },
977 "src": {
978 ".DS_Store": "",
979 "foo": {
980 "foo.rs": "mod another;\n",
981 "another.rs": "// another",
982 },
983 "bar": {
984 "bar.rs": "// bar",
985 },
986 "lib.rs": "mod foo;\nmod bar;\n",
987 },
988 ".DS_Store": "",
989 }));
990 cx.update(|cx| {
991 cx.update_global::<SettingsStore, _>(|store, cx| {
992 store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
993 project_settings.file_scan_exclusions =
994 Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
995 });
996 });
997 });
998
999 let tree = Worktree::local(
1000 build_client(cx),
1001 dir.path(),
1002 true,
1003 Arc::new(RealFs),
1004 Default::default(),
1005 &mut cx.to_async(),
1006 )
1007 .await
1008 .unwrap();
1009 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1010 .await;
1011 tree.flush_fs_events(cx).await;
1012 tree.read_with(cx, |tree, _| {
1013 check_worktree_entries(
1014 tree,
1015 &[
1016 "src/foo/foo.rs",
1017 "src/foo/another.rs",
1018 "node_modules/.DS_Store",
1019 "src/.DS_Store",
1020 ".DS_Store",
1021 ],
1022 &["target", "node_modules"],
1023 &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
1024 )
1025 });
1026
1027 cx.update(|cx| {
1028 cx.update_global::<SettingsStore, _>(|store, cx| {
1029 store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
1030 project_settings.file_scan_exclusions =
1031 Some(vec!["**/node_modules/**".to_string()]);
1032 });
1033 });
1034 });
1035 tree.flush_fs_events(cx).await;
1036 cx.executor().run_until_parked();
1037 tree.read_with(cx, |tree, _| {
1038 check_worktree_entries(
1039 tree,
1040 &[
1041 "node_modules/prettier/package.json",
1042 "node_modules/.DS_Store",
1043 "node_modules",
1044 ],
1045 &["target"],
1046 &[
1047 ".gitignore",
1048 "src/lib.rs",
1049 "src/bar/bar.rs",
1050 "src/foo/foo.rs",
1051 "src/foo/another.rs",
1052 "src/.DS_Store",
1053 ".DS_Store",
1054 ],
1055 )
1056 });
1057}
1058
1059#[gpui::test]
1060async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
1061 init_test(cx);
1062 cx.executor().allow_parking();
1063 let dir = temp_tree(json!({
1064 ".git": {
1065 "HEAD": "ref: refs/heads/main\n",
1066 "foo": "bar",
1067 },
1068 ".gitignore": "**/target\n/node_modules\ntest_output\n",
1069 "target": {
1070 "index": "blah2"
1071 },
1072 "node_modules": {
1073 ".DS_Store": "",
1074 "prettier": {
1075 "package.json": "{}",
1076 },
1077 },
1078 "src": {
1079 ".DS_Store": "",
1080 "foo": {
1081 "foo.rs": "mod another;\n",
1082 "another.rs": "// another",
1083 },
1084 "bar": {
1085 "bar.rs": "// bar",
1086 },
1087 "lib.rs": "mod foo;\nmod bar;\n",
1088 },
1089 ".DS_Store": "",
1090 }));
1091 cx.update(|cx| {
1092 cx.update_global::<SettingsStore, _>(|store, cx| {
1093 store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
1094 project_settings.file_scan_exclusions = Some(vec![
1095 "**/.git".to_string(),
1096 "node_modules/".to_string(),
1097 "build_output".to_string(),
1098 ]);
1099 });
1100 });
1101 });
1102
1103 let tree = Worktree::local(
1104 build_client(cx),
1105 dir.path(),
1106 true,
1107 Arc::new(RealFs),
1108 Default::default(),
1109 &mut cx.to_async(),
1110 )
1111 .await
1112 .unwrap();
1113 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1114 .await;
1115 tree.flush_fs_events(cx).await;
1116 tree.read_with(cx, |tree, _| {
1117 check_worktree_entries(
1118 tree,
1119 &[
1120 ".git/HEAD",
1121 ".git/foo",
1122 "node_modules",
1123 "node_modules/.DS_Store",
1124 "node_modules/prettier",
1125 "node_modules/prettier/package.json",
1126 ],
1127 &["target"],
1128 &[
1129 ".DS_Store",
1130 "src/.DS_Store",
1131 "src/lib.rs",
1132 "src/foo/foo.rs",
1133 "src/foo/another.rs",
1134 "src/bar/bar.rs",
1135 ".gitignore",
1136 ],
1137 )
1138 });
1139
1140 let new_excluded_dir = dir.path().join("build_output");
1141 let new_ignored_dir = dir.path().join("test_output");
1142 std::fs::create_dir_all(&new_excluded_dir)
1143 .unwrap_or_else(|e| panic!("Failed to create a {new_excluded_dir:?} directory: {e}"));
1144 std::fs::create_dir_all(&new_ignored_dir)
1145 .unwrap_or_else(|e| panic!("Failed to create a {new_ignored_dir:?} directory: {e}"));
1146 let node_modules_dir = dir.path().join("node_modules");
1147 let dot_git_dir = dir.path().join(".git");
1148 let src_dir = dir.path().join("src");
1149 for existing_dir in [&node_modules_dir, &dot_git_dir, &src_dir] {
1150 assert!(
1151 existing_dir.is_dir(),
1152 "Expect {existing_dir:?} to be present in the FS already"
1153 );
1154 }
1155
1156 for directory_for_new_file in [
1157 new_excluded_dir,
1158 new_ignored_dir,
1159 node_modules_dir,
1160 dot_git_dir,
1161 src_dir,
1162 ] {
1163 std::fs::write(directory_for_new_file.join("new_file"), "new file contents")
1164 .unwrap_or_else(|e| {
1165 panic!("Failed to create in {directory_for_new_file:?} a new file: {e}")
1166 });
1167 }
1168 tree.flush_fs_events(cx).await;
1169
1170 tree.read_with(cx, |tree, _| {
1171 check_worktree_entries(
1172 tree,
1173 &[
1174 ".git/HEAD",
1175 ".git/foo",
1176 ".git/new_file",
1177 "node_modules",
1178 "node_modules/.DS_Store",
1179 "node_modules/prettier",
1180 "node_modules/prettier/package.json",
1181 "node_modules/new_file",
1182 "build_output",
1183 "build_output/new_file",
1184 "test_output/new_file",
1185 ],
1186 &["target", "test_output"],
1187 &[
1188 ".DS_Store",
1189 "src/.DS_Store",
1190 "src/lib.rs",
1191 "src/foo/foo.rs",
1192 "src/foo/another.rs",
1193 "src/bar/bar.rs",
1194 "src/new_file",
1195 ".gitignore",
1196 ],
1197 )
1198 });
1199}
1200
1201#[gpui::test(iterations = 30)]
1202async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
1203 init_test(cx);
1204 let fs = FakeFs::new(cx.background_executor.clone());
1205 fs.insert_tree(
1206 "/root",
1207 json!({
1208 "b": {},
1209 "c": {},
1210 "d": {},
1211 }),
1212 )
1213 .await;
1214
1215 let tree = Worktree::local(
1216 build_client(cx),
1217 "/root".as_ref(),
1218 true,
1219 fs,
1220 Default::default(),
1221 &mut cx.to_async(),
1222 )
1223 .await
1224 .unwrap();
1225
1226 let snapshot1 = tree.update(cx, |tree, cx| {
1227 let tree = tree.as_local_mut().unwrap();
1228 let snapshot = Arc::new(Mutex::new(tree.snapshot()));
1229 let _ = tree.observe_updates(0, cx, {
1230 let snapshot = snapshot.clone();
1231 move |update| {
1232 snapshot.lock().apply_remote_update(update).unwrap();
1233 async { true }
1234 }
1235 });
1236 snapshot
1237 });
1238
1239 let entry = tree
1240 .update(cx, |tree, cx| {
1241 tree.as_local_mut()
1242 .unwrap()
1243 .create_entry("a/e".as_ref(), true, cx)
1244 })
1245 .await
1246 .unwrap()
1247 .unwrap();
1248 assert!(entry.is_dir());
1249
1250 cx.executor().run_until_parked();
1251 tree.read_with(cx, |tree, _| {
1252 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
1253 });
1254
1255 let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
1256 assert_eq!(
1257 snapshot1.lock().entries(true).collect::<Vec<_>>(),
1258 snapshot2.entries(true).collect::<Vec<_>>()
1259 );
1260}
1261
1262#[gpui::test]
1263async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
1264 init_test(cx);
1265 cx.executor().allow_parking();
1266 let client_fake = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
1267
1268 let fs_fake = FakeFs::new(cx.background_executor.clone());
1269 fs_fake
1270 .insert_tree(
1271 "/root",
1272 json!({
1273 "a": {},
1274 }),
1275 )
1276 .await;
1277
1278 let tree_fake = Worktree::local(
1279 client_fake,
1280 "/root".as_ref(),
1281 true,
1282 fs_fake,
1283 Default::default(),
1284 &mut cx.to_async(),
1285 )
1286 .await
1287 .unwrap();
1288
1289 let entry = tree_fake
1290 .update(cx, |tree, cx| {
1291 tree.as_local_mut()
1292 .unwrap()
1293 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1294 })
1295 .await
1296 .unwrap()
1297 .unwrap();
1298 assert!(entry.is_file());
1299
1300 cx.executor().run_until_parked();
1301 tree_fake.read_with(cx, |tree, _| {
1302 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1303 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1304 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1305 });
1306
1307 let client_real = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
1308
1309 let fs_real = Arc::new(RealFs);
1310 let temp_root = temp_tree(json!({
1311 "a": {}
1312 }));
1313
1314 let tree_real = Worktree::local(
1315 client_real,
1316 temp_root.path(),
1317 true,
1318 fs_real,
1319 Default::default(),
1320 &mut cx.to_async(),
1321 )
1322 .await
1323 .unwrap();
1324
1325 let entry = tree_real
1326 .update(cx, |tree, cx| {
1327 tree.as_local_mut()
1328 .unwrap()
1329 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1330 })
1331 .await
1332 .unwrap()
1333 .unwrap();
1334 assert!(entry.is_file());
1335
1336 cx.executor().run_until_parked();
1337 tree_real.read_with(cx, |tree, _| {
1338 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1339 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1340 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1341 });
1342
1343 // Test smallest change
1344 let entry = tree_real
1345 .update(cx, |tree, cx| {
1346 tree.as_local_mut()
1347 .unwrap()
1348 .create_entry("a/b/c/e.txt".as_ref(), false, cx)
1349 })
1350 .await
1351 .unwrap()
1352 .unwrap();
1353 assert!(entry.is_file());
1354
1355 cx.executor().run_until_parked();
1356 tree_real.read_with(cx, |tree, _| {
1357 assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
1358 });
1359
1360 // Test largest change
1361 let entry = tree_real
1362 .update(cx, |tree, cx| {
1363 tree.as_local_mut()
1364 .unwrap()
1365 .create_entry("d/e/f/g.txt".as_ref(), false, cx)
1366 })
1367 .await
1368 .unwrap()
1369 .unwrap();
1370 assert!(entry.is_file());
1371
1372 cx.executor().run_until_parked();
1373 tree_real.read_with(cx, |tree, _| {
1374 assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
1375 assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
1376 assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
1377 assert!(tree.entry_for_path("d/").unwrap().is_dir());
1378 });
1379}
1380
1381#[gpui::test(iterations = 100)]
1382async fn test_random_worktree_operations_during_initial_scan(
1383 cx: &mut TestAppContext,
1384 mut rng: StdRng,
1385) {
1386 init_test(cx);
1387 let operations = env::var("OPERATIONS")
1388 .map(|o| o.parse().unwrap())
1389 .unwrap_or(5);
1390 let initial_entries = env::var("INITIAL_ENTRIES")
1391 .map(|o| o.parse().unwrap())
1392 .unwrap_or(20);
1393
1394 let root_dir = Path::new("/test");
1395 let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
1396 fs.as_fake().insert_tree(root_dir, json!({})).await;
1397 for _ in 0..initial_entries {
1398 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1399 }
1400 log::info!("generated initial tree");
1401
1402 let worktree = Worktree::local(
1403 build_client(cx),
1404 root_dir,
1405 true,
1406 fs.clone(),
1407 Default::default(),
1408 &mut cx.to_async(),
1409 )
1410 .await
1411 .unwrap();
1412
1413 let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
1414 let updates = Arc::new(Mutex::new(Vec::new()));
1415 worktree.update(cx, |tree, cx| {
1416 check_worktree_change_events(tree, cx);
1417
1418 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1419 let updates = updates.clone();
1420 move |update| {
1421 updates.lock().push(update);
1422 async { true }
1423 }
1424 });
1425 });
1426
1427 for _ in 0..operations {
1428 worktree
1429 .update(cx, |worktree, cx| {
1430 randomly_mutate_worktree(worktree, &mut rng, cx)
1431 })
1432 .await
1433 .log_err();
1434 worktree.read_with(cx, |tree, _| {
1435 tree.as_local().unwrap().snapshot().check_invariants(true)
1436 });
1437
1438 if rng.gen_bool(0.6) {
1439 snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
1440 }
1441 }
1442
1443 worktree
1444 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1445 .await;
1446
1447 cx.executor().run_until_parked();
1448
1449 let final_snapshot = worktree.read_with(cx, |tree, _| {
1450 let tree = tree.as_local().unwrap();
1451 let snapshot = tree.snapshot();
1452 snapshot.check_invariants(true);
1453 snapshot
1454 });
1455
1456 for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
1457 let mut updated_snapshot = snapshot.clone();
1458 for update in updates.lock().iter() {
1459 if update.scan_id >= updated_snapshot.scan_id() as u64 {
1460 updated_snapshot
1461 .apply_remote_update(update.clone())
1462 .unwrap();
1463 }
1464 }
1465
1466 assert_eq!(
1467 updated_snapshot.entries(true).collect::<Vec<_>>(),
1468 final_snapshot.entries(true).collect::<Vec<_>>(),
1469 "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
1470 );
1471 }
1472}
1473
1474#[gpui::test(iterations = 100)]
1475async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
1476 init_test(cx);
1477 let operations = env::var("OPERATIONS")
1478 .map(|o| o.parse().unwrap())
1479 .unwrap_or(40);
1480 let initial_entries = env::var("INITIAL_ENTRIES")
1481 .map(|o| o.parse().unwrap())
1482 .unwrap_or(20);
1483
1484 let root_dir = Path::new("/test");
1485 let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
1486 fs.as_fake().insert_tree(root_dir, json!({})).await;
1487 for _ in 0..initial_entries {
1488 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1489 }
1490 log::info!("generated initial tree");
1491
1492 let worktree = Worktree::local(
1493 build_client(cx),
1494 root_dir,
1495 true,
1496 fs.clone(),
1497 Default::default(),
1498 &mut cx.to_async(),
1499 )
1500 .await
1501 .unwrap();
1502
1503 let updates = Arc::new(Mutex::new(Vec::new()));
1504 worktree.update(cx, |tree, cx| {
1505 check_worktree_change_events(tree, cx);
1506
1507 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1508 let updates = updates.clone();
1509 move |update| {
1510 updates.lock().push(update);
1511 async { true }
1512 }
1513 });
1514 });
1515
1516 worktree
1517 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1518 .await;
1519
1520 fs.as_fake().pause_events();
1521 let mut snapshots = Vec::new();
1522 let mut mutations_len = operations;
1523 while mutations_len > 1 {
1524 if rng.gen_bool(0.2) {
1525 worktree
1526 .update(cx, |worktree, cx| {
1527 randomly_mutate_worktree(worktree, &mut rng, cx)
1528 })
1529 .await
1530 .log_err();
1531 } else {
1532 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1533 }
1534
1535 let buffered_event_count = fs.as_fake().buffered_event_count();
1536 if buffered_event_count > 0 && rng.gen_bool(0.3) {
1537 let len = rng.gen_range(0..=buffered_event_count);
1538 log::info!("flushing {} events", len);
1539 fs.as_fake().flush_events(len);
1540 } else {
1541 randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
1542 mutations_len -= 1;
1543 }
1544
1545 cx.executor().run_until_parked();
1546 if rng.gen_bool(0.2) {
1547 log::info!("storing snapshot {}", snapshots.len());
1548 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1549 snapshots.push(snapshot);
1550 }
1551 }
1552
1553 log::info!("quiescing");
1554 fs.as_fake().flush_events(usize::MAX);
1555 cx.executor().run_until_parked();
1556
1557 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1558 snapshot.check_invariants(true);
1559 let expanded_paths = snapshot
1560 .expanded_entries()
1561 .map(|e| e.path.clone())
1562 .collect::<Vec<_>>();
1563
1564 {
1565 let new_worktree = Worktree::local(
1566 build_client(cx),
1567 root_dir,
1568 true,
1569 fs.clone(),
1570 Default::default(),
1571 &mut cx.to_async(),
1572 )
1573 .await
1574 .unwrap();
1575 new_worktree
1576 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1577 .await;
1578 new_worktree
1579 .update(cx, |tree, _| {
1580 tree.as_local_mut()
1581 .unwrap()
1582 .refresh_entries_for_paths(expanded_paths)
1583 })
1584 .recv()
1585 .await;
1586 let new_snapshot =
1587 new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1588 assert_eq!(
1589 snapshot.entries_without_ids(true),
1590 new_snapshot.entries_without_ids(true)
1591 );
1592 }
1593
1594 for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
1595 for update in updates.lock().iter() {
1596 if update.scan_id >= prev_snapshot.scan_id() as u64 {
1597 prev_snapshot.apply_remote_update(update.clone()).unwrap();
1598 }
1599 }
1600
1601 assert_eq!(
1602 prev_snapshot
1603 .entries(true)
1604 .map(ignore_pending_dir)
1605 .collect::<Vec<_>>(),
1606 snapshot
1607 .entries(true)
1608 .map(ignore_pending_dir)
1609 .collect::<Vec<_>>(),
1610 "wrong updates after snapshot {i}: {updates:#?}",
1611 );
1612 }
1613
1614 fn ignore_pending_dir(entry: &Entry) -> Entry {
1615 let mut entry = entry.clone();
1616 if entry.kind.is_dir() {
1617 entry.kind = EntryKind::Dir
1618 }
1619 entry
1620 }
1621}
1622
1623// The worktree's `UpdatedEntries` event can be used to follow along with
1624// all changes to the worktree's snapshot.
1625fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
1626 let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
1627 cx.subscribe(&cx.handle(), move |tree, _, event, _| {
1628 if let Event::UpdatedEntries(changes) = event {
1629 for (path, _, change_type) in changes.iter() {
1630 let entry = tree.entry_for_path(&path).cloned();
1631 let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
1632 Ok(ix) | Err(ix) => ix,
1633 };
1634 match change_type {
1635 PathChange::Added => entries.insert(ix, entry.unwrap()),
1636 PathChange::Removed => drop(entries.remove(ix)),
1637 PathChange::Updated => {
1638 let entry = entry.unwrap();
1639 let existing_entry = entries.get_mut(ix).unwrap();
1640 assert_eq!(existing_entry.path, entry.path);
1641 *existing_entry = entry;
1642 }
1643 PathChange::AddedOrUpdated | PathChange::Loaded => {
1644 let entry = entry.unwrap();
1645 if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
1646 *entries.get_mut(ix).unwrap() = entry;
1647 } else {
1648 entries.insert(ix, entry);
1649 }
1650 }
1651 }
1652 }
1653
1654 let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
1655 assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
1656 }
1657 })
1658 .detach();
1659}
1660
1661fn randomly_mutate_worktree(
1662 worktree: &mut Worktree,
1663 rng: &mut impl Rng,
1664 cx: &mut ModelContext<Worktree>,
1665) -> Task<Result<()>> {
1666 log::info!("mutating worktree");
1667 let worktree = worktree.as_local_mut().unwrap();
1668 let snapshot = worktree.snapshot();
1669 let entry = snapshot.entries(false).choose(rng).unwrap();
1670
1671 match rng.gen_range(0_u32..100) {
1672 0..=33 if entry.path.as_ref() != Path::new("") => {
1673 log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
1674 worktree.delete_entry(entry.id, cx).unwrap()
1675 }
1676 ..=66 if entry.path.as_ref() != Path::new("") => {
1677 let other_entry = snapshot.entries(false).choose(rng).unwrap();
1678 let new_parent_path = if other_entry.is_dir() {
1679 other_entry.path.clone()
1680 } else {
1681 other_entry.path.parent().unwrap().into()
1682 };
1683 let mut new_path = new_parent_path.join(random_filename(rng));
1684 if new_path.starts_with(&entry.path) {
1685 new_path = random_filename(rng).into();
1686 }
1687
1688 log::info!(
1689 "renaming entry {:?} ({}) to {:?}",
1690 entry.path,
1691 entry.id.0,
1692 new_path
1693 );
1694 let task = worktree.rename_entry(entry.id, new_path, cx);
1695 cx.background_executor().spawn(async move {
1696 task.await?.unwrap();
1697 Ok(())
1698 })
1699 }
1700 _ => {
1701 if entry.is_dir() {
1702 let child_path = entry.path.join(random_filename(rng));
1703 let is_dir = rng.gen_bool(0.3);
1704 log::info!(
1705 "creating {} at {:?}",
1706 if is_dir { "dir" } else { "file" },
1707 child_path,
1708 );
1709 let task = worktree.create_entry(child_path, is_dir, cx);
1710 cx.background_executor().spawn(async move {
1711 task.await?;
1712 Ok(())
1713 })
1714 } else {
1715 log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
1716 let task =
1717 worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
1718 cx.background_executor().spawn(async move {
1719 task.await?;
1720 Ok(())
1721 })
1722 }
1723 }
1724 }
1725}
1726
1727async fn randomly_mutate_fs(
1728 fs: &Arc<dyn Fs>,
1729 root_path: &Path,
1730 insertion_probability: f64,
1731 rng: &mut impl Rng,
1732) {
1733 log::info!("mutating fs");
1734 let mut files = Vec::new();
1735 let mut dirs = Vec::new();
1736 for path in fs.as_fake().paths(false) {
1737 if path.starts_with(root_path) {
1738 if fs.is_file(&path).await {
1739 files.push(path);
1740 } else {
1741 dirs.push(path);
1742 }
1743 }
1744 }
1745
1746 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
1747 let path = dirs.choose(rng).unwrap();
1748 let new_path = path.join(random_filename(rng));
1749
1750 if rng.gen() {
1751 log::info!(
1752 "creating dir {:?}",
1753 new_path.strip_prefix(root_path).unwrap()
1754 );
1755 fs.create_dir(&new_path).await.unwrap();
1756 } else {
1757 log::info!(
1758 "creating file {:?}",
1759 new_path.strip_prefix(root_path).unwrap()
1760 );
1761 fs.create_file(&new_path, Default::default()).await.unwrap();
1762 }
1763 } else if rng.gen_bool(0.05) {
1764 let ignore_dir_path = dirs.choose(rng).unwrap();
1765 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
1766
1767 let subdirs = dirs
1768 .iter()
1769 .filter(|d| d.starts_with(&ignore_dir_path))
1770 .cloned()
1771 .collect::<Vec<_>>();
1772 let subfiles = files
1773 .iter()
1774 .filter(|d| d.starts_with(&ignore_dir_path))
1775 .cloned()
1776 .collect::<Vec<_>>();
1777 let files_to_ignore = {
1778 let len = rng.gen_range(0..=subfiles.len());
1779 subfiles.choose_multiple(rng, len)
1780 };
1781 let dirs_to_ignore = {
1782 let len = rng.gen_range(0..subdirs.len());
1783 subdirs.choose_multiple(rng, len)
1784 };
1785
1786 let mut ignore_contents = String::new();
1787 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
1788 writeln!(
1789 ignore_contents,
1790 "{}",
1791 path_to_ignore
1792 .strip_prefix(&ignore_dir_path)
1793 .unwrap()
1794 .to_str()
1795 .unwrap()
1796 )
1797 .unwrap();
1798 }
1799 log::info!(
1800 "creating gitignore {:?} with contents:\n{}",
1801 ignore_path.strip_prefix(&root_path).unwrap(),
1802 ignore_contents
1803 );
1804 fs.save(
1805 &ignore_path,
1806 &ignore_contents.as_str().into(),
1807 Default::default(),
1808 )
1809 .await
1810 .unwrap();
1811 } else {
1812 let old_path = {
1813 let file_path = files.choose(rng);
1814 let dir_path = dirs[1..].choose(rng);
1815 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
1816 };
1817
1818 let is_rename = rng.gen();
1819 if is_rename {
1820 let new_path_parent = dirs
1821 .iter()
1822 .filter(|d| !d.starts_with(old_path))
1823 .choose(rng)
1824 .unwrap();
1825
1826 let overwrite_existing_dir =
1827 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
1828 let new_path = if overwrite_existing_dir {
1829 fs.remove_dir(
1830 &new_path_parent,
1831 RemoveOptions {
1832 recursive: true,
1833 ignore_if_not_exists: true,
1834 },
1835 )
1836 .await
1837 .unwrap();
1838 new_path_parent.to_path_buf()
1839 } else {
1840 new_path_parent.join(random_filename(rng))
1841 };
1842
1843 log::info!(
1844 "renaming {:?} to {}{:?}",
1845 old_path.strip_prefix(&root_path).unwrap(),
1846 if overwrite_existing_dir {
1847 "overwrite "
1848 } else {
1849 ""
1850 },
1851 new_path.strip_prefix(&root_path).unwrap()
1852 );
1853 fs.rename(
1854 &old_path,
1855 &new_path,
1856 fs::RenameOptions {
1857 overwrite: true,
1858 ignore_if_exists: true,
1859 },
1860 )
1861 .await
1862 .unwrap();
1863 } else if fs.is_file(&old_path).await {
1864 log::info!(
1865 "deleting file {:?}",
1866 old_path.strip_prefix(&root_path).unwrap()
1867 );
1868 fs.remove_file(old_path, Default::default()).await.unwrap();
1869 } else {
1870 log::info!(
1871 "deleting dir {:?}",
1872 old_path.strip_prefix(&root_path).unwrap()
1873 );
1874 fs.remove_dir(
1875 &old_path,
1876 RemoveOptions {
1877 recursive: true,
1878 ignore_if_not_exists: true,
1879 },
1880 )
1881 .await
1882 .unwrap();
1883 }
1884 }
1885}
1886
1887fn random_filename(rng: &mut impl Rng) -> String {
1888 (0..6)
1889 .map(|_| rng.sample(rand::distributions::Alphanumeric))
1890 .map(char::from)
1891 .collect()
1892}
1893
1894#[gpui::test]
1895async fn test_rename_work_directory(cx: &mut TestAppContext) {
1896 init_test(cx);
1897 cx.executor().allow_parking();
1898 let root = temp_tree(json!({
1899 "projects": {
1900 "project1": {
1901 "a": "",
1902 "b": "",
1903 }
1904 },
1905
1906 }));
1907 let root_path = root.path();
1908
1909 let tree = Worktree::local(
1910 build_client(cx),
1911 root_path,
1912 true,
1913 Arc::new(RealFs),
1914 Default::default(),
1915 &mut cx.to_async(),
1916 )
1917 .await
1918 .unwrap();
1919
1920 let repo = git_init(&root_path.join("projects/project1"));
1921 git_add("a", &repo);
1922 git_commit("init", &repo);
1923 std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
1924
1925 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1926 .await;
1927
1928 tree.flush_fs_events(cx).await;
1929
1930 cx.read(|cx| {
1931 let tree = tree.read(cx);
1932 let (work_dir, _) = tree.repositories().next().unwrap();
1933 assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
1934 assert_eq!(
1935 tree.status_for_file(Path::new("projects/project1/a")),
1936 Some(GitFileStatus::Modified)
1937 );
1938 assert_eq!(
1939 tree.status_for_file(Path::new("projects/project1/b")),
1940 Some(GitFileStatus::Added)
1941 );
1942 });
1943
1944 std::fs::rename(
1945 root_path.join("projects/project1"),
1946 root_path.join("projects/project2"),
1947 )
1948 .ok();
1949 tree.flush_fs_events(cx).await;
1950
1951 cx.read(|cx| {
1952 let tree = tree.read(cx);
1953 let (work_dir, _) = tree.repositories().next().unwrap();
1954 assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
1955 assert_eq!(
1956 tree.status_for_file(Path::new("projects/project2/a")),
1957 Some(GitFileStatus::Modified)
1958 );
1959 assert_eq!(
1960 tree.status_for_file(Path::new("projects/project2/b")),
1961 Some(GitFileStatus::Added)
1962 );
1963 });
1964}
1965
1966#[gpui::test]
1967async fn test_git_repository_for_path(cx: &mut TestAppContext) {
1968 init_test(cx);
1969 cx.executor().allow_parking();
1970 let root = temp_tree(json!({
1971 "c.txt": "",
1972 "dir1": {
1973 ".git": {},
1974 "deps": {
1975 "dep1": {
1976 ".git": {},
1977 "src": {
1978 "a.txt": ""
1979 }
1980 }
1981 },
1982 "src": {
1983 "b.txt": ""
1984 }
1985 },
1986 }));
1987
1988 let tree = Worktree::local(
1989 build_client(cx),
1990 root.path(),
1991 true,
1992 Arc::new(RealFs),
1993 Default::default(),
1994 &mut cx.to_async(),
1995 )
1996 .await
1997 .unwrap();
1998
1999 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2000 .await;
2001 tree.flush_fs_events(cx).await;
2002
2003 tree.read_with(cx, |tree, _cx| {
2004 let tree = tree.as_local().unwrap();
2005
2006 assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
2007
2008 let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
2009 assert_eq!(
2010 entry
2011 .work_directory(tree)
2012 .map(|directory| directory.as_ref().to_owned()),
2013 Some(Path::new("dir1").to_owned())
2014 );
2015
2016 let entry = tree
2017 .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
2018 .unwrap();
2019 assert_eq!(
2020 entry
2021 .work_directory(tree)
2022 .map(|directory| directory.as_ref().to_owned()),
2023 Some(Path::new("dir1/deps/dep1").to_owned())
2024 );
2025
2026 let entries = tree.files(false, 0);
2027
2028 let paths_with_repos = tree
2029 .entries_with_repositories(entries)
2030 .map(|(entry, repo)| {
2031 (
2032 entry.path.as_ref(),
2033 repo.and_then(|repo| {
2034 repo.work_directory(&tree)
2035 .map(|work_directory| work_directory.0.to_path_buf())
2036 }),
2037 )
2038 })
2039 .collect::<Vec<_>>();
2040
2041 assert_eq!(
2042 paths_with_repos,
2043 &[
2044 (Path::new("c.txt"), None),
2045 (
2046 Path::new("dir1/deps/dep1/src/a.txt"),
2047 Some(Path::new("dir1/deps/dep1").into())
2048 ),
2049 (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
2050 ]
2051 );
2052 });
2053
2054 let repo_update_events = Arc::new(Mutex::new(vec![]));
2055 tree.update(cx, |_, cx| {
2056 let repo_update_events = repo_update_events.clone();
2057 cx.subscribe(&tree, move |_, _, event, _| {
2058 if let Event::UpdatedGitRepositories(update) = event {
2059 repo_update_events.lock().push(update.clone());
2060 }
2061 })
2062 .detach();
2063 });
2064
2065 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
2066 tree.flush_fs_events(cx).await;
2067
2068 assert_eq!(
2069 repo_update_events.lock()[0]
2070 .iter()
2071 .map(|e| e.0.clone())
2072 .collect::<Vec<Arc<Path>>>(),
2073 vec![Path::new("dir1").into()]
2074 );
2075
2076 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
2077 tree.flush_fs_events(cx).await;
2078
2079 tree.read_with(cx, |tree, _cx| {
2080 let tree = tree.as_local().unwrap();
2081
2082 assert!(tree
2083 .repository_for_path("dir1/src/b.txt".as_ref())
2084 .is_none());
2085 });
2086}
2087
2088#[gpui::test]
2089async fn test_git_status(cx: &mut TestAppContext) {
2090 init_test(cx);
2091 cx.executor().allow_parking();
2092 const IGNORE_RULE: &'static str = "**/target";
2093
2094 let root = temp_tree(json!({
2095 "project": {
2096 "a.txt": "a",
2097 "b.txt": "bb",
2098 "c": {
2099 "d": {
2100 "e.txt": "eee"
2101 }
2102 },
2103 "f.txt": "ffff",
2104 "target": {
2105 "build_file": "???"
2106 },
2107 ".gitignore": IGNORE_RULE
2108 },
2109
2110 }));
2111
2112 const A_TXT: &'static str = "a.txt";
2113 const B_TXT: &'static str = "b.txt";
2114 const E_TXT: &'static str = "c/d/e.txt";
2115 const F_TXT: &'static str = "f.txt";
2116 const DOTGITIGNORE: &'static str = ".gitignore";
2117 const BUILD_FILE: &'static str = "target/build_file";
2118 let project_path = Path::new("project");
2119
2120 // Set up git repository before creating the worktree.
2121 let work_dir = root.path().join("project");
2122 let mut repo = git_init(work_dir.as_path());
2123 repo.add_ignore_rule(IGNORE_RULE).unwrap();
2124 git_add(A_TXT, &repo);
2125 git_add(E_TXT, &repo);
2126 git_add(DOTGITIGNORE, &repo);
2127 git_commit("Initial commit", &repo);
2128
2129 let tree = Worktree::local(
2130 build_client(cx),
2131 root.path(),
2132 true,
2133 Arc::new(RealFs),
2134 Default::default(),
2135 &mut cx.to_async(),
2136 )
2137 .await
2138 .unwrap();
2139
2140 tree.flush_fs_events(cx).await;
2141 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2142 .await;
2143 cx.executor().run_until_parked();
2144
2145 // Check that the right git state is observed on startup
2146 tree.read_with(cx, |tree, _cx| {
2147 let snapshot = tree.snapshot();
2148 assert_eq!(snapshot.repositories().count(), 1);
2149 let (dir, _) = snapshot.repositories().next().unwrap();
2150 assert_eq!(dir.as_ref(), Path::new("project"));
2151
2152 assert_eq!(
2153 snapshot.status_for_file(project_path.join(B_TXT)),
2154 Some(GitFileStatus::Added)
2155 );
2156 assert_eq!(
2157 snapshot.status_for_file(project_path.join(F_TXT)),
2158 Some(GitFileStatus::Added)
2159 );
2160 });
2161
2162 // Modify a file in the working copy.
2163 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
2164 tree.flush_fs_events(cx).await;
2165 cx.executor().run_until_parked();
2166
2167 // The worktree detects that the file's git status has changed.
2168 tree.read_with(cx, |tree, _cx| {
2169 let snapshot = tree.snapshot();
2170 assert_eq!(
2171 snapshot.status_for_file(project_path.join(A_TXT)),
2172 Some(GitFileStatus::Modified)
2173 );
2174 });
2175
2176 // Create a commit in the git repository.
2177 git_add(A_TXT, &repo);
2178 git_add(B_TXT, &repo);
2179 git_commit("Committing modified and added", &repo);
2180 tree.flush_fs_events(cx).await;
2181 cx.executor().run_until_parked();
2182
2183 // The worktree detects that the files' git status have changed.
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(E_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(project_path.join(renamed_dir_name).join(RENAMED_FILE)),
            Some(GitFileStatus::Added)
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(
                project_path
                    .join(Path::new(renamed_dir_name))
                    .join(RENAMED_FILE)
            ),
            Some(GitFileStatus::Added)
        );
    });
}

#[gpui::test]
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".git": {},
            "a": {
                "b": {
                    "c1.txt": "",
                    "c2.txt": "",
                },
                "d": {
                    "e1.txt": "",
                    "e2.txt": "",
                    "e3.txt": "",
                }
            },
            "f": {
                "no-status.txt": ""
            },
            "g": {
                "h1.txt": "",
                "h2.txt": ""
            },
        }),
    )
    .await;
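    // Mark a few files with git statuses in the fake repository so that their
    // propagation to parent directories can be checked.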
    fs.set_status_for_repo_via_git_operation(
        Path::new("/root/.git"),
        &[
            (Path::new("a/b/c1.txt"), GitFileStatus::Added),
            (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
            (Path::new("g/h2.txt"), GitFileStatus::Conflict),
        ],
    );

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    cx.executor().run_until_parked();
    let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
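    // Check status propagation when starting from the root, from intermediate
    // directories, and from individual files.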
    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new(""), Some(GitFileStatus::Conflict)),
            (Path::new("a"), Some(GitFileStatus::Modified)),
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
            (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f/no-status.txt"), None),
        ],
    );
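    /// Looks up each path's entry, propagates git statuses over the resulting
    /// entries, and asserts that each entry ends up with the expected status.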
    #[track_caller]
    fn check_propagated_statuses(
        snapshot: &Snapshot,
        expected_statuses: &[(&Path, Option<GitFileStatus>)],
    ) {
        let mut entries = expected_statuses
            .iter()
            .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
            .collect::<Vec<_>>();
        snapshot.propagate_git_statuses(&mut entries);
        assert_eq!(
            entries
                .iter()
                .map(|e| (e.path.as_ref(), e.git_status))
                .collect::<Vec<_>>(),
            expected_statuses
        );
    }
}
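/// Builds a test `Client` backed by a fake HTTP client that responds to every request with a 404.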
fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
    let http_client = FakeHttpClient::with_404_response();
    cx.update(|cx| Client::new(http_client, cx))
}
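/// Initializes a new git repository at `path`.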
#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
    git2::Repository::init(path).expect("Failed to initialize git repository")
}
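/// Stages `path` in the repository's index.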
#[track_caller]
fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
    let path = path.as_ref();
    let mut index = repo.index().expect("Failed to get index");
    index.add_path(path).expect("Failed to add file to index");
    index.write().expect("Failed to write index");
}
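/// Removes `path` from the repository's index, leaving the working tree untouched.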
#[track_caller]
fn git_remove_index(path: &Path, repo: &git2::Repository) {
    let mut index = repo.index().expect("Failed to get index");
    index.remove_path(path).expect("Failed to remove file from index");
    index.write().expect("Failed to write index");
}
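/// Commits the current index with the given message, using HEAD as the parent commit when one exists.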
#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    let oid = repo.index().unwrap().write_tree().unwrap();
    let tree = repo.find_tree(oid).unwrap();
    if let Ok(head) = repo.head() {
        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();

        let parent_commit = parent_obj.as_commit().unwrap();

        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[parent_commit],
        )
        .expect("Failed to commit with parent");
    } else {
        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
            .expect("Failed to commit");
    }
}
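/// Stashes the repository's current working-directory changes.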
#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}
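/// Soft-resets HEAD to its parent commit at index `offset` (0 = first parent).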
#[track_caller]
fn git_reset(offset: usize, repo: &git2::Repository) {
    let head = repo.head().expect("Couldn't get repo head");
    let object = head.peel(git2::ObjectType::Commit).unwrap();
    let commit = object.as_commit().unwrap();
    let new_head = commit
        .parents()
        .nth(offset)
        .expect("Not enough history");
    repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
        .expect("Could not reset");
}
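/// Returns the git status of every path reported by the repository.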
#[allow(dead_code)]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
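/// Asserts that the worktree excludes, ignores, and tracks the given sets of paths.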
#[track_caller]
fn check_worktree_entries(
    tree: &Worktree,
    expected_excluded_paths: &[&str],
    expected_ignored_paths: &[&str],
    expected_tracked_paths: &[&str],
) {
    for path in expected_excluded_paths {
        let entry = tree.entry_for_path(path);
        assert!(
            entry.is_none(),
            "expected path '{path}' to be excluded, but got entry: {entry:?}",
        );
    }
    for path in expected_ignored_paths {
        let entry = tree
            .entry_for_path(path)
            .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
        assert!(
            entry.is_ignored,
            "expected path '{path}' to be ignored, but got entry: {entry:?}",
        );
    }
    for path in expected_tracked_paths {
        let entry = tree
            .entry_for_path(path)
            .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
        assert!(
            !entry.is_ignored,
            "expected path '{path}' to be tracked, but got entry: {entry:?}",
        );
    }
}
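/// Installs a test `SettingsStore` and initializes project settings for the test context.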
fn init_test(cx: &mut gpui::TestAppContext) {
    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        Project::init_settings(cx);
    });
}