use crate::{
    worktree::{Event, Snapshot, WorktreeHandle},
    Entry, EntryKind, PathChange, Worktree,
};
use anyhow::Result;
use client::Client;
use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
use git::GITIGNORE;
use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
use parking_lot::Mutex;
use postage::stream::Stream;
use pretty_assertions::assert_eq;
use rand::prelude::*;
use serde_json::json;
use std::{
    env,
    fmt::Write,
    mem,
    path::{Path, PathBuf},
    sync::Arc,
};
use util::{http::FakeHttpClient, test::temp_tree, ResultExt};

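// Verifies that worktree traversal yields entries in path order, including
// gitignored entries only when requested.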
#[gpui::test]
async fn test_traversal(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "a/b\n",
            "a": {
                "b": "",
                "c": "",
            }
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new(".gitignore"),
                Path::new("a"),
                Path::new("a/c"),
            ]
        );
        assert_eq!(
            tree.entries(true)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new(".gitignore"),
                Path::new("a"),
                Path::new("a/b"),
                Path::new("a/c"),
            ]
        );
    })
}

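// Verifies that `descendent_entries` respects its include-dirs and
// include-ignored flags when listing the contents of a directory.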
#[gpui::test]
async fn test_descendent_entries(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "a": "",
            "b": {
                "c": {
                    "d": ""
                },
                "e": {}
            },
            "f": "",
            "g": {
                "h": {}
            },
            "i": {
                "j": {
                    "k": ""
                },
                "l": {}
            },
            ".gitignore": "i/j\n",
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.descendent_entries(false, false, Path::new("b"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("b/c/d"),]
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("b"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new("b"),
                Path::new("b/c"),
                Path::new("b/c/d"),
                Path::new("b/e"),
            ]
        );

        assert_eq!(
            tree.descendent_entries(false, false, Path::new("g"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            Vec::<PathBuf>::new()
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("g"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("g"), Path::new("g/h"),]
        );
    });

    // Expand gitignored directory.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("i/j").into()])
    })
    .recv()
    .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.descendent_entries(false, false, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            Vec::<PathBuf>::new()
        );
        assert_eq!(
            tree.descendent_entries(false, true, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("i/j/k")]
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("i"), Path::new("i/l"),]
        );
    })
}

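// Verifies that scanning terminates when symlinks form a cycle: the symlink
// entries are recorded but not followed.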
#[gpui::test(iterations = 10)]
async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "lib": {
                "a": {
                    "a.txt": ""
                },
                "b": {
                    "b.txt": ""
                }
            }
        }),
    )
    .await;
    fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
    fs.insert_symlink("/root/lib/b/lib", "..".into()).await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new("lib"),
                Path::new("lib/a"),
                Path::new("lib/a/a.txt"),
                Path::new("lib/a/lib"),
                Path::new("lib/b"),
                Path::new("lib/b/b.txt"),
                Path::new("lib/b/lib"),
            ]
        );
    });

    fs.rename(
        Path::new("/root/lib/a/lib"),
        Path::new("/root/lib/a/lib-2"),
        Default::default(),
    )
    .await
    .unwrap();
    executor.run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new("lib"),
                Path::new("lib/a"),
                Path::new("lib/a/a.txt"),
                Path::new("lib/a/lib-2"),
                Path::new("lib/b"),
                Path::new("lib/b/b.txt"),
                Path::new("lib/b/lib"),
            ]
        );
    });
}

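// Verifies that symlinks targeting directories outside the worktree root are
// marked external and are only scanned when explicitly expanded.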
#[gpui::test]
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir1": {
                "deps": {
                    // symlinks here
                },
                "src": {
                    "a.rs": "",
                    "b.rs": "",
                },
            },
            "dir2": {
                "src": {
                    "c.rs": "",
                    "d.rs": "",
                }
            },
            "dir3": {
                "deps": {},
                "src": {
                    "e.rs": "",
                    "f.rs": "",
                },
            }
        }),
    )
    .await;

    // These symlinks point to directories outside of the worktree's root, dir1.
    fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
        .await;
    fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
        .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root/dir1"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    let tree_updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |_, cx| {
        let tree_updates = tree_updates.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedEntries(update) = event {
                tree_updates.lock().extend(
                    update
                        .iter()
                        .map(|(path, _, change)| (path.clone(), *change)),
                );
            }
        })
        .detach();
    });

    // The symlinked directories are not scanned by default.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );

        assert_eq!(
            tree.entry_for_path("deps/dep-dir2").unwrap().kind,
            EntryKind::UnloadedDir
        );
    });

    // Expand one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
    })
    .recv()
    .await;

    // The expanded directory's contents are loaded. Subdirectories are
    // not scanned yet.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("deps/dep-dir3/deps"), true),
                (Path::new("deps/dep-dir3/src"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );
    });
    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
            (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
            (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
        ]
    );

    // Expand a subdirectory of one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
    })
    .recv()
    .await;

    // The expanded subdirectory's contents are loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("deps/dep-dir3/deps"), true),
                (Path::new("deps/dep-dir3/src"), true),
                (Path::new("deps/dep-dir3/src/e.rs"), true),
                (Path::new("deps/dep-dir3/src/f.rs"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );
    });

    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
            (
                Path::new("deps/dep-dir3/src/e.rs").into(),
                PathChange::Loaded
            ),
            (
                Path::new("deps/dep-dir3/src/f.rs").into(),
                PathChange::Loaded
            )
        ]
    );
}

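// Verifies that opening a buffer deep inside an unexpanded gitignored
// directory loads only the directories needed to reach that file.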
#[gpui::test]
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "one": {
                "node_modules": {
                    "a": {
                        "a1.js": "a1",
                        "a2.js": "a2",
                    },
                    "b": {
                        "b1.js": "b1",
                        "b2.js": "b2",
                    },
                    "c": {
                        "c1.js": "c1",
                        "c2.js": "c2",
                    }
                },
            },
            "two": {
                "x.js": "",
                "y.js": "",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );
    });

    // Open a file that is nested inside of a gitignored directory that
    // has not yet been expanded.
    let prev_read_dir_count = fs.read_dir_call_count();
    let buffer = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, cx| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("one/node_modules/a"), true),
                (Path::new("one/node_modules/b"), true),
                (Path::new("one/node_modules/b/b1.js"), true),
                (Path::new("one/node_modules/b/b2.js"), true),
                (Path::new("one/node_modules/c"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );

        assert_eq!(
            buffer.read(cx).file().unwrap().path().as_ref(),
            Path::new("one/node_modules/b/b1.js")
        );

        // Only the newly-expanded directories are scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
    });

    // Open another file in a different subdirectory of the same
    // gitignored directory.
    let prev_read_dir_count = fs.read_dir_call_count();
    let buffer = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, cx| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("one/node_modules/a"), true),
                (Path::new("one/node_modules/a/a1.js"), true),
                (Path::new("one/node_modules/a/a2.js"), true),
                (Path::new("one/node_modules/b"), true),
                (Path::new("one/node_modules/b/b1.js"), true),
                (Path::new("one/node_modules/b/b2.js"), true),
                (Path::new("one/node_modules/c"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );

        assert_eq!(
            buffer.read(cx).file().unwrap().path().as_ref(),
            Path::new("one/node_modules/a/a2.js")
        );

        // Only the newly-expanded directory is scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
    });

    // No work happens when files and directories change within an unloaded directory.
    let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
    fs.create_dir("/root/one/node_modules/c/lib".as_ref())
        .await
        .unwrap();
    cx.foreground().run_until_parked();
    assert_eq!(
        fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
        0
    );
}

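// Verifies that directories are rescanned and reloaded when a .gitignore
// change causes them to no longer be ignored.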
#[gpui::test]
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "a": {
                "a.js": "",
            },
            "b": {
                "b.js": "",
            },
            "node_modules": {
                "c": {
                    "c.js": "",
                },
                "d": {
                    "d.js": "",
                    "e": {
                        "e1.js": "",
                        "e2.js": "",
                    },
                    "f": {
                        "f1.js": "",
                        "f2.js": "",
                    }
                },
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Open a file within the gitignored directory, forcing some of its
    // subdirectories to be read, but not all.
    let read_dir_count_1 = fs.read_dir_call_count();
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
    })
    .recv()
    .await;

    // Those subdirectories are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("a"), false),
                (Path::new("a/a.js"), false),
                (Path::new("b"), false),
                (Path::new("b/b.js"), false),
                (Path::new("node_modules"), true),
                (Path::new("node_modules/c"), true),
                (Path::new("node_modules/d"), true),
                (Path::new("node_modules/d/d.js"), true),
                (Path::new("node_modules/d/e"), true),
                (Path::new("node_modules/d/f"), true),
            ]
        );
    });
    let read_dir_count_2 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);

    // Update the gitignore so that node_modules is no longer ignored,
    // but a subdirectory is ignored
    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
        .await
        .unwrap();
    cx.foreground().run_until_parked();

    // All of the directories that are no longer ignored are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("a"), false),
                (Path::new("a/a.js"), false),
                (Path::new("b"), false),
                (Path::new("b/b.js"), false),
                // This directory is no longer ignored
                (Path::new("node_modules"), false),
                (Path::new("node_modules/c"), false),
                (Path::new("node_modules/c/c.js"), false),
                (Path::new("node_modules/d"), false),
                (Path::new("node_modules/d/d.js"), false),
                // This subdirectory is now ignored
                (Path::new("node_modules/d/e"), true),
                (Path::new("node_modules/d/f"), false),
                (Path::new("node_modules/d/f/f1.js"), false),
                (Path::new("node_modules/d/f/f2.js"), false),
            ]
        );
    });

    // Each of the newly-loaded directories is scanned only once.
    let read_dir_count_3 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
}

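// Verifies that newly created files pick up the correct ignored state from
// both the worktree's own .gitignore and an ancestor's .gitignore.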
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        "/root/tree".as_ref(),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        assert!(
            !tree
                .entry_for_path("tracked-dir/tracked-file1")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("ignored-dir/ignored-file1")
                .unwrap()
                .is_ignored
        );
    });

    fs.create_file(
        "/root/tree/tracked-dir/tracked-file2".as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        "/root/tree/ignored-dir/ignored-file2".as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.foreground().run_until_parked();
    cx.read(|cx| {
        let tree = tree.read(cx);
        assert!(
            !tree
                .entry_for_path("tracked-dir/tracked-file2")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("ignored-dir/ignored-file2")
                .unwrap()
                .is_ignored
        );
        assert!(tree.entry_for_path(".git").unwrap().is_ignored);
    });
}

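// Verifies that files written through the worktree API are assigned the
// correct ignored state.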
#[gpui::test]
async fn test_write_file(cx: &mut TestAppContext) {
    let dir = temp_tree(json!({
        ".git": {},
        ".gitignore": "ignored-dir\n",
        "tracked-dir": {},
        "ignored-dir": {}
    }));

    let tree = Worktree::local(
        build_client(cx),
        dir.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.update(cx, |tree, cx| {
        tree.as_local().unwrap().write_file(
            Path::new("tracked-dir/file.txt"),
            "hello".into(),
            Default::default(),
            cx,
        )
    })
    .await
    .unwrap();
    tree.update(cx, |tree, cx| {
        tree.as_local().unwrap().write_file(
            Path::new("ignored-dir/file.txt"),
            "world".into(),
            Default::default(),
            cx,
        )
    })
    .await
    .unwrap();

    tree.read_with(cx, |tree, _| {
        let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
        let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
        assert!(!tracked.is_ignored);
        assert!(ignored.is_ignored);
    });
}

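// Verifies that an entry created while the initial scan is still running is
// reflected in the snapshots observed via `observe_updates`.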
#[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "b": {},
            "c": {},
            "d": {},
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        "/root".as_ref(),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let snapshot1 = tree.update(cx, |tree, cx| {
        let tree = tree.as_local_mut().unwrap();
        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
        let _ = tree.observe_updates(0, cx, {
            let snapshot = snapshot.clone();
            move |update| {
                snapshot.lock().apply_remote_update(update).unwrap();
                async { true }
            }
        });
        snapshot
    });

    let entry = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("a/e".as_ref(), true, cx)
        })
        .await
        .unwrap();
    assert!(entry.is_dir());

    cx.foreground().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
    });

    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
    assert_eq!(
        snapshot1.lock().entries(true).collect::<Vec<_>>(),
        snapshot2.entries(true).collect::<Vec<_>>()
    );
}

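// Randomized test: mutates the worktree while the initial scan is running and
// checks that remotely-applied updates converge to the final snapshot.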
#[gpui::test(iterations = 100)]
async fn test_random_worktree_operations_during_initial_scan(
    cx: &mut TestAppContext,
    mut rng: StdRng,
) {
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(5);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new("/test");
    let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        build_client(cx),
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    for _ in 0..operations {
        worktree
            .update(cx, |worktree, cx| {
                randomly_mutate_worktree(worktree, &mut rng, cx)
            })
            .await
            .log_err();
        worktree.read_with(cx, |tree, _| {
            tree.as_local().unwrap().snapshot().check_invariants(true)
        });

        if rng.gen_bool(0.6) {
            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
        }
    }

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    cx.foreground().run_until_parked();

    let final_snapshot = worktree.read_with(cx, |tree, _| {
        let tree = tree.as_local().unwrap();
        let snapshot = tree.snapshot();
        snapshot.check_invariants(true);
        snapshot
    });

    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
        let mut updated_snapshot = snapshot.clone();
        for update in updates.lock().iter() {
            if update.scan_id >= updated_snapshot.scan_id() as u64 {
                updated_snapshot
                    .apply_remote_update(update.clone())
                    .unwrap();
            }
        }

        assert_eq!(
            updated_snapshot.entries(true).collect::<Vec<_>>(),
            final_snapshot.entries(true).collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
        );
    }
}

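// Randomized test: interleaves worktree operations, filesystem mutations, and
// delayed fs events, then checks snapshot invariants and update convergence.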
#[gpui::test(iterations = 100)]
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(40);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new("/test");
    let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        build_client(cx),
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    fs.as_fake().pause_events();
    let mut snapshots = Vec::new();
    let mut mutations_len = operations;
    while mutations_len > 1 {
        if rng.gen_bool(0.2) {
            worktree
                .update(cx, |worktree, cx| {
                    randomly_mutate_worktree(worktree, &mut rng, cx)
                })
                .await
                .log_err();
        } else {
            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
        }

        let buffered_event_count = fs.as_fake().buffered_event_count();
        if buffered_event_count > 0 && rng.gen_bool(0.3) {
            let len = rng.gen_range(0..=buffered_event_count);
            log::info!("flushing {} events", len);
            fs.as_fake().flush_events(len);
        } else {
            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
            mutations_len -= 1;
        }

        cx.foreground().run_until_parked();
        if rng.gen_bool(0.2) {
            log::info!("storing snapshot {}", snapshots.len());
            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
            snapshots.push(snapshot);
        }
    }

    log::info!("quiescing");
    fs.as_fake().flush_events(usize::MAX);
    cx.foreground().run_until_parked();

    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    snapshot.check_invariants(true);
    let expanded_paths = snapshot
        .expanded_entries()
        .map(|e| e.path.clone())
        .collect::<Vec<_>>();

    {
        let new_worktree = Worktree::local(
            build_client(cx),
            root_dir,
            true,
            fs.clone(),
            Default::default(),
            &mut cx.to_async(),
        )
        .await
        .unwrap();
        new_worktree
            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
            .await;
        new_worktree
            .update(cx, |tree, _| {
                tree.as_local_mut()
                    .unwrap()
                    .refresh_entries_for_paths(expanded_paths)
            })
            .recv()
            .await;
        let new_snapshot =
            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        assert_eq!(
            snapshot.entries_without_ids(true),
            new_snapshot.entries_without_ids(true)
        );
    }

    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
        for update in updates.lock().iter() {
            if update.scan_id >= prev_snapshot.scan_id() as u64 {
                prev_snapshot.apply_remote_update(update.clone()).unwrap();
            }
        }

        assert_eq!(
            prev_snapshot
                .entries(true)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            snapshot
                .entries(true)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }

    fn ignore_pending_dir(entry: &Entry) -> Entry {
        let mut entry = entry.clone();
        if entry.kind.is_dir() {
            entry.kind = EntryKind::Dir
        }
        entry
    }
}

// The worktree's `UpdatedEntries` event can be used to follow along with
// all changes to the worktree's snapshot.
fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
    let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
    cx.subscribe(&cx.handle(), move |tree, _, event, _| {
        if let Event::UpdatedEntries(changes) = event {
            for (path, _, change_type) in changes.iter() {
                let entry = tree.entry_for_path(&path).cloned();
                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
                    Ok(ix) | Err(ix) => ix,
                };
                match change_type {
                    PathChange::Added => entries.insert(ix, entry.unwrap()),
                    PathChange::Removed => drop(entries.remove(ix)),
                    PathChange::Updated => {
                        let entry = entry.unwrap();
                        let existing_entry = entries.get_mut(ix).unwrap();
                        assert_eq!(existing_entry.path, entry.path);
                        *existing_entry = entry;
                    }
                    PathChange::AddedOrUpdated | PathChange::Loaded => {
                        let entry = entry.unwrap();
                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
                            *entries.get_mut(ix).unwrap() = entry;
                        } else {
                            entries.insert(ix, entry);
                        }
                    }
                }
            }

            let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
        }
    })
    .detach();
}

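/// Randomly deletes, renames, or creates an entry through the worktree API.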
fn randomly_mutate_worktree(
    worktree: &mut Worktree,
    rng: &mut impl Rng,
    cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> {
    log::info!("mutating worktree");
    let worktree = worktree.as_local_mut().unwrap();
    let snapshot = worktree.snapshot();
    let entry = snapshot.entries(false).choose(rng).unwrap();

    match rng.gen_range(0_u32..100) {
        0..=33 if entry.path.as_ref() != Path::new("") => {
            log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
            worktree.delete_entry(entry.id, cx).unwrap()
        }
        ..=66 if entry.path.as_ref() != Path::new("") => {
            let other_entry = snapshot.entries(false).choose(rng).unwrap();
            let new_parent_path = if other_entry.is_dir() {
                other_entry.path.clone()
            } else {
                other_entry.path.parent().unwrap().into()
            };
            let mut new_path = new_parent_path.join(random_filename(rng));
            if new_path.starts_with(&entry.path) {
                new_path = random_filename(rng).into();
            }

            log::info!(
                "renaming entry {:?} ({}) to {:?}",
                entry.path,
                entry.id.0,
                new_path
            );
            let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
            cx.foreground().spawn(async move {
                task.await?;
                Ok(())
            })
        }
        _ => {
            let task = if entry.is_dir() {
                let child_path = entry.path.join(random_filename(rng));
                let is_dir = rng.gen_bool(0.3);
                log::info!(
                    "creating {} at {:?}",
                    if is_dir { "dir" } else { "file" },
                    child_path,
                );
                worktree.create_entry(child_path, is_dir, cx)
            } else {
                log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
                worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
            };
            cx.foreground().spawn(async move {
                task.await?;
                Ok(())
            })
        }
    }
}

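/// Randomly mutates the fake filesystem: creates files and directories, writes
/// .gitignore files, and renames or deletes existing paths.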
async fn randomly_mutate_fs(
    fs: &Arc<dyn Fs>,
    root_path: &Path,
    insertion_probability: f64,
    rng: &mut impl Rng,
) {
    log::info!("mutating fs");
    let mut files = Vec::new();
    let mut dirs = Vec::new();
    for path in fs.as_fake().paths(false) {
        if path.starts_with(root_path) {
            if fs.is_file(&path).await {
                files.push(path);
            } else {
                dirs.push(path);
            }
        }
    }

    if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
        let path = dirs.choose(rng).unwrap();
        let new_path = path.join(random_filename(rng));

        if rng.gen() {
            log::info!(
                "creating dir {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_dir(&new_path).await.unwrap();
        } else {
            log::info!(
                "creating file {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_file(&new_path, Default::default()).await.unwrap();
        }
    } else if rng.gen_bool(0.05) {
        let ignore_dir_path = dirs.choose(rng).unwrap();
        let ignore_path = ignore_dir_path.join(&*GITIGNORE);

        let subdirs = dirs
            .iter()
            .filter(|d| d.starts_with(&ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let subfiles = files
            .iter()
            .filter(|d| d.starts_with(&ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let files_to_ignore = {
            let len = rng.gen_range(0..=subfiles.len());
            subfiles.choose_multiple(rng, len)
        };
        let dirs_to_ignore = {
            let len = rng.gen_range(0..subdirs.len());
            subdirs.choose_multiple(rng, len)
        };

        let mut ignore_contents = String::new();
        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
            writeln!(
                ignore_contents,
                "{}",
                path_to_ignore
                    .strip_prefix(&ignore_dir_path)
                    .unwrap()
                    .to_str()
                    .unwrap()
            )
            .unwrap();
        }
        log::info!(
            "creating gitignore {:?} with contents:\n{}",
            ignore_path.strip_prefix(&root_path).unwrap(),
            ignore_contents
        );
        fs.save(
            &ignore_path,
            &ignore_contents.as_str().into(),
            Default::default(),
        )
        .await
        .unwrap();
    } else {
        let old_path = {
            let file_path = files.choose(rng);
            let dir_path = dirs[1..].choose(rng);
            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
        };

        let is_rename = rng.gen();
        if is_rename {
            let new_path_parent = dirs
                .iter()
                .filter(|d| !d.starts_with(old_path))
                .choose(rng)
                .unwrap();

            let overwrite_existing_dir =
                !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
            let new_path = if overwrite_existing_dir {
                fs.remove_dir(
                    &new_path_parent,
                    RemoveOptions {
                        recursive: true,
                        ignore_if_not_exists: true,
                    },
                )
                .await
                .unwrap();
                new_path_parent.to_path_buf()
            } else {
                new_path_parent.join(random_filename(rng))
            };

            log::info!(
                "renaming {:?} to {}{:?}",
                old_path.strip_prefix(&root_path).unwrap(),
                if overwrite_existing_dir {
                    "overwrite "
                } else {
                    ""
                },
                new_path.strip_prefix(&root_path).unwrap()
            );
            fs.rename(
                &old_path,
                &new_path,
                fs::RenameOptions {
                    overwrite: true,
                    ignore_if_exists: true,
                },
            )
            .await
            .unwrap();
        } else if fs.is_file(&old_path).await {
            log::info!(
                "deleting file {:?}",
                old_path.strip_prefix(&root_path).unwrap()
            );
            fs.remove_file(old_path, Default::default()).await.unwrap();
        } else {
            log::info!(
                "deleting dir {:?}",
                old_path.strip_prefix(&root_path).unwrap()
            );
            fs.remove_dir(
                &old_path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await
            .unwrap();
        }
    }
}

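/// Generates a random six-character alphanumeric filename.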
fn random_filename(rng: &mut impl Rng) -> String {
    (0..6)
        .map(|_| rng.sample(rand::distributions::Alphanumeric))
        .map(char::from)
        .collect()
}

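// Verifies that a repository's git statuses are preserved when its work
// directory is renamed.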
#[gpui::test]
async fn test_rename_work_directory(cx: &mut TestAppContext) {
    let root = temp_tree(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },
    }));
    let root_path = root.path();

    let tree = Worktree::local(
        build_client(cx),
        root_path,
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").ok();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.flush_fs_events(cx).await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        let (work_dir, _) = tree.repositories().next().unwrap();
        assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
        assert_eq!(
            tree.status_for_file(Path::new("projects/project1/a")),
            Some(GitFileStatus::Modified)
        );
        assert_eq!(
            tree.status_for_file(Path::new("projects/project1/b")),
            Some(GitFileStatus::Added)
        );
    });

    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .ok();
    tree.flush_fs_events(cx).await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        let (work_dir, _) = tree.repositories().next().unwrap();
        assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
        assert_eq!(
            tree.status_for_file(Path::new("projects/project2/a")),
            Some(GitFileStatus::Modified)
        );
        assert_eq!(
            tree.status_for_file(Path::new("projects/project2/b")),
            Some(GitFileStatus::Added)
        );
    });
}

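// Verifies that `repository_for_path` resolves a path to its innermost
// enclosing repository.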
#[gpui::test]
async fn test_git_repository_for_path(cx: &mut TestAppContext) {
    let root = temp_tree(json!({
        "c.txt": "",
        "dir1": {
            ".git": {},
            "deps": {
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": ""
                    }
                }
            },
            "src": {
                "b.txt": ""
            }
        },
    }));

    let tree = Worktree::local(
        build_client(cx),
        root.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _cx| {
        let tree = tree.as_local().unwrap();

        assert!(tree.repository_for_path("c.txt".as_ref()).is_none());

        let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
        assert_eq!(
            entry
                .work_directory(tree)
                .map(|directory| directory.as_ref().to_owned()),
            Some(Path::new("dir1").to_owned())
        );

        let entry = tree
            .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
            .unwrap();
        assert_eq!(
            entry
                .work_directory(tree)
                .map(|directory| directory.as_ref().to_owned()),
            Some(Path::new("dir1/deps/dep1").to_owned())
        );

        let entries = tree.files(false, 0);

        let paths_with_repos = tree
            .entries_with_repositories(entries)
            .map(|(entry, repo)| {
                (
                    entry.path.as_ref(),
                    repo.and_then(|repo| {
                        repo.work_directory(&tree)
                            .map(|work_directory| work_directory.0.to_path_buf())
                    }),
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            paths_with_repos,
            &[
                (Path::new("c.txt"), None),
                (
                    Path::new("dir1/deps/dep1/src/a.txt"),
                    Some(Path::new("dir1/deps/dep1").into())
                ),
                (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
            ]
        );
    });

    let repo_update_events = Arc::new(Mutex::new(vec![]));
    tree.update(cx, |_, cx| {
        let repo_update_events = repo_update_events.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedGitRepositories(update) = event {
                repo_update_events.lock().push(update.clone());
            }
        })
        .detach();
    });

    std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
    tree.flush_fs_events(cx).await;

    assert_eq!(
        repo_update_events.lock()[0]
            .iter()
            .map(|e| e.0.clone())
            .collect::<Vec<Arc<Path>>>(),
        vec![Path::new("dir1").into()]
    );

    std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _cx| {
        let tree = tree.as_local().unwrap();

        assert!(tree
            .repository_for_path("dir1/src/b.txt".as_ref())
            .is_none());
    });
}

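// Verifies that git statuses are kept up to date as files are modified,
// staged, committed, ignored, and renamed.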
#[gpui::test]
async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
    const IGNORE_RULE: &'static str = "**/target";

    let root = temp_tree(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },
    }));

    let tree = Worktree::local(
        build_client(cx),
        root.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    const A_TXT: &'static str = "a.txt";
    const B_TXT: &'static str = "b.txt";
    const E_TXT: &'static str = "c/d/e.txt";
    const F_TXT: &'static str = "f.txt";
    const DOTGITIGNORE: &'static str = ".gitignore";
    const BUILD_FILE: &'static str = "target/build_file";
    let project_path: &Path = &Path::new("project");

    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(Path::new(A_TXT), &repo);
    git_add(Path::new(E_TXT), &repo);
    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Initial commit", &repo);

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that the right git state is observed on startup
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(snapshot.repositories().count(), 1);
        let (dir, _) = snapshot.repositories().next().unwrap();
        assert_eq!(dir.as_ref(), Path::new("project"));

        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );
    });

    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(project_path.join(A_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    git_add(Path::new(A_TXT), &repo);
    git_add(Path::new(B_TXT), &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that repo only changes are tracked
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );

        assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
    });

    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that more complex repo changes are tracked
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(E_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &'static str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
            Some(GitFileStatus::Added)
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(
                project_path
                    .join(Path::new(renamed_dir_name))
                    .join(RENAMED_FILE)
            ),
            Some(GitFileStatus::Added)
        );
    });
}

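// Verifies that git statuses propagate to ancestor directories, with conflicts
// taking precedence over modifications, and modifications over additions.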
#[gpui::test]
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".git": {},
            "a": {
                "b": {
                    "c1.txt": "",
                    "c2.txt": "",
                },
                "d": {
                    "e1.txt": "",
                    "e2.txt": "",
                    "e3.txt": "",
                }
            },
            "f": {
                "no-status.txt": ""
            },
            "g": {
                "h1.txt": "",
                "h2.txt": ""
            },
        }),
    )
    .await;

    fs.set_status_for_repo_via_git_operation(
        &Path::new("/root/.git"),
        &[
            (Path::new("a/b/c1.txt"), GitFileStatus::Added),
            (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
            (Path::new("g/h2.txt"), GitFileStatus::Conflict),
        ],
    );

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    cx.foreground().run_until_parked();
    let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new(""), Some(GitFileStatus::Conflict)),
            (Path::new("a"), Some(GitFileStatus::Modified)),
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
            (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f/no-status.txt"), None),
        ],
    );

    #[track_caller]
    fn check_propagated_statuses(
        snapshot: &Snapshot,
        expected_statuses: &[(&Path, Option<GitFileStatus>)],
    ) {
        let mut entries = expected_statuses
            .iter()
            .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
            .collect::<Vec<_>>();
        snapshot.propagate_git_statuses(&mut entries);
        assert_eq!(
            entries
                .iter()
                .map(|e| (e.path.as_ref(), e.git_status))
                .collect::<Vec<_>>(),
            expected_statuses
        );
    }
}

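// Builds a client backed by a fake HTTP client that returns 404 for every request.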
fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
    let http_client = FakeHttpClient::with_404_response();
    cx.read(|cx| Client::new(http_client, cx))
}

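// The helpers below drive a real git2 repository on disk for the git-related
// tests above.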
#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
    git2::Repository::init(path).expect("Failed to initialize git repository")
}

#[track_caller]
fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
    let path = path.as_ref();
    let mut index = repo.index().expect("Failed to get index");
    index.add_path(path).expect("Failed to add path");
    index.write().expect("Failed to write index");
}

#[track_caller]
fn git_remove_index(path: &Path, repo: &git2::Repository) {
    let mut index = repo.index().expect("Failed to get index");
    index.remove_path(path).expect("Failed to remove path");
    index.write().expect("Failed to write index");
}

#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    let oid = repo.index().unwrap().write_tree().unwrap();
    let tree = repo.find_tree(oid).unwrap();
    if let Ok(head) = repo.head() {
        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();

        let parent_commit = parent_obj.as_commit().unwrap();

        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[parent_commit],
        )
        .expect("Failed to commit with parent");
    } else {
        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
            .expect("Failed to commit");
    }
}

#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}

#[track_caller]
fn git_reset(offset: usize, repo: &git2::Repository) {
    let head = repo.head().expect("Couldn't get repo head");
    let object = head.peel(git2::ObjectType::Commit).unwrap();
    let commit = object.as_commit().unwrap();
    let new_head = commit
        .parents()
        .nth(offset)
        .expect("Not enough history");
    repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
        .expect("Could not reset");
}

#[allow(dead_code)]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}