1use crate::{
2 worktree::{Event, Snapshot, WorktreeHandle},
3 Entry, EntryKind, PathChange, Worktree,
4};
5use anyhow::Result;
6use client::Client;
7use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
8use git::GITIGNORE;
9use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
10use parking_lot::Mutex;
11use postage::stream::Stream;
12use pretty_assertions::assert_eq;
13use rand::prelude::*;
14use serde_json::json;
15use std::{
16 env,
17 fmt::Write,
18 mem,
19 path::{Path, PathBuf},
20 sync::Arc,
21};
22use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
23
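// Basic worktree traversal: `entries(false)` skips the gitignored "a/b",
// while `entries(true)` includes it.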
24#[gpui::test]
25async fn test_traversal(cx: &mut TestAppContext) {
26 let fs = FakeFs::new(cx.background());
27 fs.insert_tree(
28 "/root",
29 json!({
30 ".gitignore": "a/b\n",
31 "a": {
32 "b": "",
33 "c": "",
34 }
35 }),
36 )
37 .await;
38
39 let tree = Worktree::local(
40 build_client(cx),
41 Path::new("/root"),
42 true,
43 fs,
44 Default::default(),
45 &mut cx.to_async(),
46 )
47 .await
48 .unwrap();
49 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
50 .await;
51
52 tree.read_with(cx, |tree, _| {
53 assert_eq!(
54 tree.entries(false)
55 .map(|entry| entry.path.as_ref())
56 .collect::<Vec<_>>(),
57 vec![
58 Path::new(""),
59 Path::new(".gitignore"),
60 Path::new("a"),
61 Path::new("a/c"),
62 ]
63 );
64 assert_eq!(
65 tree.entries(true)
66 .map(|entry| entry.path.as_ref())
67 .collect::<Vec<_>>(),
68 vec![
69 Path::new(""),
70 Path::new(".gitignore"),
71 Path::new("a"),
72 Path::new("a/b"),
73 Path::new("a/c"),
74 ]
75 );
76 })
77}
78
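// `descendent_entries` respects its include-dirs and include-ignored flags
// when listing the entries beneath a given path.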
79#[gpui::test]
80async fn test_descendent_entries(cx: &mut TestAppContext) {
81 let fs = FakeFs::new(cx.background());
82 fs.insert_tree(
83 "/root",
84 json!({
85 "a": "",
86 "b": {
87 "c": {
88 "d": ""
89 },
90 "e": {}
91 },
92 "f": "",
93 "g": {
94 "h": {}
95 },
96 "i": {
97 "j": {
98 "k": ""
99 },
100 "l": {
101
102 }
103 },
104 ".gitignore": "i/j\n",
105 }),
106 )
107 .await;
108
109 let tree = Worktree::local(
110 build_client(cx),
111 Path::new("/root"),
112 true,
113 fs,
114 Default::default(),
115 &mut cx.to_async(),
116 )
117 .await
118 .unwrap();
119 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
120 .await;
121
122 tree.read_with(cx, |tree, _| {
123 assert_eq!(
124 tree.descendent_entries(false, false, Path::new("b"))
125 .map(|entry| entry.path.as_ref())
126 .collect::<Vec<_>>(),
127 vec![Path::new("b/c/d"),]
128 );
129 assert_eq!(
130 tree.descendent_entries(true, false, Path::new("b"))
131 .map(|entry| entry.path.as_ref())
132 .collect::<Vec<_>>(),
133 vec![
134 Path::new("b"),
135 Path::new("b/c"),
136 Path::new("b/c/d"),
137 Path::new("b/e"),
138 ]
139 );
140
141 assert_eq!(
142 tree.descendent_entries(false, false, Path::new("g"))
143 .map(|entry| entry.path.as_ref())
144 .collect::<Vec<_>>(),
145 Vec::<PathBuf>::new()
146 );
147 assert_eq!(
148 tree.descendent_entries(true, false, Path::new("g"))
149 .map(|entry| entry.path.as_ref())
150 .collect::<Vec<_>>(),
151 vec![Path::new("g"), Path::new("g/h"),]
152 );
153 });
154
    // Expand the gitignored directory.
156 tree.read_with(cx, |tree, _| {
157 tree.as_local()
158 .unwrap()
159 .refresh_entries_for_paths(vec![Path::new("i/j").into()])
160 })
161 .recv()
162 .await;
163
164 tree.read_with(cx, |tree, _| {
165 assert_eq!(
166 tree.descendent_entries(false, false, Path::new("i"))
167 .map(|entry| entry.path.as_ref())
168 .collect::<Vec<_>>(),
169 Vec::<PathBuf>::new()
170 );
171 assert_eq!(
172 tree.descendent_entries(false, true, Path::new("i"))
173 .map(|entry| entry.path.as_ref())
174 .collect::<Vec<_>>(),
175 vec![Path::new("i/j/k")]
176 );
177 assert_eq!(
178 tree.descendent_entries(true, false, Path::new("i"))
179 .map(|entry| entry.path.as_ref())
180 .collect::<Vec<_>>(),
181 vec![Path::new("i"), Path::new("i/l"),]
182 );
183 })
184}
185
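// Symlinks that create cycles are listed as entries but not followed, so the
// scan terminates, and renaming one of them is picked up on rescan.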
186#[gpui::test(iterations = 10)]
187async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
188 let fs = FakeFs::new(cx.background());
189 fs.insert_tree(
190 "/root",
191 json!({
192 "lib": {
193 "a": {
194 "a.txt": ""
195 },
196 "b": {
197 "b.txt": ""
198 }
199 }
200 }),
201 )
202 .await;
203 fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
204 fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
205
206 let tree = Worktree::local(
207 build_client(cx),
208 Path::new("/root"),
209 true,
210 fs.clone(),
211 Default::default(),
212 &mut cx.to_async(),
213 )
214 .await
215 .unwrap();
216
217 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
218 .await;
219
220 tree.read_with(cx, |tree, _| {
221 assert_eq!(
222 tree.entries(false)
223 .map(|entry| entry.path.as_ref())
224 .collect::<Vec<_>>(),
225 vec![
226 Path::new(""),
227 Path::new("lib"),
228 Path::new("lib/a"),
229 Path::new("lib/a/a.txt"),
230 Path::new("lib/a/lib"),
231 Path::new("lib/b"),
232 Path::new("lib/b/b.txt"),
233 Path::new("lib/b/lib"),
234 ]
235 );
236 });
237
238 fs.rename(
239 Path::new("/root/lib/a/lib"),
240 Path::new("/root/lib/a/lib-2"),
241 Default::default(),
242 )
243 .await
244 .unwrap();
245 executor.run_until_parked();
246 tree.read_with(cx, |tree, _| {
247 assert_eq!(
248 tree.entries(false)
249 .map(|entry| entry.path.as_ref())
250 .collect::<Vec<_>>(),
251 vec![
252 Path::new(""),
253 Path::new("lib"),
254 Path::new("lib/a"),
255 Path::new("lib/a/a.txt"),
256 Path::new("lib/a/lib-2"),
257 Path::new("lib/b"),
258 Path::new("lib/b/b.txt"),
259 Path::new("lib/b/lib"),
260 ]
261 );
262 });
263}
264
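// Symlinks pointing outside the worktree root are marked as external and are
// only scanned lazily, when their paths are explicitly refreshed.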
265#[gpui::test]
266async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
267 let fs = FakeFs::new(cx.background());
268 fs.insert_tree(
269 "/root",
270 json!({
271 "dir1": {
272 "deps": {
273 // symlinks here
274 },
275 "src": {
276 "a.rs": "",
277 "b.rs": "",
278 },
279 },
280 "dir2": {
281 "src": {
282 "c.rs": "",
283 "d.rs": "",
284 }
285 },
286 "dir3": {
287 "deps": {},
288 "src": {
289 "e.rs": "",
290 "f.rs": "",
291 },
292 }
293 }),
294 )
295 .await;
296
297 // These symlinks point to directories outside of the worktree's root, dir1.
298 fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
299 .await;
300 fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
301 .await;
302
303 let tree = Worktree::local(
304 build_client(cx),
305 Path::new("/root/dir1"),
306 true,
307 fs.clone(),
308 Default::default(),
309 &mut cx.to_async(),
310 )
311 .await
312 .unwrap();
313
314 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
315 .await;
316
317 let tree_updates = Arc::new(Mutex::new(Vec::new()));
318 tree.update(cx, |_, cx| {
319 let tree_updates = tree_updates.clone();
320 cx.subscribe(&tree, move |_, _, event, _| {
321 if let Event::UpdatedEntries(update) = event {
322 tree_updates.lock().extend(
323 update
324 .iter()
325 .map(|(path, _, change)| (path.clone(), *change)),
326 );
327 }
328 })
329 .detach();
330 });
331
332 // The symlinked directories are not scanned by default.
333 tree.read_with(cx, |tree, _| {
334 assert_eq!(
335 tree.entries(true)
336 .map(|entry| (entry.path.as_ref(), entry.is_external))
337 .collect::<Vec<_>>(),
338 vec![
339 (Path::new(""), false),
340 (Path::new("deps"), false),
341 (Path::new("deps/dep-dir2"), true),
342 (Path::new("deps/dep-dir3"), true),
343 (Path::new("src"), false),
344 (Path::new("src/a.rs"), false),
345 (Path::new("src/b.rs"), false),
346 ]
347 );
348
349 assert_eq!(
350 tree.entry_for_path("deps/dep-dir2").unwrap().kind,
351 EntryKind::UnloadedDir
352 );
353 });
354
355 // Expand one of the symlinked directories.
356 tree.read_with(cx, |tree, _| {
357 tree.as_local()
358 .unwrap()
359 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
360 })
361 .recv()
362 .await;
363
364 // The expanded directory's contents are loaded. Subdirectories are
365 // not scanned yet.
366 tree.read_with(cx, |tree, _| {
367 assert_eq!(
368 tree.entries(true)
369 .map(|entry| (entry.path.as_ref(), entry.is_external))
370 .collect::<Vec<_>>(),
371 vec![
372 (Path::new(""), false),
373 (Path::new("deps"), false),
374 (Path::new("deps/dep-dir2"), true),
375 (Path::new("deps/dep-dir3"), true),
376 (Path::new("deps/dep-dir3/deps"), true),
377 (Path::new("deps/dep-dir3/src"), true),
378 (Path::new("src"), false),
379 (Path::new("src/a.rs"), false),
380 (Path::new("src/b.rs"), false),
381 ]
382 );
383 });
384 assert_eq!(
385 mem::take(&mut *tree_updates.lock()),
386 &[
387 (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
388 (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
389 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
390 ]
391 );
392
393 // Expand a subdirectory of one of the symlinked directories.
394 tree.read_with(cx, |tree, _| {
395 tree.as_local()
396 .unwrap()
397 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
398 })
399 .recv()
400 .await;
401
402 // The expanded subdirectory's contents are loaded.
403 tree.read_with(cx, |tree, _| {
404 assert_eq!(
405 tree.entries(true)
406 .map(|entry| (entry.path.as_ref(), entry.is_external))
407 .collect::<Vec<_>>(),
408 vec![
409 (Path::new(""), false),
410 (Path::new("deps"), false),
411 (Path::new("deps/dep-dir2"), true),
412 (Path::new("deps/dep-dir3"), true),
413 (Path::new("deps/dep-dir3/deps"), true),
414 (Path::new("deps/dep-dir3/src"), true),
415 (Path::new("deps/dep-dir3/src/e.rs"), true),
416 (Path::new("deps/dep-dir3/src/f.rs"), true),
417 (Path::new("src"), false),
418 (Path::new("src/a.rs"), false),
419 (Path::new("src/b.rs"), false),
420 ]
421 );
422 });
423
424 assert_eq!(
425 mem::take(&mut *tree_updates.lock()),
426 &[
427 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
428 (
429 Path::new("deps/dep-dir3/src/e.rs").into(),
430 PathChange::Loaded
431 ),
432 (
433 Path::new("deps/dep-dir3/src/f.rs").into(),
434 PathChange::Loaded
435 )
436 ]
437 );
438}
439
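// Opening a buffer deep inside an unexpanded gitignored directory loads only
// the directories along the path to that file.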
440#[gpui::test]
441async fn test_open_gitignored_files(cx: &mut TestAppContext) {
442 let fs = FakeFs::new(cx.background());
443 fs.insert_tree(
444 "/root",
445 json!({
446 ".gitignore": "node_modules\n",
447 "one": {
448 "node_modules": {
449 "a": {
450 "a1.js": "a1",
451 "a2.js": "a2",
452 },
453 "b": {
454 "b1.js": "b1",
455 "b2.js": "b2",
456 },
457 "c": {
458 "c1.js": "c1",
459 "c2.js": "c2",
460 }
461 },
462 },
463 "two": {
464 "x.js": "",
465 "y.js": "",
466 },
467 }),
468 )
469 .await;
470
471 let tree = Worktree::local(
472 build_client(cx),
473 Path::new("/root"),
474 true,
475 fs.clone(),
476 Default::default(),
477 &mut cx.to_async(),
478 )
479 .await
480 .unwrap();
481
482 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
483 .await;
484
485 tree.read_with(cx, |tree, _| {
486 assert_eq!(
487 tree.entries(true)
488 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
489 .collect::<Vec<_>>(),
490 vec![
491 (Path::new(""), false),
492 (Path::new(".gitignore"), false),
493 (Path::new("one"), false),
494 (Path::new("one/node_modules"), true),
495 (Path::new("two"), false),
496 (Path::new("two/x.js"), false),
497 (Path::new("two/y.js"), false),
498 ]
499 );
500 });
501
502 // Open a file that is nested inside of a gitignored directory that
503 // has not yet been expanded.
504 let prev_read_dir_count = fs.read_dir_call_count();
505 let buffer = tree
506 .update(cx, |tree, cx| {
507 tree.as_local_mut()
508 .unwrap()
509 .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
510 })
511 .await
512 .unwrap();
513
514 tree.read_with(cx, |tree, cx| {
515 assert_eq!(
516 tree.entries(true)
517 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
518 .collect::<Vec<_>>(),
519 vec![
520 (Path::new(""), false),
521 (Path::new(".gitignore"), false),
522 (Path::new("one"), false),
523 (Path::new("one/node_modules"), true),
524 (Path::new("one/node_modules/a"), true),
525 (Path::new("one/node_modules/b"), true),
526 (Path::new("one/node_modules/b/b1.js"), true),
527 (Path::new("one/node_modules/b/b2.js"), true),
528 (Path::new("one/node_modules/c"), true),
529 (Path::new("two"), false),
530 (Path::new("two/x.js"), false),
531 (Path::new("two/y.js"), false),
532 ]
533 );
534
535 assert_eq!(
536 buffer.read(cx).file().unwrap().path().as_ref(),
537 Path::new("one/node_modules/b/b1.js")
538 );
539
540 // Only the newly-expanded directories are scanned.
541 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
542 });
543
544 // Open another file in a different subdirectory of the same
545 // gitignored directory.
546 let prev_read_dir_count = fs.read_dir_call_count();
547 let buffer = tree
548 .update(cx, |tree, cx| {
549 tree.as_local_mut()
550 .unwrap()
551 .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
552 })
553 .await
554 .unwrap();
555
556 tree.read_with(cx, |tree, cx| {
557 assert_eq!(
558 tree.entries(true)
559 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
560 .collect::<Vec<_>>(),
561 vec![
562 (Path::new(""), false),
563 (Path::new(".gitignore"), false),
564 (Path::new("one"), false),
565 (Path::new("one/node_modules"), true),
566 (Path::new("one/node_modules/a"), true),
567 (Path::new("one/node_modules/a/a1.js"), true),
568 (Path::new("one/node_modules/a/a2.js"), true),
569 (Path::new("one/node_modules/b"), true),
570 (Path::new("one/node_modules/b/b1.js"), true),
571 (Path::new("one/node_modules/b/b2.js"), true),
572 (Path::new("one/node_modules/c"), true),
573 (Path::new("two"), false),
574 (Path::new("two/x.js"), false),
575 (Path::new("two/y.js"), false),
576 ]
577 );
578
579 assert_eq!(
580 buffer.read(cx).file().unwrap().path().as_ref(),
581 Path::new("one/node_modules/a/a2.js")
582 );
583
584 // Only the newly-expanded directory is scanned.
585 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
586 });
587
588 // No work happens when files and directories change within an unloaded directory.
589 let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
590 fs.create_dir("/root/one/node_modules/c/lib".as_ref())
591 .await
592 .unwrap();
593 cx.foreground().run_until_parked();
594 assert_eq!(
595 fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
596 0
597 );
598}
599
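// When a .gitignore change un-ignores a directory, its previously unloaded
// contents are scanned, and each newly-loaded directory is read only once.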
600#[gpui::test]
601async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
602 let fs = FakeFs::new(cx.background());
603 fs.insert_tree(
604 "/root",
605 json!({
606 ".gitignore": "node_modules\n",
607 "a": {
608 "a.js": "",
609 },
610 "b": {
611 "b.js": "",
612 },
613 "node_modules": {
614 "c": {
615 "c.js": "",
616 },
617 "d": {
618 "d.js": "",
619 "e": {
620 "e1.js": "",
621 "e2.js": "",
622 },
623 "f": {
624 "f1.js": "",
625 "f2.js": "",
626 }
627 },
628 },
629 }),
630 )
631 .await;
632
633 let tree = Worktree::local(
634 build_client(cx),
635 Path::new("/root"),
636 true,
637 fs.clone(),
638 Default::default(),
639 &mut cx.to_async(),
640 )
641 .await
642 .unwrap();
643
644 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
645 .await;
646
    // Refresh the entry for a file within the gitignored directory (as happens
    // when that file is opened), forcing some of its subdirectories to be
    // scanned, but not all.
649 let read_dir_count_1 = fs.read_dir_call_count();
650 tree.read_with(cx, |tree, _| {
651 tree.as_local()
652 .unwrap()
653 .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
654 })
655 .recv()
656 .await;
657
658 // Those subdirectories are now loaded.
659 tree.read_with(cx, |tree, _| {
660 assert_eq!(
661 tree.entries(true)
662 .map(|e| (e.path.as_ref(), e.is_ignored))
663 .collect::<Vec<_>>(),
664 &[
665 (Path::new(""), false),
666 (Path::new(".gitignore"), false),
667 (Path::new("a"), false),
668 (Path::new("a/a.js"), false),
669 (Path::new("b"), false),
670 (Path::new("b/b.js"), false),
671 (Path::new("node_modules"), true),
672 (Path::new("node_modules/c"), true),
673 (Path::new("node_modules/d"), true),
674 (Path::new("node_modules/d/d.js"), true),
675 (Path::new("node_modules/d/e"), true),
676 (Path::new("node_modules/d/f"), true),
677 ]
678 );
679 });
680 let read_dir_count_2 = fs.read_dir_call_count();
681 assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
682
683 // Update the gitignore so that node_modules is no longer ignored,
684 // but a subdirectory is ignored
685 fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
686 .await
687 .unwrap();
688 cx.foreground().run_until_parked();
689
690 // All of the directories that are no longer ignored are now loaded.
691 tree.read_with(cx, |tree, _| {
692 assert_eq!(
693 tree.entries(true)
694 .map(|e| (e.path.as_ref(), e.is_ignored))
695 .collect::<Vec<_>>(),
696 &[
697 (Path::new(""), false),
698 (Path::new(".gitignore"), false),
699 (Path::new("a"), false),
700 (Path::new("a/a.js"), false),
701 (Path::new("b"), false),
702 (Path::new("b/b.js"), false),
703 // This directory is no longer ignored
704 (Path::new("node_modules"), false),
705 (Path::new("node_modules/c"), false),
706 (Path::new("node_modules/c/c.js"), false),
707 (Path::new("node_modules/d"), false),
708 (Path::new("node_modules/d/d.js"), false),
709 // This subdirectory is now ignored
710 (Path::new("node_modules/d/e"), true),
711 (Path::new("node_modules/d/f"), false),
712 (Path::new("node_modules/d/f/f1.js"), false),
713 (Path::new("node_modules/d/f/f2.js"), false),
714 ]
715 );
716 });
717
718 // Each of the newly-loaded directories is scanned only once.
719 let read_dir_count_3 = fs.read_dir_call_count();
720 assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
721}
722
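// Files created after the initial scan pick up their ignore status from both
// the worktree's own .gitignore and .gitignore files in ancestor directories.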
723#[gpui::test(iterations = 10)]
724async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
725 let fs = FakeFs::new(cx.background());
726 fs.insert_tree(
727 "/root",
728 json!({
729 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
730 "tree": {
731 ".git": {},
732 ".gitignore": "ignored-dir\n",
733 "tracked-dir": {
734 "tracked-file1": "",
735 "ancestor-ignored-file1": "",
736 },
737 "ignored-dir": {
738 "ignored-file1": ""
739 }
740 }
741 }),
742 )
743 .await;
744
745 let tree = Worktree::local(
746 build_client(cx),
747 "/root/tree".as_ref(),
748 true,
749 fs.clone(),
750 Default::default(),
751 &mut cx.to_async(),
752 )
753 .await
754 .unwrap();
755 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
756 .await;
757
758 tree.read_with(cx, |tree, _| {
759 tree.as_local()
760 .unwrap()
761 .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
762 })
763 .recv()
764 .await;
765
766 cx.read(|cx| {
767 let tree = tree.read(cx);
768 assert!(
769 !tree
770 .entry_for_path("tracked-dir/tracked-file1")
771 .unwrap()
772 .is_ignored
773 );
774 assert!(
775 tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
776 .unwrap()
777 .is_ignored
778 );
779 assert!(
780 tree.entry_for_path("ignored-dir/ignored-file1")
781 .unwrap()
782 .is_ignored
783 );
784 });
785
786 fs.create_file(
787 "/root/tree/tracked-dir/tracked-file2".as_ref(),
788 Default::default(),
789 )
790 .await
791 .unwrap();
792 fs.create_file(
793 "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
794 Default::default(),
795 )
796 .await
797 .unwrap();
798 fs.create_file(
799 "/root/tree/ignored-dir/ignored-file2".as_ref(),
800 Default::default(),
801 )
802 .await
803 .unwrap();
804
805 cx.foreground().run_until_parked();
806 cx.read(|cx| {
807 let tree = tree.read(cx);
808 assert!(
809 !tree
810 .entry_for_path("tracked-dir/tracked-file2")
811 .unwrap()
812 .is_ignored
813 );
814 assert!(
815 tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
816 .unwrap()
817 .is_ignored
818 );
819 assert!(
820 tree.entry_for_path("ignored-dir/ignored-file2")
821 .unwrap()
822 .is_ignored
823 );
824 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
825 });
826}
827
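// Files written through the worktree get entries with the correct ignore
// status, whether they land in a tracked or an ignored directory.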
828#[gpui::test]
829async fn test_write_file(cx: &mut TestAppContext) {
830 let dir = temp_tree(json!({
831 ".git": {},
832 ".gitignore": "ignored-dir\n",
833 "tracked-dir": {},
834 "ignored-dir": {}
835 }));
836
837 let tree = Worktree::local(
838 build_client(cx),
839 dir.path(),
840 true,
841 Arc::new(RealFs),
842 Default::default(),
843 &mut cx.to_async(),
844 )
845 .await
846 .unwrap();
847 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
848 .await;
849 tree.flush_fs_events(cx).await;
850
851 tree.update(cx, |tree, cx| {
852 tree.as_local().unwrap().write_file(
853 Path::new("tracked-dir/file.txt"),
854 "hello".into(),
855 Default::default(),
856 cx,
857 )
858 })
859 .await
860 .unwrap();
861 tree.update(cx, |tree, cx| {
862 tree.as_local().unwrap().write_file(
863 Path::new("ignored-dir/file.txt"),
864 "world".into(),
865 Default::default(),
866 cx,
867 )
868 })
869 .await
870 .unwrap();
871
872 tree.read_with(cx, |tree, _| {
873 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
874 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
875 assert!(!tracked.is_ignored);
876 assert!(ignored.is_ignored);
877 });
878}
879
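// Creating a directory while the initial scan is still running is reflected
// both in the worktree and in a remote snapshot kept in sync via
// `observe_updates`.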
880#[gpui::test(iterations = 30)]
881async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
882 let fs = FakeFs::new(cx.background());
883 fs.insert_tree(
884 "/root",
885 json!({
886 "b": {},
887 "c": {},
888 "d": {},
889 }),
890 )
891 .await;
892
893 let tree = Worktree::local(
894 build_client(cx),
895 "/root".as_ref(),
896 true,
897 fs,
898 Default::default(),
899 &mut cx.to_async(),
900 )
901 .await
902 .unwrap();
903
904 let snapshot1 = tree.update(cx, |tree, cx| {
905 let tree = tree.as_local_mut().unwrap();
906 let snapshot = Arc::new(Mutex::new(tree.snapshot()));
907 let _ = tree.observe_updates(0, cx, {
908 let snapshot = snapshot.clone();
909 move |update| {
910 snapshot.lock().apply_remote_update(update).unwrap();
911 async { true }
912 }
913 });
914 snapshot
915 });
916
917 let entry = tree
918 .update(cx, |tree, cx| {
919 tree.as_local_mut()
920 .unwrap()
921 .create_entry("a/e".as_ref(), true, cx)
922 })
923 .await
924 .unwrap();
925 assert!(entry.is_dir());
926
927 cx.foreground().run_until_parked();
928 tree.read_with(cx, |tree, _| {
929 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
930 });
931
932 let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
933 assert_eq!(
934 snapshot1.lock().entries(true).collect::<Vec<_>>(),
935 snapshot2.entries(true).collect::<Vec<_>>()
936 );
937}
938
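// `create_entry` creates any missing parent directories, on both the fake and
// the real filesystem.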
939#[gpui::test]
940async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
941 let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
942
943 let fs_fake = FakeFs::new(cx.background());
944 fs_fake
945 .insert_tree(
946 "/root",
947 json!({
948 "a": {},
949 }),
950 )
951 .await;
952
953 let tree_fake = Worktree::local(
954 client_fake,
955 "/root".as_ref(),
956 true,
957 fs_fake,
958 Default::default(),
959 &mut cx.to_async(),
960 )
961 .await
962 .unwrap();
963
964 let entry = tree_fake
965 .update(cx, |tree, cx| {
966 tree.as_local_mut()
967 .unwrap()
968 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
969 })
970 .await
971 .unwrap();
972 assert!(entry.is_file());
973
974 cx.foreground().run_until_parked();
975 tree_fake.read_with(cx, |tree, _| {
976 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
977 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
978 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
979 });
980
981 let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
982
983 let fs_real = Arc::new(RealFs);
984 let temp_root = temp_tree(json!({
985 "a": {}
986 }));
987
988 let tree_real = Worktree::local(
989 client_real,
990 temp_root.path(),
991 true,
992 fs_real,
993 Default::default(),
994 &mut cx.to_async(),
995 )
996 .await
997 .unwrap();
998
999 let entry = tree_real
1000 .update(cx, |tree, cx| {
1001 tree.as_local_mut()
1002 .unwrap()
1003 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1004 })
1005 .await
1006 .unwrap();
1007 assert!(entry.is_file());
1008
1009 cx.foreground().run_until_parked();
1010 tree_real.read_with(cx, |tree, _| {
1011 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1012 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1013 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1014 });
1015}
1016
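// Randomly mutates the worktree while its initial scan is still running, then
// verifies snapshot invariants and that replaying the streamed updates onto
// earlier snapshots converges on the final state. The OPERATIONS and
// INITIAL_ENTRIES environment variables control the amount of work done.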
1017#[gpui::test(iterations = 100)]
1018async fn test_random_worktree_operations_during_initial_scan(
1019 cx: &mut TestAppContext,
1020 mut rng: StdRng,
1021) {
1022 let operations = env::var("OPERATIONS")
1023 .map(|o| o.parse().unwrap())
1024 .unwrap_or(5);
1025 let initial_entries = env::var("INITIAL_ENTRIES")
1026 .map(|o| o.parse().unwrap())
1027 .unwrap_or(20);
1028
1029 let root_dir = Path::new("/test");
1030 let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
1031 fs.as_fake().insert_tree(root_dir, json!({})).await;
1032 for _ in 0..initial_entries {
1033 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1034 }
1035 log::info!("generated initial tree");
1036
1037 let worktree = Worktree::local(
1038 build_client(cx),
1039 root_dir,
1040 true,
1041 fs.clone(),
1042 Default::default(),
1043 &mut cx.to_async(),
1044 )
1045 .await
1046 .unwrap();
1047
1048 let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
1049 let updates = Arc::new(Mutex::new(Vec::new()));
1050 worktree.update(cx, |tree, cx| {
1051 check_worktree_change_events(tree, cx);
1052
1053 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1054 let updates = updates.clone();
1055 move |update| {
1056 updates.lock().push(update);
1057 async { true }
1058 }
1059 });
1060 });
1061
1062 for _ in 0..operations {
1063 worktree
1064 .update(cx, |worktree, cx| {
1065 randomly_mutate_worktree(worktree, &mut rng, cx)
1066 })
1067 .await
1068 .log_err();
1069 worktree.read_with(cx, |tree, _| {
1070 tree.as_local().unwrap().snapshot().check_invariants(true)
1071 });
1072
1073 if rng.gen_bool(0.6) {
1074 snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
1075 }
1076 }
1077
1078 worktree
1079 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1080 .await;
1081
1082 cx.foreground().run_until_parked();
1083
1084 let final_snapshot = worktree.read_with(cx, |tree, _| {
1085 let tree = tree.as_local().unwrap();
1086 let snapshot = tree.snapshot();
1087 snapshot.check_invariants(true);
1088 snapshot
1089 });
1090
1091 for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
1092 let mut updated_snapshot = snapshot.clone();
1093 for update in updates.lock().iter() {
1094 if update.scan_id >= updated_snapshot.scan_id() as u64 {
1095 updated_snapshot
1096 .apply_remote_update(update.clone())
1097 .unwrap();
1098 }
1099 }
1100
1101 assert_eq!(
1102 updated_snapshot.entries(true).collect::<Vec<_>>(),
1103 final_snapshot.entries(true).collect::<Vec<_>>(),
1104 "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
1105 );
1106 }
1107}
1108
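// Interleaves random worktree and filesystem mutations with batched fs events,
// then checks snapshot invariants, compares against a freshly-scanned
// worktree, and replays the streamed updates onto stored snapshots.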
1109#[gpui::test(iterations = 100)]
1110async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
1111 let operations = env::var("OPERATIONS")
1112 .map(|o| o.parse().unwrap())
1113 .unwrap_or(40);
1114 let initial_entries = env::var("INITIAL_ENTRIES")
1115 .map(|o| o.parse().unwrap())
1116 .unwrap_or(20);
1117
1118 let root_dir = Path::new("/test");
1119 let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
1120 fs.as_fake().insert_tree(root_dir, json!({})).await;
1121 for _ in 0..initial_entries {
1122 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1123 }
1124 log::info!("generated initial tree");
1125
1126 let worktree = Worktree::local(
1127 build_client(cx),
1128 root_dir,
1129 true,
1130 fs.clone(),
1131 Default::default(),
1132 &mut cx.to_async(),
1133 )
1134 .await
1135 .unwrap();
1136
1137 let updates = Arc::new(Mutex::new(Vec::new()));
1138 worktree.update(cx, |tree, cx| {
1139 check_worktree_change_events(tree, cx);
1140
1141 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1142 let updates = updates.clone();
1143 move |update| {
1144 updates.lock().push(update);
1145 async { true }
1146 }
1147 });
1148 });
1149
1150 worktree
1151 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1152 .await;
1153
1154 fs.as_fake().pause_events();
1155 let mut snapshots = Vec::new();
1156 let mut mutations_len = operations;
1157 while mutations_len > 1 {
1158 if rng.gen_bool(0.2) {
1159 worktree
1160 .update(cx, |worktree, cx| {
1161 randomly_mutate_worktree(worktree, &mut rng, cx)
1162 })
1163 .await
1164 .log_err();
1165 } else {
1166 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1167 }
1168
1169 let buffered_event_count = fs.as_fake().buffered_event_count();
1170 if buffered_event_count > 0 && rng.gen_bool(0.3) {
1171 let len = rng.gen_range(0..=buffered_event_count);
1172 log::info!("flushing {} events", len);
1173 fs.as_fake().flush_events(len);
1174 } else {
1175 randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
1176 mutations_len -= 1;
1177 }
1178
1179 cx.foreground().run_until_parked();
1180 if rng.gen_bool(0.2) {
1181 log::info!("storing snapshot {}", snapshots.len());
1182 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1183 snapshots.push(snapshot);
1184 }
1185 }
1186
1187 log::info!("quiescing");
1188 fs.as_fake().flush_events(usize::MAX);
1189 cx.foreground().run_until_parked();
1190
1191 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1192 snapshot.check_invariants(true);
1193 let expanded_paths = snapshot
1194 .expanded_entries()
1195 .map(|e| e.path.clone())
1196 .collect::<Vec<_>>();
1197
1198 {
1199 let new_worktree = Worktree::local(
1200 build_client(cx),
1201 root_dir,
1202 true,
1203 fs.clone(),
1204 Default::default(),
1205 &mut cx.to_async(),
1206 )
1207 .await
1208 .unwrap();
1209 new_worktree
1210 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1211 .await;
1212 new_worktree
1213 .update(cx, |tree, _| {
1214 tree.as_local_mut()
1215 .unwrap()
1216 .refresh_entries_for_paths(expanded_paths)
1217 })
1218 .recv()
1219 .await;
1220 let new_snapshot =
1221 new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1222 assert_eq!(
1223 snapshot.entries_without_ids(true),
1224 new_snapshot.entries_without_ids(true)
1225 );
1226 }
1227
1228 for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
1229 for update in updates.lock().iter() {
1230 if update.scan_id >= prev_snapshot.scan_id() as u64 {
1231 prev_snapshot.apply_remote_update(update.clone()).unwrap();
1232 }
1233 }
1234
1235 assert_eq!(
1236 prev_snapshot
1237 .entries(true)
1238 .map(ignore_pending_dir)
1239 .collect::<Vec<_>>(),
1240 snapshot
1241 .entries(true)
1242 .map(ignore_pending_dir)
1243 .collect::<Vec<_>>(),
1244 "wrong updates after snapshot {i}: {updates:#?}",
1245 );
1246 }
1247
1248 fn ignore_pending_dir(entry: &Entry) -> Entry {
1249 let mut entry = entry.clone();
1250 if entry.kind.is_dir() {
1251 entry.kind = EntryKind::Dir
1252 }
1253 entry
1254 }
1255}
1256
1257// The worktree's `UpdatedEntries` event can be used to follow along with
1258// all changes to the worktree's snapshot.
1259fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
1260 let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
1261 cx.subscribe(&cx.handle(), move |tree, _, event, _| {
1262 if let Event::UpdatedEntries(changes) = event {
1263 for (path, _, change_type) in changes.iter() {
1264 let entry = tree.entry_for_path(&path).cloned();
1265 let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
1266 Ok(ix) | Err(ix) => ix,
1267 };
1268 match change_type {
1269 PathChange::Added => entries.insert(ix, entry.unwrap()),
1270 PathChange::Removed => drop(entries.remove(ix)),
1271 PathChange::Updated => {
1272 let entry = entry.unwrap();
1273 let existing_entry = entries.get_mut(ix).unwrap();
1274 assert_eq!(existing_entry.path, entry.path);
1275 *existing_entry = entry;
1276 }
1277 PathChange::AddedOrUpdated | PathChange::Loaded => {
1278 let entry = entry.unwrap();
1279 if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
1280 *entries.get_mut(ix).unwrap() = entry;
1281 } else {
1282 entries.insert(ix, entry);
1283 }
1284 }
1285 }
1286 }
1287
1288 let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
1289 assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
1290 }
1291 })
1292 .detach();
1293}
1294
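// Picks a random entry and either deletes it, renames it to a random location,
// creates a child within it (if it is a directory), or overwrites its contents
// (if it is a file).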
1295fn randomly_mutate_worktree(
1296 worktree: &mut Worktree,
1297 rng: &mut impl Rng,
1298 cx: &mut ModelContext<Worktree>,
1299) -> Task<Result<()>> {
1300 log::info!("mutating worktree");
1301 let worktree = worktree.as_local_mut().unwrap();
1302 let snapshot = worktree.snapshot();
1303 let entry = snapshot.entries(false).choose(rng).unwrap();
1304
1305 match rng.gen_range(0_u32..100) {
1306 0..=33 if entry.path.as_ref() != Path::new("") => {
1307 log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
1308 worktree.delete_entry(entry.id, cx).unwrap()
1309 }
1310 ..=66 if entry.path.as_ref() != Path::new("") => {
1311 let other_entry = snapshot.entries(false).choose(rng).unwrap();
1312 let new_parent_path = if other_entry.is_dir() {
1313 other_entry.path.clone()
1314 } else {
1315 other_entry.path.parent().unwrap().into()
1316 };
1317 let mut new_path = new_parent_path.join(random_filename(rng));
1318 if new_path.starts_with(&entry.path) {
1319 new_path = random_filename(rng).into();
1320 }
1321
1322 log::info!(
1323 "renaming entry {:?} ({}) to {:?}",
1324 entry.path,
1325 entry.id.0,
1326 new_path
1327 );
1328 let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
1329 cx.foreground().spawn(async move {
1330 task.await?;
1331 Ok(())
1332 })
1333 }
1334 _ => {
1335 let task = if entry.is_dir() {
1336 let child_path = entry.path.join(random_filename(rng));
1337 let is_dir = rng.gen_bool(0.3);
1338 log::info!(
1339 "creating {} at {:?}",
1340 if is_dir { "dir" } else { "file" },
1341 child_path,
1342 );
1343 worktree.create_entry(child_path, is_dir, cx)
1344 } else {
1345 log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
1346 worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
1347 };
1348 cx.foreground().spawn(async move {
1349 task.await?;
1350 Ok(())
1351 })
1352 }
1353 }
1354}
1355
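// Randomly mutates the filesystem beneath `root_path`: inserting new files and
// directories, occasionally writing a .gitignore, or renaming and deleting
// existing paths.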
1356async fn randomly_mutate_fs(
1357 fs: &Arc<dyn Fs>,
1358 root_path: &Path,
1359 insertion_probability: f64,
1360 rng: &mut impl Rng,
1361) {
1362 log::info!("mutating fs");
1363 let mut files = Vec::new();
1364 let mut dirs = Vec::new();
1365 for path in fs.as_fake().paths(false) {
1366 if path.starts_with(root_path) {
1367 if fs.is_file(&path).await {
1368 files.push(path);
1369 } else {
1370 dirs.push(path);
1371 }
1372 }
1373 }
1374
1375 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
1376 let path = dirs.choose(rng).unwrap();
1377 let new_path = path.join(random_filename(rng));
1378
1379 if rng.gen() {
1380 log::info!(
1381 "creating dir {:?}",
1382 new_path.strip_prefix(root_path).unwrap()
1383 );
1384 fs.create_dir(&new_path).await.unwrap();
1385 } else {
1386 log::info!(
1387 "creating file {:?}",
1388 new_path.strip_prefix(root_path).unwrap()
1389 );
1390 fs.create_file(&new_path, Default::default()).await.unwrap();
1391 }
1392 } else if rng.gen_bool(0.05) {
1393 let ignore_dir_path = dirs.choose(rng).unwrap();
1394 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
1395
1396 let subdirs = dirs
1397 .iter()
1398 .filter(|d| d.starts_with(&ignore_dir_path))
1399 .cloned()
1400 .collect::<Vec<_>>();
1401 let subfiles = files
1402 .iter()
1403 .filter(|d| d.starts_with(&ignore_dir_path))
1404 .cloned()
1405 .collect::<Vec<_>>();
1406 let files_to_ignore = {
1407 let len = rng.gen_range(0..=subfiles.len());
1408 subfiles.choose_multiple(rng, len)
1409 };
1410 let dirs_to_ignore = {
1411 let len = rng.gen_range(0..subdirs.len());
1412 subdirs.choose_multiple(rng, len)
1413 };
1414
1415 let mut ignore_contents = String::new();
1416 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
1417 writeln!(
1418 ignore_contents,
1419 "{}",
1420 path_to_ignore
1421 .strip_prefix(&ignore_dir_path)
1422 .unwrap()
1423 .to_str()
1424 .unwrap()
1425 )
1426 .unwrap();
1427 }
1428 log::info!(
1429 "creating gitignore {:?} with contents:\n{}",
1430 ignore_path.strip_prefix(&root_path).unwrap(),
1431 ignore_contents
1432 );
1433 fs.save(
1434 &ignore_path,
1435 &ignore_contents.as_str().into(),
1436 Default::default(),
1437 )
1438 .await
1439 .unwrap();
1440 } else {
1441 let old_path = {
1442 let file_path = files.choose(rng);
1443 let dir_path = dirs[1..].choose(rng);
1444 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
1445 };
1446
1447 let is_rename = rng.gen();
1448 if is_rename {
1449 let new_path_parent = dirs
1450 .iter()
1451 .filter(|d| !d.starts_with(old_path))
1452 .choose(rng)
1453 .unwrap();
1454
1455 let overwrite_existing_dir =
1456 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
1457 let new_path = if overwrite_existing_dir {
1458 fs.remove_dir(
1459 &new_path_parent,
1460 RemoveOptions {
1461 recursive: true,
1462 ignore_if_not_exists: true,
1463 },
1464 )
1465 .await
1466 .unwrap();
1467 new_path_parent.to_path_buf()
1468 } else {
1469 new_path_parent.join(random_filename(rng))
1470 };
1471
1472 log::info!(
1473 "renaming {:?} to {}{:?}",
1474 old_path.strip_prefix(&root_path).unwrap(),
1475 if overwrite_existing_dir {
1476 "overwrite "
1477 } else {
1478 ""
1479 },
1480 new_path.strip_prefix(&root_path).unwrap()
1481 );
1482 fs.rename(
1483 &old_path,
1484 &new_path,
1485 fs::RenameOptions {
1486 overwrite: true,
1487 ignore_if_exists: true,
1488 },
1489 )
1490 .await
1491 .unwrap();
1492 } else if fs.is_file(&old_path).await {
1493 log::info!(
1494 "deleting file {:?}",
1495 old_path.strip_prefix(&root_path).unwrap()
1496 );
1497 fs.remove_file(old_path, Default::default()).await.unwrap();
1498 } else {
1499 log::info!(
1500 "deleting dir {:?}",
1501 old_path.strip_prefix(&root_path).unwrap()
1502 );
1503 fs.remove_dir(
1504 &old_path,
1505 RemoveOptions {
1506 recursive: true,
1507 ignore_if_not_exists: true,
1508 },
1509 )
1510 .await
1511 .unwrap();
1512 }
1513 }
1514}
1515
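// Generates a random six-character alphanumeric file name.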
1516fn random_filename(rng: &mut impl Rng) -> String {
1517 (0..6)
1518 .map(|_| rng.sample(rand::distributions::Alphanumeric))
1519 .map(char::from)
1520 .collect()
1521}
1522
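// Git statuses follow a repository's work directory when it is renamed.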
1523#[gpui::test]
1524async fn test_rename_work_directory(cx: &mut TestAppContext) {
1525 let root = temp_tree(json!({
1526 "projects": {
1527 "project1": {
1528 "a": "",
1529 "b": "",
1530 }
1531 },
1532
1533 }));
1534 let root_path = root.path();
1535
1536 let tree = Worktree::local(
1537 build_client(cx),
1538 root_path,
1539 true,
1540 Arc::new(RealFs),
1541 Default::default(),
1542 &mut cx.to_async(),
1543 )
1544 .await
1545 .unwrap();
1546
1547 let repo = git_init(&root_path.join("projects/project1"));
1548 git_add("a", &repo);
1549 git_commit("init", &repo);
1550 std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
1551
1552 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1553 .await;
1554
1555 tree.flush_fs_events(cx).await;
1556
1557 cx.read(|cx| {
1558 let tree = tree.read(cx);
1559 let (work_dir, _) = tree.repositories().next().unwrap();
1560 assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
1561 assert_eq!(
1562 tree.status_for_file(Path::new("projects/project1/a")),
1563 Some(GitFileStatus::Modified)
1564 );
1565 assert_eq!(
1566 tree.status_for_file(Path::new("projects/project1/b")),
1567 Some(GitFileStatus::Added)
1568 );
1569 });
1570
1571 std::fs::rename(
1572 root_path.join("projects/project1"),
1573 root_path.join("projects/project2"),
1574 )
1575 .ok();
1576 tree.flush_fs_events(cx).await;
1577
1578 cx.read(|cx| {
1579 let tree = tree.read(cx);
1580 let (work_dir, _) = tree.repositories().next().unwrap();
1581 assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
1582 assert_eq!(
1583 tree.status_for_file(Path::new("projects/project2/a")),
1584 Some(GitFileStatus::Modified)
1585 );
1586 assert_eq!(
1587 tree.status_for_file(Path::new("projects/project2/b")),
1588 Some(GitFileStatus::Added)
1589 );
1590 });
1591}
1592
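// `repository_for_path` maps each path to its innermost containing repository,
// and the association is dropped when the repository's .git directory is
// removed.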
1593#[gpui::test]
1594async fn test_git_repository_for_path(cx: &mut TestAppContext) {
1595 let root = temp_tree(json!({
1596 "c.txt": "",
1597 "dir1": {
1598 ".git": {},
1599 "deps": {
1600 "dep1": {
1601 ".git": {},
1602 "src": {
1603 "a.txt": ""
1604 }
1605 }
1606 },
1607 "src": {
1608 "b.txt": ""
1609 }
1610 },
1611 }));
1612
1613 let tree = Worktree::local(
1614 build_client(cx),
1615 root.path(),
1616 true,
1617 Arc::new(RealFs),
1618 Default::default(),
1619 &mut cx.to_async(),
1620 )
1621 .await
1622 .unwrap();
1623
1624 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1625 .await;
1626 tree.flush_fs_events(cx).await;
1627
1628 tree.read_with(cx, |tree, _cx| {
1629 let tree = tree.as_local().unwrap();
1630
1631 assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
1632
1633 let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
1634 assert_eq!(
1635 entry
1636 .work_directory(tree)
1637 .map(|directory| directory.as_ref().to_owned()),
1638 Some(Path::new("dir1").to_owned())
1639 );
1640
1641 let entry = tree
1642 .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
1643 .unwrap();
1644 assert_eq!(
1645 entry
1646 .work_directory(tree)
1647 .map(|directory| directory.as_ref().to_owned()),
1648 Some(Path::new("dir1/deps/dep1").to_owned())
1649 );
1650
1651 let entries = tree.files(false, 0);
1652
1653 let paths_with_repos = tree
1654 .entries_with_repositories(entries)
1655 .map(|(entry, repo)| {
1656 (
1657 entry.path.as_ref(),
1658 repo.and_then(|repo| {
1659 repo.work_directory(&tree)
1660 .map(|work_directory| work_directory.0.to_path_buf())
1661 }),
1662 )
1663 })
1664 .collect::<Vec<_>>();
1665
1666 assert_eq!(
1667 paths_with_repos,
1668 &[
1669 (Path::new("c.txt"), None),
1670 (
1671 Path::new("dir1/deps/dep1/src/a.txt"),
1672 Some(Path::new("dir1/deps/dep1").into())
1673 ),
1674 (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
1675 ]
1676 );
1677 });
1678
1679 let repo_update_events = Arc::new(Mutex::new(vec![]));
1680 tree.update(cx, |_, cx| {
1681 let repo_update_events = repo_update_events.clone();
1682 cx.subscribe(&tree, move |_, _, event, _| {
1683 if let Event::UpdatedGitRepositories(update) = event {
1684 repo_update_events.lock().push(update.clone());
1685 }
1686 })
1687 .detach();
1688 });
1689
1690 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
1691 tree.flush_fs_events(cx).await;
1692
1693 assert_eq!(
1694 repo_update_events.lock()[0]
1695 .iter()
1696 .map(|e| e.0.clone())
1697 .collect::<Vec<Arc<Path>>>(),
1698 vec![Path::new("dir1").into()]
1699 );
1700
1701 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
1702 tree.flush_fs_events(cx).await;
1703
1704 tree.read_with(cx, |tree, _cx| {
1705 let tree = tree.as_local().unwrap();
1706
1707 assert!(tree
1708 .repository_for_path("dir1/src/b.txt".as_ref())
1709 .is_none());
1710 });
1711}
1712
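// Exercises git status tracking against a real repository: staging, commits,
// resets, stashes, ignore-rule changes, and directory renames should all be
// reflected in `status_for_file`.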
1713#[gpui::test]
1714async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
1715 const IGNORE_RULE: &'static str = "**/target";
1716
1717 let root = temp_tree(json!({
1718 "project": {
1719 "a.txt": "a",
1720 "b.txt": "bb",
1721 "c": {
1722 "d": {
1723 "e.txt": "eee"
1724 }
1725 },
1726 "f.txt": "ffff",
1727 "target": {
1728 "build_file": "???"
1729 },
1730 ".gitignore": IGNORE_RULE
1731 },
1732
1733 }));
1734
1735 let tree = Worktree::local(
1736 build_client(cx),
1737 root.path(),
1738 true,
1739 Arc::new(RealFs),
1740 Default::default(),
1741 &mut cx.to_async(),
1742 )
1743 .await
1744 .unwrap();
1745
1746 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1747 .await;
1748
1749 const A_TXT: &'static str = "a.txt";
1750 const B_TXT: &'static str = "b.txt";
1751 const E_TXT: &'static str = "c/d/e.txt";
1752 const F_TXT: &'static str = "f.txt";
1753 const DOTGITIGNORE: &'static str = ".gitignore";
1754 const BUILD_FILE: &'static str = "target/build_file";
    let project_path = Path::new("project");
1756
1757 let work_dir = root.path().join("project");
1758 let mut repo = git_init(work_dir.as_path());
1759 repo.add_ignore_rule(IGNORE_RULE).unwrap();
1760 git_add(Path::new(A_TXT), &repo);
1761 git_add(Path::new(E_TXT), &repo);
1762 git_add(Path::new(DOTGITIGNORE), &repo);
1763 git_commit("Initial commit", &repo);
1764
1765 tree.flush_fs_events(cx).await;
1766 deterministic.run_until_parked();
1767
1768 // Check that the right git state is observed on startup
1769 tree.read_with(cx, |tree, _cx| {
1770 let snapshot = tree.snapshot();
1771 assert_eq!(snapshot.repositories().count(), 1);
1772 let (dir, _) = snapshot.repositories().next().unwrap();
1773 assert_eq!(dir.as_ref(), Path::new("project"));
1774
1775 assert_eq!(
1776 snapshot.status_for_file(project_path.join(B_TXT)),
1777 Some(GitFileStatus::Added)
1778 );
1779 assert_eq!(
1780 snapshot.status_for_file(project_path.join(F_TXT)),
1781 Some(GitFileStatus::Added)
1782 );
1783 });
1784
1785 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
1786
1787 tree.flush_fs_events(cx).await;
1788 deterministic.run_until_parked();
1789
1790 tree.read_with(cx, |tree, _cx| {
1791 let snapshot = tree.snapshot();
1792
1793 assert_eq!(
1794 snapshot.status_for_file(project_path.join(A_TXT)),
1795 Some(GitFileStatus::Modified)
1796 );
1797 });
1798
1799 git_add(Path::new(A_TXT), &repo);
1800 git_add(Path::new(B_TXT), &repo);
1801 git_commit("Committing modified and added", &repo);
1802 tree.flush_fs_events(cx).await;
1803 deterministic.run_until_parked();
1804
    // Check that changes made only within the .git directory (a commit, with no
    // working-tree edits) are reflected in the statuses
1806 tree.read_with(cx, |tree, _cx| {
1807 let snapshot = tree.snapshot();
1808
1809 assert_eq!(
1810 snapshot.status_for_file(project_path.join(F_TXT)),
1811 Some(GitFileStatus::Added)
1812 );
1813
1814 assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
1815 assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
1816 });
1817
1818 git_reset(0, &repo);
1819 git_remove_index(Path::new(B_TXT), &repo);
1820 git_stash(&mut repo);
1821 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
1822 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
1823 tree.flush_fs_events(cx).await;
1824 deterministic.run_until_parked();
1825
1826 // Check that more complex repo changes are tracked
1827 tree.read_with(cx, |tree, _cx| {
1828 let snapshot = tree.snapshot();
1829
1830 assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
1831 assert_eq!(
1832 snapshot.status_for_file(project_path.join(B_TXT)),
1833 Some(GitFileStatus::Added)
1834 );
1835 assert_eq!(
1836 snapshot.status_for_file(project_path.join(E_TXT)),
1837 Some(GitFileStatus::Modified)
1838 );
1839 });
1840
1841 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
1842 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
1843 std::fs::write(
1844 work_dir.join(DOTGITIGNORE),
1845 [IGNORE_RULE, "f.txt"].join("\n"),
1846 )
1847 .unwrap();
1848
1849 git_add(Path::new(DOTGITIGNORE), &repo);
1850 git_commit("Committing modified git ignore", &repo);
1851
1852 tree.flush_fs_events(cx).await;
1853 deterministic.run_until_parked();
1854
1855 let mut renamed_dir_name = "first_directory/second_directory";
1856 const RENAMED_FILE: &'static str = "rf.txt";
1857
1858 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
1859 std::fs::write(
1860 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
1861 "new-contents",
1862 )
1863 .unwrap();
1864
1865 tree.flush_fs_events(cx).await;
1866 deterministic.run_until_parked();
1867
1868 tree.read_with(cx, |tree, _cx| {
1869 let snapshot = tree.snapshot();
1870 assert_eq!(
1871 snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
1872 Some(GitFileStatus::Added)
1873 );
1874 });
1875
1876 renamed_dir_name = "new_first_directory/second_directory";
1877
1878 std::fs::rename(
1879 work_dir.join("first_directory"),
1880 work_dir.join("new_first_directory"),
1881 )
1882 .unwrap();
1883
1884 tree.flush_fs_events(cx).await;
1885 deterministic.run_until_parked();
1886
1887 tree.read_with(cx, |tree, _cx| {
1888 let snapshot = tree.snapshot();
1889
1890 assert_eq!(
1891 snapshot.status_for_file(
1892 project_path
1893 .join(Path::new(renamed_dir_name))
1894 .join(RENAMED_FILE)
1895 ),
1896 Some(GitFileStatus::Added)
1897 );
1898 });
1899}
1900
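// `propagate_git_statuses` rolls file statuses up to ancestor directories,
// with conflicts taking precedence over modifications, and modifications over
// additions.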
1901#[gpui::test]
1902async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
1903 let fs = FakeFs::new(cx.background());
1904 fs.insert_tree(
1905 "/root",
1906 json!({
1907 ".git": {},
1908 "a": {
1909 "b": {
1910 "c1.txt": "",
1911 "c2.txt": "",
1912 },
1913 "d": {
1914 "e1.txt": "",
1915 "e2.txt": "",
1916 "e3.txt": "",
1917 }
1918 },
1919 "f": {
1920 "no-status.txt": ""
1921 },
1922 "g": {
1923 "h1.txt": "",
1924 "h2.txt": ""
1925 },
1926
1927 }),
1928 )
1929 .await;
1930
1931 fs.set_status_for_repo_via_git_operation(
1932 &Path::new("/root/.git"),
1933 &[
1934 (Path::new("a/b/c1.txt"), GitFileStatus::Added),
1935 (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
1936 (Path::new("g/h2.txt"), GitFileStatus::Conflict),
1937 ],
1938 );
1939
1940 let tree = Worktree::local(
1941 build_client(cx),
1942 Path::new("/root"),
1943 true,
1944 fs.clone(),
1945 Default::default(),
1946 &mut cx.to_async(),
1947 )
1948 .await
1949 .unwrap();
1950
1951 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1952 .await;
1953
1954 cx.foreground().run_until_parked();
1955 let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
1956
1957 check_propagated_statuses(
1958 &snapshot,
1959 &[
1960 (Path::new(""), Some(GitFileStatus::Conflict)),
1961 (Path::new("a"), Some(GitFileStatus::Modified)),
1962 (Path::new("a/b"), Some(GitFileStatus::Added)),
1963 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
1964 (Path::new("a/b/c2.txt"), None),
1965 (Path::new("a/d"), Some(GitFileStatus::Modified)),
1966 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
1967 (Path::new("f"), None),
1968 (Path::new("f/no-status.txt"), None),
1969 (Path::new("g"), Some(GitFileStatus::Conflict)),
1970 (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
1971 ],
1972 );
1973
1974 check_propagated_statuses(
1975 &snapshot,
1976 &[
1977 (Path::new("a/b"), Some(GitFileStatus::Added)),
1978 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
1979 (Path::new("a/b/c2.txt"), None),
1980 (Path::new("a/d"), Some(GitFileStatus::Modified)),
1981 (Path::new("a/d/e1.txt"), None),
1982 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
1983 (Path::new("f"), None),
1984 (Path::new("f/no-status.txt"), None),
1985 (Path::new("g"), Some(GitFileStatus::Conflict)),
1986 ],
1987 );
1988
1989 check_propagated_statuses(
1990 &snapshot,
1991 &[
1992 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
1993 (Path::new("a/b/c2.txt"), None),
1994 (Path::new("a/d/e1.txt"), None),
1995 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
1996 (Path::new("f/no-status.txt"), None),
1997 ],
1998 );
1999
2000 #[track_caller]
2001 fn check_propagated_statuses(
2002 snapshot: &Snapshot,
2003 expected_statuses: &[(&Path, Option<GitFileStatus>)],
2004 ) {
2005 let mut entries = expected_statuses
2006 .iter()
2007 .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
2008 .collect::<Vec<_>>();
2009 snapshot.propagate_git_statuses(&mut entries);
2010 assert_eq!(
2011 entries
2012 .iter()
2013 .map(|e| (e.path.as_ref(), e.git_status))
2014 .collect::<Vec<_>>(),
2015 expected_statuses
2016 );
2017 }
2018}
2019
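// Builds a test `Client` whose HTTP requests all receive 404 responses.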
2020fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
2021 let http_client = FakeHttpClient::with_404_response();
2022 cx.read(|cx| Client::new(http_client, cx))
2023}
2024
2025#[track_caller]
2026fn git_init(path: &Path) -> git2::Repository {
2027 git2::Repository::init(path).expect("Failed to initialize git repository")
2028}
2029
2030#[track_caller]
fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
    let path = path.as_ref();
    let mut index = repo.index().expect("Failed to get index");
    index.add_path(path).expect("Failed to add path");
    index.write().expect("Failed to write index");
}
2037
2038#[track_caller]
fn git_remove_index(path: &Path, repo: &git2::Repository) {
    let mut index = repo.index().expect("Failed to get index");
    index.remove_path(path).expect("Failed to remove path");
    index.write().expect("Failed to write index");
}
2044
2045#[track_caller]
2046fn git_commit(msg: &'static str, repo: &git2::Repository) {
2047 use git2::Signature;
2048
2049 let signature = Signature::now("test", "test@zed.dev").unwrap();
2050 let oid = repo.index().unwrap().write_tree().unwrap();
2051 let tree = repo.find_tree(oid).unwrap();
    if let Ok(head) = repo.head() {
2053 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
2054
2055 let parent_commit = parent_obj.as_commit().unwrap();
2056
2057 repo.commit(
2058 Some("HEAD"),
2059 &signature,
2060 &signature,
2061 msg,
2062 &tree,
2063 &[parent_commit],
2064 )
2065 .expect("Failed to commit with parent");
2066 } else {
2067 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
2068 .expect("Failed to commit");
2069 }
2070}
2071
2072#[track_caller]
2073fn git_stash(repo: &mut git2::Repository) {
2074 use git2::Signature;
2075
2076 let signature = Signature::now("test", "test@zed.dev").unwrap();
2077 repo.stash_save(&signature, "N/A", None)
2078 .expect("Failed to stash");
2079}
2080
2081#[track_caller]
2082fn git_reset(offset: usize, repo: &git2::Repository) {
2083 let head = repo.head().expect("Couldn't get repo head");
2084 let object = head.peel(git2::ObjectType::Commit).unwrap();
2085 let commit = object.as_commit().unwrap();
    let new_head = commit
        .parents()
        .inspect(|parent| {
            parent.message();
        })
        .skip(offset)
        .next()
        .expect("Not enough history");
2094 repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
2095 .expect("Could not reset");
2096}
2097
2098#[allow(dead_code)]
2099#[track_caller]
2100fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
2101 repo.statuses(None)
2102 .unwrap()
2103 .iter()
2104 .map(|status| (status.path().unwrap().to_string(), status.status()))
2105 .collect()
2106}