use crate::{
    worktree::{Event, Snapshot, WorktreeModelHandle},
    Entry, EntryKind, PathChange, Worktree,
};
use anyhow::Result;
use client::Client;
use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
use git::GITIGNORE;
use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
use parking_lot::Mutex;
use postage::stream::Stream;
use pretty_assertions::assert_eq;
use rand::prelude::*;
use serde_json::json;
use std::{
    env,
    fmt::Write,
    mem,
    path::{Path, PathBuf},
    sync::Arc,
};
use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
23
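// Verifies that `entries(false)` skips gitignored paths while `entries(true)`
// includes them.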
24#[gpui::test]
25async fn test_traversal(cx: &mut TestAppContext) {
26 let fs = FakeFs::new(cx.background());
27 fs.insert_tree(
28 "/root",
29 json!({
30 ".gitignore": "a/b\n",
31 "a": {
32 "b": "",
33 "c": "",
34 }
35 }),
36 )
37 .await;
38
39 let tree = Worktree::local(
40 build_client(cx),
41 Path::new("/root"),
42 true,
43 fs,
44 Default::default(),
45 &mut cx.to_async(),
46 )
47 .await
48 .unwrap();
49 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
50 .await;
51
52 tree.read_with(cx, |tree, _| {
53 assert_eq!(
54 tree.entries(false)
55 .map(|entry| entry.path.as_ref())
56 .collect::<Vec<_>>(),
57 vec![
58 Path::new(""),
59 Path::new(".gitignore"),
60 Path::new("a"),
61 Path::new("a/c"),
62 ]
63 );
64 assert_eq!(
65 tree.entries(true)
66 .map(|entry| entry.path.as_ref())
67 .collect::<Vec<_>>(),
68 vec![
69 Path::new(""),
70 Path::new(".gitignore"),
71 Path::new("a"),
72 Path::new("a/b"),
73 Path::new("a/c"),
74 ]
75 );
76 })
77}
78
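// Verifies that `descendent_entries` honors its include-dirs and include-ignored
// flags, both before and after a gitignored directory is expanded.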
79#[gpui::test]
80async fn test_descendent_entries(cx: &mut TestAppContext) {
81 let fs = FakeFs::new(cx.background());
82 fs.insert_tree(
83 "/root",
84 json!({
85 "a": "",
86 "b": {
87 "c": {
88 "d": ""
89 },
90 "e": {}
91 },
92 "f": "",
93 "g": {
94 "h": {}
95 },
            "i": {
                "j": {
                    "k": ""
                },
                "l": {}
            },
104 ".gitignore": "i/j\n",
105 }),
106 )
107 .await;
108
109 let tree = Worktree::local(
110 build_client(cx),
111 Path::new("/root"),
112 true,
113 fs,
114 Default::default(),
115 &mut cx.to_async(),
116 )
117 .await
118 .unwrap();
119 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
120 .await;
121
122 tree.read_with(cx, |tree, _| {
123 assert_eq!(
124 tree.descendent_entries(false, false, Path::new("b"))
125 .map(|entry| entry.path.as_ref())
126 .collect::<Vec<_>>(),
127 vec![Path::new("b/c/d"),]
128 );
129 assert_eq!(
130 tree.descendent_entries(true, false, Path::new("b"))
131 .map(|entry| entry.path.as_ref())
132 .collect::<Vec<_>>(),
133 vec![
134 Path::new("b"),
135 Path::new("b/c"),
136 Path::new("b/c/d"),
137 Path::new("b/e"),
138 ]
139 );
140
141 assert_eq!(
142 tree.descendent_entries(false, false, Path::new("g"))
143 .map(|entry| entry.path.as_ref())
144 .collect::<Vec<_>>(),
145 Vec::<PathBuf>::new()
146 );
147 assert_eq!(
148 tree.descendent_entries(true, false, Path::new("g"))
149 .map(|entry| entry.path.as_ref())
150 .collect::<Vec<_>>(),
151 vec![Path::new("g"), Path::new("g/h"),]
152 );
153 });
154
155 // Expand gitignored directory.
156 tree.read_with(cx, |tree, _| {
157 tree.as_local()
158 .unwrap()
159 .refresh_entries_for_paths(vec![Path::new("i/j").into()])
160 })
161 .recv()
162 .await;
163
164 tree.read_with(cx, |tree, _| {
165 assert_eq!(
166 tree.descendent_entries(false, false, Path::new("i"))
167 .map(|entry| entry.path.as_ref())
168 .collect::<Vec<_>>(),
169 Vec::<PathBuf>::new()
170 );
171 assert_eq!(
172 tree.descendent_entries(false, true, Path::new("i"))
173 .map(|entry| entry.path.as_ref())
174 .collect::<Vec<_>>(),
175 vec![Path::new("i/j/k")]
176 );
177 assert_eq!(
178 tree.descendent_entries(true, false, Path::new("i"))
179 .map(|entry| entry.path.as_ref())
180 .collect::<Vec<_>>(),
181 vec![Path::new("i"), Path::new("i/l"),]
182 );
183 })
184}
185
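// Verifies that circular symlinks are listed as entries without being traversed,
// and that renaming one of them is reflected after a rescan.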
186#[gpui::test(iterations = 10)]
187async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
188 let fs = FakeFs::new(cx.background());
189 fs.insert_tree(
190 "/root",
191 json!({
192 "lib": {
193 "a": {
194 "a.txt": ""
195 },
196 "b": {
197 "b.txt": ""
198 }
199 }
200 }),
201 )
202 .await;
203 fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
204 fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
205
206 let tree = Worktree::local(
207 build_client(cx),
208 Path::new("/root"),
209 true,
210 fs.clone(),
211 Default::default(),
212 &mut cx.to_async(),
213 )
214 .await
215 .unwrap();
216
217 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
218 .await;
219
220 tree.read_with(cx, |tree, _| {
221 assert_eq!(
222 tree.entries(false)
223 .map(|entry| entry.path.as_ref())
224 .collect::<Vec<_>>(),
225 vec![
226 Path::new(""),
227 Path::new("lib"),
228 Path::new("lib/a"),
229 Path::new("lib/a/a.txt"),
230 Path::new("lib/a/lib"),
231 Path::new("lib/b"),
232 Path::new("lib/b/b.txt"),
233 Path::new("lib/b/lib"),
234 ]
235 );
236 });
237
238 fs.rename(
239 Path::new("/root/lib/a/lib"),
240 Path::new("/root/lib/a/lib-2"),
241 Default::default(),
242 )
243 .await
244 .unwrap();
245 executor.run_until_parked();
246 tree.read_with(cx, |tree, _| {
247 assert_eq!(
248 tree.entries(false)
249 .map(|entry| entry.path.as_ref())
250 .collect::<Vec<_>>(),
251 vec![
252 Path::new(""),
253 Path::new("lib"),
254 Path::new("lib/a"),
255 Path::new("lib/a/a.txt"),
256 Path::new("lib/a/lib-2"),
257 Path::new("lib/b"),
258 Path::new("lib/b/b.txt"),
259 Path::new("lib/b/lib"),
260 ]
261 );
262 });
263}
264
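// Verifies that symlinks pointing outside the worktree root are marked external
// and left unscanned until explicitly expanded, emitting `Loaded` change events.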
265#[gpui::test]
266async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
267 let fs = FakeFs::new(cx.background());
268 fs.insert_tree(
269 "/root",
270 json!({
271 "dir1": {
272 "deps": {
273 // symlinks here
274 },
275 "src": {
276 "a.rs": "",
277 "b.rs": "",
278 },
279 },
280 "dir2": {
281 "src": {
282 "c.rs": "",
283 "d.rs": "",
284 }
285 },
286 "dir3": {
287 "deps": {},
288 "src": {
289 "e.rs": "",
290 "f.rs": "",
291 },
292 }
293 }),
294 )
295 .await;
296
297 // These symlinks point to directories outside of the worktree's root, dir1.
298 fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
299 .await;
300 fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
301 .await;
302
303 let tree = Worktree::local(
304 build_client(cx),
305 Path::new("/root/dir1"),
306 true,
307 fs.clone(),
308 Default::default(),
309 &mut cx.to_async(),
310 )
311 .await
312 .unwrap();
313
314 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
315 .await;
316
317 let tree_updates = Arc::new(Mutex::new(Vec::new()));
318 tree.update(cx, |_, cx| {
319 let tree_updates = tree_updates.clone();
320 cx.subscribe(&tree, move |_, _, event, _| {
321 if let Event::UpdatedEntries(update) = event {
322 tree_updates.lock().extend(
323 update
324 .iter()
325 .map(|(path, _, change)| (path.clone(), *change)),
326 );
327 }
328 })
329 .detach();
330 });
331
332 // The symlinked directories are not scanned by default.
333 tree.read_with(cx, |tree, _| {
334 assert_eq!(
335 tree.entries(true)
336 .map(|entry| (entry.path.as_ref(), entry.is_external))
337 .collect::<Vec<_>>(),
338 vec![
339 (Path::new(""), false),
340 (Path::new("deps"), false),
341 (Path::new("deps/dep-dir2"), true),
342 (Path::new("deps/dep-dir3"), true),
343 (Path::new("src"), false),
344 (Path::new("src/a.rs"), false),
345 (Path::new("src/b.rs"), false),
346 ]
347 );
348
349 assert_eq!(
350 tree.entry_for_path("deps/dep-dir2").unwrap().kind,
351 EntryKind::UnloadedDir
352 );
353 });
354
355 // Expand one of the symlinked directories.
356 tree.read_with(cx, |tree, _| {
357 tree.as_local()
358 .unwrap()
359 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
360 })
361 .recv()
362 .await;
363
364 // The expanded directory's contents are loaded. Subdirectories are
365 // not scanned yet.
366 tree.read_with(cx, |tree, _| {
367 assert_eq!(
368 tree.entries(true)
369 .map(|entry| (entry.path.as_ref(), entry.is_external))
370 .collect::<Vec<_>>(),
371 vec![
372 (Path::new(""), false),
373 (Path::new("deps"), false),
374 (Path::new("deps/dep-dir2"), true),
375 (Path::new("deps/dep-dir3"), true),
376 (Path::new("deps/dep-dir3/deps"), true),
377 (Path::new("deps/dep-dir3/src"), true),
378 (Path::new("src"), false),
379 (Path::new("src/a.rs"), false),
380 (Path::new("src/b.rs"), false),
381 ]
382 );
383 });
384 assert_eq!(
385 mem::take(&mut *tree_updates.lock()),
386 &[
387 (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
388 (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
389 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
390 ]
391 );
392
393 // Expand a subdirectory of one of the symlinked directories.
394 tree.read_with(cx, |tree, _| {
395 tree.as_local()
396 .unwrap()
397 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
398 })
399 .recv()
400 .await;
401
402 // The expanded subdirectory's contents are loaded.
403 tree.read_with(cx, |tree, _| {
404 assert_eq!(
405 tree.entries(true)
406 .map(|entry| (entry.path.as_ref(), entry.is_external))
407 .collect::<Vec<_>>(),
408 vec![
409 (Path::new(""), false),
410 (Path::new("deps"), false),
411 (Path::new("deps/dep-dir2"), true),
412 (Path::new("deps/dep-dir3"), true),
413 (Path::new("deps/dep-dir3/deps"), true),
414 (Path::new("deps/dep-dir3/src"), true),
415 (Path::new("deps/dep-dir3/src/e.rs"), true),
416 (Path::new("deps/dep-dir3/src/f.rs"), true),
417 (Path::new("src"), false),
418 (Path::new("src/a.rs"), false),
419 (Path::new("src/b.rs"), false),
420 ]
421 );
422 });
423
424 assert_eq!(
425 mem::take(&mut *tree_updates.lock()),
426 &[
427 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
428 (
429 Path::new("deps/dep-dir3/src/e.rs").into(),
430 PathChange::Loaded
431 ),
432 (
433 Path::new("deps/dep-dir3/src/f.rs").into(),
434 PathChange::Loaded
435 )
436 ]
437 );
438}
439
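// Verifies that opening a buffer inside an unexpanded gitignored directory only
// scans the directories needed to reach it, and that changes inside still-unloaded
// directories cause no additional filesystem work.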
440#[gpui::test]
441async fn test_open_gitignored_files(cx: &mut TestAppContext) {
442 let fs = FakeFs::new(cx.background());
443 fs.insert_tree(
444 "/root",
445 json!({
446 ".gitignore": "node_modules\n",
447 "one": {
448 "node_modules": {
449 "a": {
450 "a1.js": "a1",
451 "a2.js": "a2",
452 },
453 "b": {
454 "b1.js": "b1",
455 "b2.js": "b2",
456 },
457 "c": {
458 "c1.js": "c1",
459 "c2.js": "c2",
460 }
461 },
462 },
463 "two": {
464 "x.js": "",
465 "y.js": "",
466 },
467 }),
468 )
469 .await;
470
471 let tree = Worktree::local(
472 build_client(cx),
473 Path::new("/root"),
474 true,
475 fs.clone(),
476 Default::default(),
477 &mut cx.to_async(),
478 )
479 .await
480 .unwrap();
481
482 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
483 .await;
484
485 tree.read_with(cx, |tree, _| {
486 assert_eq!(
487 tree.entries(true)
488 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
489 .collect::<Vec<_>>(),
490 vec![
491 (Path::new(""), false),
492 (Path::new(".gitignore"), false),
493 (Path::new("one"), false),
494 (Path::new("one/node_modules"), true),
495 (Path::new("two"), false),
496 (Path::new("two/x.js"), false),
497 (Path::new("two/y.js"), false),
498 ]
499 );
500 });
501
502 // Open a file that is nested inside of a gitignored directory that
503 // has not yet been expanded.
504 let prev_read_dir_count = fs.read_dir_call_count();
505 let buffer = tree
506 .update(cx, |tree, cx| {
507 tree.as_local_mut()
508 .unwrap()
509 .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
510 })
511 .await
512 .unwrap();
513
514 tree.read_with(cx, |tree, cx| {
515 assert_eq!(
516 tree.entries(true)
517 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
518 .collect::<Vec<_>>(),
519 vec![
520 (Path::new(""), false),
521 (Path::new(".gitignore"), false),
522 (Path::new("one"), false),
523 (Path::new("one/node_modules"), true),
524 (Path::new("one/node_modules/a"), true),
525 (Path::new("one/node_modules/b"), true),
526 (Path::new("one/node_modules/b/b1.js"), true),
527 (Path::new("one/node_modules/b/b2.js"), true),
528 (Path::new("one/node_modules/c"), true),
529 (Path::new("two"), false),
530 (Path::new("two/x.js"), false),
531 (Path::new("two/y.js"), false),
532 ]
533 );
534
535 assert_eq!(
536 buffer.read(cx).file().unwrap().path().as_ref(),
537 Path::new("one/node_modules/b/b1.js")
538 );
539
540 // Only the newly-expanded directories are scanned.
541 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
542 });
543
544 // Open another file in a different subdirectory of the same
545 // gitignored directory.
546 let prev_read_dir_count = fs.read_dir_call_count();
547 let buffer = tree
548 .update(cx, |tree, cx| {
549 tree.as_local_mut()
550 .unwrap()
551 .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
552 })
553 .await
554 .unwrap();
555
556 tree.read_with(cx, |tree, cx| {
557 assert_eq!(
558 tree.entries(true)
559 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
560 .collect::<Vec<_>>(),
561 vec![
562 (Path::new(""), false),
563 (Path::new(".gitignore"), false),
564 (Path::new("one"), false),
565 (Path::new("one/node_modules"), true),
566 (Path::new("one/node_modules/a"), true),
567 (Path::new("one/node_modules/a/a1.js"), true),
568 (Path::new("one/node_modules/a/a2.js"), true),
569 (Path::new("one/node_modules/b"), true),
570 (Path::new("one/node_modules/b/b1.js"), true),
571 (Path::new("one/node_modules/b/b2.js"), true),
572 (Path::new("one/node_modules/c"), true),
573 (Path::new("two"), false),
574 (Path::new("two/x.js"), false),
575 (Path::new("two/y.js"), false),
576 ]
577 );
578
579 assert_eq!(
580 buffer.read(cx).file().unwrap().path().as_ref(),
581 Path::new("one/node_modules/a/a2.js")
582 );
583
584 // Only the newly-expanded directory is scanned.
585 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
586 });
587
588 // No work happens when files and directories change within an unloaded directory.
589 let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
590 fs.create_dir("/root/one/node_modules/c/lib".as_ref())
591 .await
592 .unwrap();
593 cx.foreground().run_until_parked();
594 assert_eq!(
595 fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
596 0
597 );
598}
599
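// Verifies that when a .gitignore change un-ignores a directory, its previously
// unloaded contents are scanned exactly once.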
600#[gpui::test]
601async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
602 let fs = FakeFs::new(cx.background());
603 fs.insert_tree(
604 "/root",
605 json!({
606 ".gitignore": "node_modules\n",
607 "a": {
608 "a.js": "",
609 },
610 "b": {
611 "b.js": "",
612 },
613 "node_modules": {
614 "c": {
615 "c.js": "",
616 },
617 "d": {
618 "d.js": "",
619 "e": {
620 "e1.js": "",
621 "e2.js": "",
622 },
623 "f": {
624 "f1.js": "",
625 "f2.js": "",
626 }
627 },
628 },
629 }),
630 )
631 .await;
632
633 let tree = Worktree::local(
634 build_client(cx),
635 Path::new("/root"),
636 true,
637 fs.clone(),
638 Default::default(),
639 &mut cx.to_async(),
640 )
641 .await
642 .unwrap();
643
644 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
645 .await;
646
647 // Open a file within the gitignored directory, forcing some of its
648 // subdirectories to be read, but not all.
649 let read_dir_count_1 = fs.read_dir_call_count();
650 tree.read_with(cx, |tree, _| {
651 tree.as_local()
652 .unwrap()
653 .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
654 })
655 .recv()
656 .await;
657
658 // Those subdirectories are now loaded.
659 tree.read_with(cx, |tree, _| {
660 assert_eq!(
661 tree.entries(true)
662 .map(|e| (e.path.as_ref(), e.is_ignored))
663 .collect::<Vec<_>>(),
664 &[
665 (Path::new(""), false),
666 (Path::new(".gitignore"), false),
667 (Path::new("a"), false),
668 (Path::new("a/a.js"), false),
669 (Path::new("b"), false),
670 (Path::new("b/b.js"), false),
671 (Path::new("node_modules"), true),
672 (Path::new("node_modules/c"), true),
673 (Path::new("node_modules/d"), true),
674 (Path::new("node_modules/d/d.js"), true),
675 (Path::new("node_modules/d/e"), true),
676 (Path::new("node_modules/d/f"), true),
677 ]
678 );
679 });
680 let read_dir_count_2 = fs.read_dir_call_count();
681 assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
682
    // Update the gitignore so that node_modules is no longer ignored,
    // but one of its subdirectories now is.
685 fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
686 .await
687 .unwrap();
688 cx.foreground().run_until_parked();
689
690 // All of the directories that are no longer ignored are now loaded.
691 tree.read_with(cx, |tree, _| {
692 assert_eq!(
693 tree.entries(true)
694 .map(|e| (e.path.as_ref(), e.is_ignored))
695 .collect::<Vec<_>>(),
696 &[
697 (Path::new(""), false),
698 (Path::new(".gitignore"), false),
699 (Path::new("a"), false),
700 (Path::new("a/a.js"), false),
701 (Path::new("b"), false),
702 (Path::new("b/b.js"), false),
703 // This directory is no longer ignored
704 (Path::new("node_modules"), false),
705 (Path::new("node_modules/c"), false),
706 (Path::new("node_modules/c/c.js"), false),
707 (Path::new("node_modules/d"), false),
708 (Path::new("node_modules/d/d.js"), false),
709 // This subdirectory is now ignored
710 (Path::new("node_modules/d/e"), true),
711 (Path::new("node_modules/d/f"), false),
712 (Path::new("node_modules/d/f/f1.js"), false),
713 (Path::new("node_modules/d/f/f2.js"), false),
714 ]
715 );
716 });
717
718 // Each of the newly-loaded directories is scanned only once.
719 let read_dir_count_3 = fs.read_dir_call_count();
720 assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
721}
722
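// Verifies that ignore rules, including those from a .gitignore in an ancestor of
// the worktree root, are applied to files created after the initial scan.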
723#[gpui::test(iterations = 10)]
724async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
725 let fs = FakeFs::new(cx.background());
726 fs.insert_tree(
727 "/root",
728 json!({
729 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
730 "tree": {
731 ".git": {},
732 ".gitignore": "ignored-dir\n",
733 "tracked-dir": {
734 "tracked-file1": "",
735 "ancestor-ignored-file1": "",
736 },
737 "ignored-dir": {
738 "ignored-file1": ""
739 }
740 }
741 }),
742 )
743 .await;
744
745 let tree = Worktree::local(
746 build_client(cx),
747 "/root/tree".as_ref(),
748 true,
749 fs.clone(),
750 Default::default(),
751 &mut cx.to_async(),
752 )
753 .await
754 .unwrap();
755 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
756 .await;
757
758 tree.read_with(cx, |tree, _| {
759 tree.as_local()
760 .unwrap()
761 .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
762 })
763 .recv()
764 .await;
765
766 cx.read(|cx| {
767 let tree = tree.read(cx);
768 assert!(
769 !tree
770 .entry_for_path("tracked-dir/tracked-file1")
771 .unwrap()
772 .is_ignored
773 );
774 assert!(
775 tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
776 .unwrap()
777 .is_ignored
778 );
779 assert!(
780 tree.entry_for_path("ignored-dir/ignored-file1")
781 .unwrap()
782 .is_ignored
783 );
784 });
785
786 fs.create_file(
787 "/root/tree/tracked-dir/tracked-file2".as_ref(),
788 Default::default(),
789 )
790 .await
791 .unwrap();
792 fs.create_file(
793 "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
794 Default::default(),
795 )
796 .await
797 .unwrap();
798 fs.create_file(
799 "/root/tree/ignored-dir/ignored-file2".as_ref(),
800 Default::default(),
801 )
802 .await
803 .unwrap();
804
805 cx.foreground().run_until_parked();
806 cx.read(|cx| {
807 let tree = tree.read(cx);
808 assert!(
809 !tree
810 .entry_for_path("tracked-dir/tracked-file2")
811 .unwrap()
812 .is_ignored
813 );
814 assert!(
815 tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
816 .unwrap()
817 .is_ignored
818 );
819 assert!(
820 tree.entry_for_path("ignored-dir/ignored-file2")
821 .unwrap()
822 .is_ignored
823 );
824 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
825 });
826}
827
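// Verifies that `write_file` creates entries for new files in both tracked and
// gitignored directories, with the correct `is_ignored` flag.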
828#[gpui::test]
829async fn test_write_file(cx: &mut TestAppContext) {
830 let dir = temp_tree(json!({
831 ".git": {},
832 ".gitignore": "ignored-dir\n",
833 "tracked-dir": {},
834 "ignored-dir": {}
835 }));
836
837 let tree = Worktree::local(
838 build_client(cx),
839 dir.path(),
840 true,
841 Arc::new(RealFs),
842 Default::default(),
843 &mut cx.to_async(),
844 )
845 .await
846 .unwrap();
847 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
848 .await;
849 tree.flush_fs_events(cx).await;
850
851 tree.update(cx, |tree, cx| {
852 tree.as_local().unwrap().write_file(
853 Path::new("tracked-dir/file.txt"),
854 "hello".into(),
855 Default::default(),
856 cx,
857 )
858 })
859 .await
860 .unwrap();
861 tree.update(cx, |tree, cx| {
862 tree.as_local().unwrap().write_file(
863 Path::new("ignored-dir/file.txt"),
864 "world".into(),
865 Default::default(),
866 cx,
867 )
868 })
869 .await
870 .unwrap();
871
872 tree.read_with(cx, |tree, _| {
873 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
874 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
875 assert!(!tracked.is_ignored);
876 assert!(ignored.is_ignored);
877 });
878}
879
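// Verifies that a directory created while the initial scan is still running shows
// up in the snapshot, and that the streamed updates reproduce the same snapshot.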
880#[gpui::test(iterations = 30)]
881async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
882 let fs = FakeFs::new(cx.background());
883 fs.insert_tree(
884 "/root",
885 json!({
886 "b": {},
887 "c": {},
888 "d": {},
889 }),
890 )
891 .await;
892
893 let tree = Worktree::local(
894 build_client(cx),
895 "/root".as_ref(),
896 true,
897 fs,
898 Default::default(),
899 &mut cx.to_async(),
900 )
901 .await
902 .unwrap();
903
904 let snapshot1 = tree.update(cx, |tree, cx| {
905 let tree = tree.as_local_mut().unwrap();
906 let snapshot = Arc::new(Mutex::new(tree.snapshot()));
907 let _ = tree.observe_updates(0, cx, {
908 let snapshot = snapshot.clone();
909 move |update| {
910 snapshot.lock().apply_remote_update(update).unwrap();
911 async { true }
912 }
913 });
914 snapshot
915 });
916
917 let entry = tree
918 .update(cx, |tree, cx| {
919 tree.as_local_mut()
920 .unwrap()
921 .create_entry("a/e".as_ref(), true, cx)
922 })
923 .await
924 .unwrap();
925 assert!(entry.is_dir());
926
927 cx.foreground().run_until_parked();
928 tree.read_with(cx, |tree, _| {
929 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
930 });
931
932 let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
933 assert_eq!(
934 snapshot1.lock().entries(true).collect::<Vec<_>>(),
935 snapshot2.entries(true).collect::<Vec<_>>()
936 );
937}
938
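// Verifies that `create_entry` creates any missing parent directories, on both the
// fake and the real filesystem.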
939#[gpui::test]
940async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
941 let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
942
943 let fs_fake = FakeFs::new(cx.background());
944 fs_fake
945 .insert_tree(
946 "/root",
947 json!({
948 "a": {},
949 }),
950 )
951 .await;
952
953 let tree_fake = Worktree::local(
954 client_fake,
955 "/root".as_ref(),
956 true,
957 fs_fake,
958 Default::default(),
959 &mut cx.to_async(),
960 )
961 .await
962 .unwrap();
963
964 let entry = tree_fake
965 .update(cx, |tree, cx| {
966 tree.as_local_mut()
967 .unwrap()
968 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
969 })
970 .await
971 .unwrap();
972 assert!(entry.is_file());
973
974 cx.foreground().run_until_parked();
975 tree_fake.read_with(cx, |tree, _| {
976 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
977 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
978 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
979 });
980
981 let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
982
983 let fs_real = Arc::new(RealFs);
984 let temp_root = temp_tree(json!({
985 "a": {}
986 }));
987
988 let tree_real = Worktree::local(
989 client_real,
990 temp_root.path(),
991 true,
992 fs_real,
993 Default::default(),
994 &mut cx.to_async(),
995 )
996 .await
997 .unwrap();
998
999 let entry = tree_real
1000 .update(cx, |tree, cx| {
1001 tree.as_local_mut()
1002 .unwrap()
1003 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1004 })
1005 .await
1006 .unwrap();
1007 assert!(entry.is_file());
1008
1009 cx.foreground().run_until_parked();
1010 tree_real.read_with(cx, |tree, _| {
1011 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1012 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1013 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1014 });
1015
    // Test the smallest change: creating a file whose parent directories already exist.
1017 let entry = tree_real
1018 .update(cx, |tree, cx| {
1019 tree.as_local_mut()
1020 .unwrap()
1021 .create_entry("a/b/c/e.txt".as_ref(), false, cx)
1022 })
1023 .await
1024 .unwrap();
1025 assert!(entry.is_file());
1026
1027 cx.foreground().run_until_parked();
1028 tree_real.read_with(cx, |tree, _| {
1029 assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
1030 });
1031
    // Test the largest change: creating a file whose parent directories must all be created.
1033 let entry = tree_real
1034 .update(cx, |tree, cx| {
1035 tree.as_local_mut()
1036 .unwrap()
1037 .create_entry("d/e/f/g.txt".as_ref(), false, cx)
1038 })
1039 .await
1040 .unwrap();
1041 assert!(entry.is_file());
1042
1043 cx.foreground().run_until_parked();
1044 tree_real.read_with(cx, |tree, _| {
1045 assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
1046 assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
1047 assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
1048 assert!(tree.entry_for_path("d/").unwrap().is_dir());
1049 });
1050}
1051
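// Applies random worktree mutations while the initial scan is still running and
// checks that earlier snapshots can be brought up to date by replaying the
// observed updates.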
1052#[gpui::test(iterations = 100)]
1053async fn test_random_worktree_operations_during_initial_scan(
1054 cx: &mut TestAppContext,
1055 mut rng: StdRng,
1056) {
1057 let operations = env::var("OPERATIONS")
1058 .map(|o| o.parse().unwrap())
1059 .unwrap_or(5);
1060 let initial_entries = env::var("INITIAL_ENTRIES")
1061 .map(|o| o.parse().unwrap())
1062 .unwrap_or(20);
1063
1064 let root_dir = Path::new("/test");
1065 let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
1066 fs.as_fake().insert_tree(root_dir, json!({})).await;
1067 for _ in 0..initial_entries {
1068 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1069 }
1070 log::info!("generated initial tree");
1071
1072 let worktree = Worktree::local(
1073 build_client(cx),
1074 root_dir,
1075 true,
1076 fs.clone(),
1077 Default::default(),
1078 &mut cx.to_async(),
1079 )
1080 .await
1081 .unwrap();
1082
1083 let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
1084 let updates = Arc::new(Mutex::new(Vec::new()));
1085 worktree.update(cx, |tree, cx| {
1086 check_worktree_change_events(tree, cx);
1087
1088 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1089 let updates = updates.clone();
1090 move |update| {
1091 updates.lock().push(update);
1092 async { true }
1093 }
1094 });
1095 });
1096
1097 for _ in 0..operations {
1098 worktree
1099 .update(cx, |worktree, cx| {
1100 randomly_mutate_worktree(worktree, &mut rng, cx)
1101 })
1102 .await
1103 .log_err();
1104 worktree.read_with(cx, |tree, _| {
1105 tree.as_local().unwrap().snapshot().check_invariants(true)
1106 });
1107
1108 if rng.gen_bool(0.6) {
1109 snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
1110 }
1111 }
1112
1113 worktree
1114 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1115 .await;
1116
1117 cx.foreground().run_until_parked();
1118
1119 let final_snapshot = worktree.read_with(cx, |tree, _| {
1120 let tree = tree.as_local().unwrap();
1121 let snapshot = tree.snapshot();
1122 snapshot.check_invariants(true);
1123 snapshot
1124 });
1125
1126 for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
1127 let mut updated_snapshot = snapshot.clone();
1128 for update in updates.lock().iter() {
1129 if update.scan_id >= updated_snapshot.scan_id() as u64 {
1130 updated_snapshot
1131 .apply_remote_update(update.clone())
1132 .unwrap();
1133 }
1134 }
1135
1136 assert_eq!(
1137 updated_snapshot.entries(true).collect::<Vec<_>>(),
1138 final_snapshot.entries(true).collect::<Vec<_>>(),
1139 "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
1140 );
1141 }
1142}
1143
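// Interleaves random filesystem mutations with partially flushed events, then
// checks snapshot invariants, equivalence with a freshly scanned worktree, and
// update replay onto previously stored snapshots.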
1144#[gpui::test(iterations = 100)]
1145async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
1146 let operations = env::var("OPERATIONS")
1147 .map(|o| o.parse().unwrap())
1148 .unwrap_or(40);
1149 let initial_entries = env::var("INITIAL_ENTRIES")
1150 .map(|o| o.parse().unwrap())
1151 .unwrap_or(20);
1152
1153 let root_dir = Path::new("/test");
1154 let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
1155 fs.as_fake().insert_tree(root_dir, json!({})).await;
1156 for _ in 0..initial_entries {
1157 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1158 }
1159 log::info!("generated initial tree");
1160
1161 let worktree = Worktree::local(
1162 build_client(cx),
1163 root_dir,
1164 true,
1165 fs.clone(),
1166 Default::default(),
1167 &mut cx.to_async(),
1168 )
1169 .await
1170 .unwrap();
1171
1172 let updates = Arc::new(Mutex::new(Vec::new()));
1173 worktree.update(cx, |tree, cx| {
1174 check_worktree_change_events(tree, cx);
1175
1176 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1177 let updates = updates.clone();
1178 move |update| {
1179 updates.lock().push(update);
1180 async { true }
1181 }
1182 });
1183 });
1184
1185 worktree
1186 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1187 .await;
1188
1189 fs.as_fake().pause_events();
1190 let mut snapshots = Vec::new();
1191 let mut mutations_len = operations;
1192 while mutations_len > 1 {
1193 if rng.gen_bool(0.2) {
1194 worktree
1195 .update(cx, |worktree, cx| {
1196 randomly_mutate_worktree(worktree, &mut rng, cx)
1197 })
1198 .await
1199 .log_err();
1200 } else {
1201 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1202 }
1203
1204 let buffered_event_count = fs.as_fake().buffered_event_count();
1205 if buffered_event_count > 0 && rng.gen_bool(0.3) {
1206 let len = rng.gen_range(0..=buffered_event_count);
1207 log::info!("flushing {} events", len);
1208 fs.as_fake().flush_events(len);
1209 } else {
1210 randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
1211 mutations_len -= 1;
1212 }
1213
1214 cx.foreground().run_until_parked();
1215 if rng.gen_bool(0.2) {
1216 log::info!("storing snapshot {}", snapshots.len());
1217 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1218 snapshots.push(snapshot);
1219 }
1220 }
1221
1222 log::info!("quiescing");
1223 fs.as_fake().flush_events(usize::MAX);
1224 cx.foreground().run_until_parked();
1225
1226 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1227 snapshot.check_invariants(true);
1228 let expanded_paths = snapshot
1229 .expanded_entries()
1230 .map(|e| e.path.clone())
1231 .collect::<Vec<_>>();
1232
1233 {
1234 let new_worktree = Worktree::local(
1235 build_client(cx),
1236 root_dir,
1237 true,
1238 fs.clone(),
1239 Default::default(),
1240 &mut cx.to_async(),
1241 )
1242 .await
1243 .unwrap();
1244 new_worktree
1245 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1246 .await;
1247 new_worktree
1248 .update(cx, |tree, _| {
1249 tree.as_local_mut()
1250 .unwrap()
1251 .refresh_entries_for_paths(expanded_paths)
1252 })
1253 .recv()
1254 .await;
1255 let new_snapshot =
1256 new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1257 assert_eq!(
1258 snapshot.entries_without_ids(true),
1259 new_snapshot.entries_without_ids(true)
1260 );
1261 }
1262
1263 for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
1264 for update in updates.lock().iter() {
1265 if update.scan_id >= prev_snapshot.scan_id() as u64 {
1266 prev_snapshot.apply_remote_update(update.clone()).unwrap();
1267 }
1268 }
1269
1270 assert_eq!(
1271 prev_snapshot
1272 .entries(true)
1273 .map(ignore_pending_dir)
1274 .collect::<Vec<_>>(),
1275 snapshot
1276 .entries(true)
1277 .map(ignore_pending_dir)
1278 .collect::<Vec<_>>(),
1279 "wrong updates after snapshot {i}: {updates:#?}",
1280 );
1281 }
1282
1283 fn ignore_pending_dir(entry: &Entry) -> Entry {
1284 let mut entry = entry.clone();
1285 if entry.kind.is_dir() {
1286 entry.kind = EntryKind::Dir
1287 }
1288 entry
1289 }
1290}
1291
1292// The worktree's `UpdatedEntries` event can be used to follow along with
1293// all changes to the worktree's snapshot.
1294fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
1295 let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
1296 cx.subscribe(&cx.handle(), move |tree, _, event, _| {
1297 if let Event::UpdatedEntries(changes) = event {
1298 for (path, _, change_type) in changes.iter() {
1299 let entry = tree.entry_for_path(&path).cloned();
1300 let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
1301 Ok(ix) | Err(ix) => ix,
1302 };
1303 match change_type {
1304 PathChange::Added => entries.insert(ix, entry.unwrap()),
1305 PathChange::Removed => drop(entries.remove(ix)),
1306 PathChange::Updated => {
1307 let entry = entry.unwrap();
1308 let existing_entry = entries.get_mut(ix).unwrap();
1309 assert_eq!(existing_entry.path, entry.path);
1310 *existing_entry = entry;
1311 }
1312 PathChange::AddedOrUpdated | PathChange::Loaded => {
1313 let entry = entry.unwrap();
1314 if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
1315 *entries.get_mut(ix).unwrap() = entry;
1316 } else {
1317 entries.insert(ix, entry);
1318 }
1319 }
1320 }
1321 }
1322
1323 let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
1324 assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
1325 }
1326 })
1327 .detach();
1328}
1329
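// Picks a random entry and either deletes it, renames it to a random location, or
// creates/overwrites a child entry, using the worktree's own mutation APIs.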
1330fn randomly_mutate_worktree(
1331 worktree: &mut Worktree,
1332 rng: &mut impl Rng,
1333 cx: &mut ModelContext<Worktree>,
1334) -> Task<Result<()>> {
1335 log::info!("mutating worktree");
1336 let worktree = worktree.as_local_mut().unwrap();
1337 let snapshot = worktree.snapshot();
1338 let entry = snapshot.entries(false).choose(rng).unwrap();
1339
1340 match rng.gen_range(0_u32..100) {
1341 0..=33 if entry.path.as_ref() != Path::new("") => {
1342 log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
1343 worktree.delete_entry(entry.id, cx).unwrap()
1344 }
1345 ..=66 if entry.path.as_ref() != Path::new("") => {
1346 let other_entry = snapshot.entries(false).choose(rng).unwrap();
1347 let new_parent_path = if other_entry.is_dir() {
1348 other_entry.path.clone()
1349 } else {
1350 other_entry.path.parent().unwrap().into()
1351 };
1352 let mut new_path = new_parent_path.join(random_filename(rng));
1353 if new_path.starts_with(&entry.path) {
1354 new_path = random_filename(rng).into();
1355 }
1356
1357 log::info!(
1358 "renaming entry {:?} ({}) to {:?}",
1359 entry.path,
1360 entry.id.0,
1361 new_path
1362 );
1363 let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
1364 cx.foreground().spawn(async move {
1365 task.await?;
1366 Ok(())
1367 })
1368 }
1369 _ => {
1370 let task = if entry.is_dir() {
1371 let child_path = entry.path.join(random_filename(rng));
1372 let is_dir = rng.gen_bool(0.3);
1373 log::info!(
1374 "creating {} at {:?}",
1375 if is_dir { "dir" } else { "file" },
1376 child_path,
1377 );
1378 worktree.create_entry(child_path, is_dir, cx)
1379 } else {
1380 log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
1381 worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
1382 };
1383 cx.foreground().spawn(async move {
1384 task.await?;
1385 Ok(())
1386 })
1387 }
1388 }
1389}
1390
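// Mutates the fake filesystem directly: inserts new files and directories, writes
// the occasional .gitignore, or renames and deletes existing paths.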
1391async fn randomly_mutate_fs(
1392 fs: &Arc<dyn Fs>,
1393 root_path: &Path,
1394 insertion_probability: f64,
1395 rng: &mut impl Rng,
1396) {
1397 log::info!("mutating fs");
1398 let mut files = Vec::new();
1399 let mut dirs = Vec::new();
1400 for path in fs.as_fake().paths(false) {
1401 if path.starts_with(root_path) {
1402 if fs.is_file(&path).await {
1403 files.push(path);
1404 } else {
1405 dirs.push(path);
1406 }
1407 }
1408 }
1409
1410 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
1411 let path = dirs.choose(rng).unwrap();
1412 let new_path = path.join(random_filename(rng));
1413
1414 if rng.gen() {
1415 log::info!(
1416 "creating dir {:?}",
1417 new_path.strip_prefix(root_path).unwrap()
1418 );
1419 fs.create_dir(&new_path).await.unwrap();
1420 } else {
1421 log::info!(
1422 "creating file {:?}",
1423 new_path.strip_prefix(root_path).unwrap()
1424 );
1425 fs.create_file(&new_path, Default::default()).await.unwrap();
1426 }
1427 } else if rng.gen_bool(0.05) {
1428 let ignore_dir_path = dirs.choose(rng).unwrap();
1429 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
1430
1431 let subdirs = dirs
1432 .iter()
1433 .filter(|d| d.starts_with(&ignore_dir_path))
1434 .cloned()
1435 .collect::<Vec<_>>();
1436 let subfiles = files
1437 .iter()
1438 .filter(|d| d.starts_with(&ignore_dir_path))
1439 .cloned()
1440 .collect::<Vec<_>>();
1441 let files_to_ignore = {
1442 let len = rng.gen_range(0..=subfiles.len());
1443 subfiles.choose_multiple(rng, len)
1444 };
1445 let dirs_to_ignore = {
1446 let len = rng.gen_range(0..subdirs.len());
1447 subdirs.choose_multiple(rng, len)
1448 };
1449
1450 let mut ignore_contents = String::new();
1451 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
1452 writeln!(
1453 ignore_contents,
1454 "{}",
1455 path_to_ignore
1456 .strip_prefix(&ignore_dir_path)
1457 .unwrap()
1458 .to_str()
1459 .unwrap()
1460 )
1461 .unwrap();
1462 }
1463 log::info!(
1464 "creating gitignore {:?} with contents:\n{}",
1465 ignore_path.strip_prefix(&root_path).unwrap(),
1466 ignore_contents
1467 );
1468 fs.save(
1469 &ignore_path,
1470 &ignore_contents.as_str().into(),
1471 Default::default(),
1472 )
1473 .await
1474 .unwrap();
1475 } else {
1476 let old_path = {
1477 let file_path = files.choose(rng);
1478 let dir_path = dirs[1..].choose(rng);
1479 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
1480 };
1481
1482 let is_rename = rng.gen();
1483 if is_rename {
1484 let new_path_parent = dirs
1485 .iter()
1486 .filter(|d| !d.starts_with(old_path))
1487 .choose(rng)
1488 .unwrap();
1489
1490 let overwrite_existing_dir =
1491 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
1492 let new_path = if overwrite_existing_dir {
1493 fs.remove_dir(
1494 &new_path_parent,
1495 RemoveOptions {
1496 recursive: true,
1497 ignore_if_not_exists: true,
1498 },
1499 )
1500 .await
1501 .unwrap();
1502 new_path_parent.to_path_buf()
1503 } else {
1504 new_path_parent.join(random_filename(rng))
1505 };
1506
1507 log::info!(
1508 "renaming {:?} to {}{:?}",
1509 old_path.strip_prefix(&root_path).unwrap(),
1510 if overwrite_existing_dir {
1511 "overwrite "
1512 } else {
1513 ""
1514 },
1515 new_path.strip_prefix(&root_path).unwrap()
1516 );
1517 fs.rename(
1518 &old_path,
1519 &new_path,
1520 fs::RenameOptions {
1521 overwrite: true,
1522 ignore_if_exists: true,
1523 },
1524 )
1525 .await
1526 .unwrap();
1527 } else if fs.is_file(&old_path).await {
1528 log::info!(
1529 "deleting file {:?}",
1530 old_path.strip_prefix(&root_path).unwrap()
1531 );
1532 fs.remove_file(old_path, Default::default()).await.unwrap();
1533 } else {
1534 log::info!(
1535 "deleting dir {:?}",
1536 old_path.strip_prefix(&root_path).unwrap()
1537 );
1538 fs.remove_dir(
1539 &old_path,
1540 RemoveOptions {
1541 recursive: true,
1542 ignore_if_not_exists: true,
1543 },
1544 )
1545 .await
1546 .unwrap();
1547 }
1548 }
1549}
1550
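// Generates a random six-character alphanumeric file name.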
1551fn random_filename(rng: &mut impl Rng) -> String {
1552 (0..6)
1553 .map(|_| rng.sample(rand::distributions::Alphanumeric))
1554 .map(char::from)
1555 .collect()
1556}
1557
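// Verifies that renaming a repository's work directory keeps git statuses attached
// to the renamed paths.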
1558#[gpui::test]
1559async fn test_rename_work_directory(cx: &mut TestAppContext) {
1560 let root = temp_tree(json!({
1561 "projects": {
1562 "project1": {
1563 "a": "",
1564 "b": "",
1565 }
1566 },
1567
1568 }));
1569 let root_path = root.path();
1570
1571 let tree = Worktree::local(
1572 build_client(cx),
1573 root_path,
1574 true,
1575 Arc::new(RealFs),
1576 Default::default(),
1577 &mut cx.to_async(),
1578 )
1579 .await
1580 .unwrap();
1581
1582 let repo = git_init(&root_path.join("projects/project1"));
1583 git_add("a", &repo);
1584 git_commit("init", &repo);
1585 std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
1586
1587 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1588 .await;
1589
1590 tree.flush_fs_events(cx).await;
1591
1592 cx.read(|cx| {
1593 let tree = tree.read(cx);
1594 let (work_dir, _) = tree.repositories().next().unwrap();
1595 assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
1596 assert_eq!(
1597 tree.status_for_file(Path::new("projects/project1/a")),
1598 Some(GitFileStatus::Modified)
1599 );
1600 assert_eq!(
1601 tree.status_for_file(Path::new("projects/project1/b")),
1602 Some(GitFileStatus::Added)
1603 );
1604 });
1605
1606 std::fs::rename(
1607 root_path.join("projects/project1"),
1608 root_path.join("projects/project2"),
1609 )
1610 .ok();
1611 tree.flush_fs_events(cx).await;
1612
1613 cx.read(|cx| {
1614 let tree = tree.read(cx);
1615 let (work_dir, _) = tree.repositories().next().unwrap();
1616 assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
1617 assert_eq!(
1618 tree.status_for_file(Path::new("projects/project2/a")),
1619 Some(GitFileStatus::Modified)
1620 );
1621 assert_eq!(
1622 tree.status_for_file(Path::new("projects/project2/b")),
1623 Some(GitFileStatus::Added)
1624 );
1625 });
1626}
1627
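// Verifies that `repository_for_path` resolves the innermost containing repository
// (including nested repositories), and that repository update events fire when the
// .git directory changes or is removed.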
1628#[gpui::test]
1629async fn test_git_repository_for_path(cx: &mut TestAppContext) {
1630 let root = temp_tree(json!({
1631 "c.txt": "",
1632 "dir1": {
1633 ".git": {},
1634 "deps": {
1635 "dep1": {
1636 ".git": {},
1637 "src": {
1638 "a.txt": ""
1639 }
1640 }
1641 },
1642 "src": {
1643 "b.txt": ""
1644 }
1645 },
1646 }));
1647
1648 let tree = Worktree::local(
1649 build_client(cx),
1650 root.path(),
1651 true,
1652 Arc::new(RealFs),
1653 Default::default(),
1654 &mut cx.to_async(),
1655 )
1656 .await
1657 .unwrap();
1658
1659 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1660 .await;
1661 tree.flush_fs_events(cx).await;
1662
1663 tree.read_with(cx, |tree, _cx| {
1664 let tree = tree.as_local().unwrap();
1665
1666 assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
1667
1668 let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
1669 assert_eq!(
1670 entry
1671 .work_directory(tree)
1672 .map(|directory| directory.as_ref().to_owned()),
1673 Some(Path::new("dir1").to_owned())
1674 );
1675
1676 let entry = tree
1677 .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
1678 .unwrap();
1679 assert_eq!(
1680 entry
1681 .work_directory(tree)
1682 .map(|directory| directory.as_ref().to_owned()),
1683 Some(Path::new("dir1/deps/dep1").to_owned())
1684 );
1685
1686 let entries = tree.files(false, 0);
1687
1688 let paths_with_repos = tree
1689 .entries_with_repositories(entries)
1690 .map(|(entry, repo)| {
1691 (
1692 entry.path.as_ref(),
1693 repo.and_then(|repo| {
1694 repo.work_directory(&tree)
1695 .map(|work_directory| work_directory.0.to_path_buf())
1696 }),
1697 )
1698 })
1699 .collect::<Vec<_>>();
1700
1701 assert_eq!(
1702 paths_with_repos,
1703 &[
1704 (Path::new("c.txt"), None),
1705 (
1706 Path::new("dir1/deps/dep1/src/a.txt"),
1707 Some(Path::new("dir1/deps/dep1").into())
1708 ),
1709 (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
1710 ]
1711 );
1712 });
1713
1714 let repo_update_events = Arc::new(Mutex::new(vec![]));
1715 tree.update(cx, |_, cx| {
1716 let repo_update_events = repo_update_events.clone();
1717 cx.subscribe(&tree, move |_, _, event, _| {
1718 if let Event::UpdatedGitRepositories(update) = event {
1719 repo_update_events.lock().push(update.clone());
1720 }
1721 })
1722 .detach();
1723 });
1724
1725 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
1726 tree.flush_fs_events(cx).await;
1727
1728 assert_eq!(
1729 repo_update_events.lock()[0]
1730 .iter()
1731 .map(|e| e.0.clone())
1732 .collect::<Vec<Arc<Path>>>(),
1733 vec![Path::new("dir1").into()]
1734 );
1735
1736 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
1737 tree.flush_fs_events(cx).await;
1738
1739 tree.read_with(cx, |tree, _cx| {
1740 let tree = tree.as_local().unwrap();
1741
1742 assert!(tree
1743 .repository_for_path("dir1/src/b.txt".as_ref())
1744 .is_none());
1745 });
1746}
1747
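// Exercises git status tracking against a real repository across file edits,
// commits, resets, stashes, .gitignore changes, and directory renames.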
1748#[gpui::test]
1749async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
1750 const IGNORE_RULE: &'static str = "**/target";
1751
1752 let root = temp_tree(json!({
1753 "project": {
1754 "a.txt": "a",
1755 "b.txt": "bb",
1756 "c": {
1757 "d": {
1758 "e.txt": "eee"
1759 }
1760 },
1761 "f.txt": "ffff",
1762 "target": {
1763 "build_file": "???"
1764 },
1765 ".gitignore": IGNORE_RULE
1766 },
1767
1768 }));
1769
1770 const A_TXT: &'static str = "a.txt";
1771 const B_TXT: &'static str = "b.txt";
1772 const E_TXT: &'static str = "c/d/e.txt";
1773 const F_TXT: &'static str = "f.txt";
1774 const DOTGITIGNORE: &'static str = ".gitignore";
1775 const BUILD_FILE: &'static str = "target/build_file";
1776 let project_path = Path::new("project");
1777
1778 // Set up git repository before creating the worktree.
1779 let work_dir = root.path().join("project");
1780 let mut repo = git_init(work_dir.as_path());
1781 repo.add_ignore_rule(IGNORE_RULE).unwrap();
1782 git_add(A_TXT, &repo);
1783 git_add(E_TXT, &repo);
1784 git_add(DOTGITIGNORE, &repo);
1785 git_commit("Initial commit", &repo);
1786
1787 let tree = Worktree::local(
1788 build_client(cx),
1789 root.path(),
1790 true,
1791 Arc::new(RealFs),
1792 Default::default(),
1793 &mut cx.to_async(),
1794 )
1795 .await
1796 .unwrap();
1797
1798 tree.flush_fs_events(cx).await;
1799 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1800 .await;
1801 deterministic.run_until_parked();
1802
1803 // Check that the right git state is observed on startup
1804 tree.read_with(cx, |tree, _cx| {
1805 let snapshot = tree.snapshot();
1806 assert_eq!(snapshot.repositories().count(), 1);
1807 let (dir, _) = snapshot.repositories().next().unwrap();
1808 assert_eq!(dir.as_ref(), Path::new("project"));
1809
1810 assert_eq!(
1811 snapshot.status_for_file(project_path.join(B_TXT)),
1812 Some(GitFileStatus::Added)
1813 );
1814 assert_eq!(
1815 snapshot.status_for_file(project_path.join(F_TXT)),
1816 Some(GitFileStatus::Added)
1817 );
1818 });
1819
1820 // Modify a file in the working copy.
1821 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
1822 tree.flush_fs_events(cx).await;
1823 deterministic.run_until_parked();
1824
1825 // The worktree detects that the file's git status has changed.
1826 tree.read_with(cx, |tree, _cx| {
1827 let snapshot = tree.snapshot();
1828 assert_eq!(
1829 snapshot.status_for_file(project_path.join(A_TXT)),
1830 Some(GitFileStatus::Modified)
1831 );
1832 });
1833
1834 // Create a commit in the git repository.
1835 git_add(A_TXT, &repo);
1836 git_add(B_TXT, &repo);
1837 git_commit("Committing modified and added", &repo);
1838 tree.flush_fs_events(cx).await;
1839 deterministic.run_until_parked();
1840
1841 // The worktree detects that the files' git status have changed.
1842 tree.read_with(cx, |tree, _cx| {
1843 let snapshot = tree.snapshot();
1844 assert_eq!(
1845 snapshot.status_for_file(project_path.join(F_TXT)),
1846 Some(GitFileStatus::Added)
1847 );
1848 assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
1849 assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
1850 });
1851
1852 // Modify files in the working copy and perform git operations on other files.
1853 git_reset(0, &repo);
1854 git_remove_index(Path::new(B_TXT), &repo);
1855 git_stash(&mut repo);
1856 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
1857 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
1858 tree.flush_fs_events(cx).await;
1859 deterministic.run_until_parked();
1860
1861 // Check that more complex repo changes are tracked
1862 tree.read_with(cx, |tree, _cx| {
1863 let snapshot = tree.snapshot();
1864
1865 assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
1866 assert_eq!(
1867 snapshot.status_for_file(project_path.join(B_TXT)),
1868 Some(GitFileStatus::Added)
1869 );
1870 assert_eq!(
1871 snapshot.status_for_file(project_path.join(E_TXT)),
1872 Some(GitFileStatus::Modified)
1873 );
1874 });
1875
1876 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
1877 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
1878 std::fs::write(
1879 work_dir.join(DOTGITIGNORE),
1880 [IGNORE_RULE, "f.txt"].join("\n"),
1881 )
1882 .unwrap();
1883
1884 git_add(Path::new(DOTGITIGNORE), &repo);
1885 git_commit("Committing modified git ignore", &repo);
1886
1887 tree.flush_fs_events(cx).await;
1888 deterministic.run_until_parked();
1889
1890 let mut renamed_dir_name = "first_directory/second_directory";
1891 const RENAMED_FILE: &'static str = "rf.txt";
1892
1893 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
1894 std::fs::write(
1895 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
1896 "new-contents",
1897 )
1898 .unwrap();
1899
1900 tree.flush_fs_events(cx).await;
1901 deterministic.run_until_parked();
1902
1903 tree.read_with(cx, |tree, _cx| {
1904 let snapshot = tree.snapshot();
1905 assert_eq!(
1906 snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
1907 Some(GitFileStatus::Added)
1908 );
1909 });
1910
1911 renamed_dir_name = "new_first_directory/second_directory";
1912
1913 std::fs::rename(
1914 work_dir.join("first_directory"),
1915 work_dir.join("new_first_directory"),
1916 )
1917 .unwrap();
1918
1919 tree.flush_fs_events(cx).await;
1920 deterministic.run_until_parked();
1921
1922 tree.read_with(cx, |tree, _cx| {
1923 let snapshot = tree.snapshot();
1924
1925 assert_eq!(
1926 snapshot.status_for_file(
1927 project_path
1928 .join(Path::new(renamed_dir_name))
1929 .join(RENAMED_FILE)
1930 ),
1931 Some(GitFileStatus::Added)
1932 );
1933 });
1934}
1935
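// Verifies that file statuses propagate to ancestor directory entries, with a
// conflict taking precedence over a modification, and a modification over an
// addition.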
1936#[gpui::test]
1937async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
1938 let fs = FakeFs::new(cx.background());
1939 fs.insert_tree(
1940 "/root",
1941 json!({
1942 ".git": {},
1943 "a": {
1944 "b": {
1945 "c1.txt": "",
1946 "c2.txt": "",
1947 },
1948 "d": {
1949 "e1.txt": "",
1950 "e2.txt": "",
1951 "e3.txt": "",
1952 }
1953 },
1954 "f": {
1955 "no-status.txt": ""
1956 },
1957 "g": {
1958 "h1.txt": "",
1959 "h2.txt": ""
1960 },
1961
1962 }),
1963 )
1964 .await;
1965
1966 fs.set_status_for_repo_via_git_operation(
1967 &Path::new("/root/.git"),
1968 &[
1969 (Path::new("a/b/c1.txt"), GitFileStatus::Added),
1970 (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
1971 (Path::new("g/h2.txt"), GitFileStatus::Conflict),
1972 ],
1973 );
1974
1975 let tree = Worktree::local(
1976 build_client(cx),
1977 Path::new("/root"),
1978 true,
1979 fs.clone(),
1980 Default::default(),
1981 &mut cx.to_async(),
1982 )
1983 .await
1984 .unwrap();
1985
1986 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1987 .await;
1988
1989 cx.foreground().run_until_parked();
1990 let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
1991
1992 check_propagated_statuses(
1993 &snapshot,
1994 &[
1995 (Path::new(""), Some(GitFileStatus::Conflict)),
1996 (Path::new("a"), Some(GitFileStatus::Modified)),
1997 (Path::new("a/b"), Some(GitFileStatus::Added)),
1998 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
1999 (Path::new("a/b/c2.txt"), None),
2000 (Path::new("a/d"), Some(GitFileStatus::Modified)),
2001 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2002 (Path::new("f"), None),
2003 (Path::new("f/no-status.txt"), None),
2004 (Path::new("g"), Some(GitFileStatus::Conflict)),
2005 (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
2006 ],
2007 );
2008
2009 check_propagated_statuses(
2010 &snapshot,
2011 &[
2012 (Path::new("a/b"), Some(GitFileStatus::Added)),
2013 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
2014 (Path::new("a/b/c2.txt"), None),
2015 (Path::new("a/d"), Some(GitFileStatus::Modified)),
2016 (Path::new("a/d/e1.txt"), None),
2017 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2018 (Path::new("f"), None),
2019 (Path::new("f/no-status.txt"), None),
2020 (Path::new("g"), Some(GitFileStatus::Conflict)),
2021 ],
2022 );
2023
2024 check_propagated_statuses(
2025 &snapshot,
2026 &[
2027 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
2028 (Path::new("a/b/c2.txt"), None),
2029 (Path::new("a/d/e1.txt"), None),
2030 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2031 (Path::new("f/no-status.txt"), None),
2032 ],
2033 );
2034
2035 #[track_caller]
2036 fn check_propagated_statuses(
2037 snapshot: &Snapshot,
2038 expected_statuses: &[(&Path, Option<GitFileStatus>)],
2039 ) {
2040 let mut entries = expected_statuses
2041 .iter()
2042 .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
2043 .collect::<Vec<_>>();
2044 snapshot.propagate_git_statuses(&mut entries);
2045 assert_eq!(
2046 entries
2047 .iter()
2048 .map(|e| (e.path.as_ref(), e.git_status))
2049 .collect::<Vec<_>>(),
2050 expected_statuses
2051 );
2052 }
2053}
2054
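// Test helpers for building a client and for driving real git repositories via git2.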
fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
    let http_client = FakeHttpClient::with_404_response();
    cx.read(|cx| Client::new(http_client, cx))
}

#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
    git2::Repository::init(path).expect("Failed to initialize git repository")
}

#[track_caller]
fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
    let path = path.as_ref();
    let mut index = repo.index().expect("Failed to get index");
    index.add_path(path).expect("Failed to add path to index");
    index.write().expect("Failed to write index");
}

#[track_caller]
fn git_remove_index(path: &Path, repo: &git2::Repository) {
    let mut index = repo.index().expect("Failed to get index");
    index
        .remove_path(path)
        .expect("Failed to remove path from index");
    index.write().expect("Failed to write index");
}

#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    let oid = repo.index().unwrap().write_tree().unwrap();
    let tree = repo.find_tree(oid).unwrap();
    if let Ok(head) = repo.head() {
        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
        let parent_commit = parent_obj.as_commit().unwrap();
        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[parent_commit],
        )
        .expect("Failed to commit with parent");
    } else {
        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
            .expect("Failed to commit");
    }
}

#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}

#[track_caller]
fn git_reset(offset: usize, repo: &git2::Repository) {
    let head = repo.head().expect("Couldn't get repo head");
    let object = head.peel(git2::ObjectType::Commit).unwrap();
    let commit = object.as_commit().unwrap();
    let new_head = commit.parents().nth(offset).expect("Not enough history");
    repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
        .expect("Could not reset");
}

#[allow(dead_code)]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}