use crate::{
    worktree::{Event, Snapshot, WorktreeHandle},
    Entry, EntryKind, PathChange, Worktree,
};
use anyhow::Result;
use client::Client;
use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
use git::GITIGNORE;
use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
use parking_lot::Mutex;
use postage::stream::Stream;
use pretty_assertions::assert_eq;
use rand::prelude::*;
use serde_json::json;
use std::{
    env,
    fmt::Write,
    path::{Path, PathBuf},
    sync::Arc,
};
use util::{http::FakeHttpClient, test::temp_tree, ResultExt};

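// Verifies that `entries(false)` skips gitignored entries while `entries(true)`
// includes them.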
#[gpui::test]
async fn test_traversal(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "a/b\n",
            "a": {
                "b": "",
                "c": "",
            }
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new(".gitignore"),
                Path::new("a"),
                Path::new("a/c"),
            ]
        );
        assert_eq!(
            tree.entries(true)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new(".gitignore"),
                Path::new("a"),
                Path::new("a/b"),
                Path::new("a/c"),
            ]
        );
    })
}

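// Verifies that `descendent_entries` respects its `include_dirs` and
// `include_ignored` flags, both before and after a gitignored directory is
// expanded.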
#[gpui::test]
async fn test_descendent_entries(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "a": "",
            "b": {
                "c": {
                    "d": ""
                },
                "e": {}
            },
            "f": "",
            "g": {
                "h": {}
            },
            "i": {
                "j": {
                    "k": ""
                },
                "l": {

                }
            },
            ".gitignore": "i/j\n",
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.descendent_entries(false, false, Path::new("b"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("b/c/d"),]
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("b"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new("b"),
                Path::new("b/c"),
                Path::new("b/c/d"),
                Path::new("b/e"),
            ]
        );

        assert_eq!(
            tree.descendent_entries(false, false, Path::new("g"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            Vec::<PathBuf>::new()
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("g"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("g"), Path::new("g/h"),]
        );
    });

    // Expand gitignored directory.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("i/j").into()])
    })
    .recv()
    .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.descendent_entries(false, false, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            Vec::<PathBuf>::new()
        );
        assert_eq!(
            tree.descendent_entries(false, true, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("i/j/k")]
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("i"), Path::new("i/l"),]
        );
    })
}

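// Verifies that symlinks forming a cycle are listed without being followed, and
// that renaming one of them is picked up by the scanner.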
#[gpui::test(iterations = 10)]
async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "lib": {
                "a": {
                    "a.txt": ""
                },
                "b": {
                    "b.txt": ""
                }
            }
        }),
    )
    .await;
    fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
    fs.insert_symlink("/root/lib/b/lib", "..".into()).await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new("lib"),
                Path::new("lib/a"),
                Path::new("lib/a/a.txt"),
                Path::new("lib/a/lib"),
                Path::new("lib/b"),
                Path::new("lib/b/b.txt"),
                Path::new("lib/b/lib"),
            ]
        );
    });

    fs.rename(
        Path::new("/root/lib/a/lib"),
        Path::new("/root/lib/a/lib-2"),
        Default::default(),
    )
    .await
    .unwrap();
    executor.run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new("lib"),
                Path::new("lib/a"),
                Path::new("lib/a/a.txt"),
                Path::new("lib/a/lib-2"),
                Path::new("lib/b"),
                Path::new("lib/b/b.txt"),
                Path::new("lib/b/lib"),
            ]
        );
    });
}

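// Verifies that symlinks pointing outside the worktree root are marked as
// external and are only scanned when explicitly expanded.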
#[gpui::test]
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir1": {
                "deps": {
                    // symlinks here
                },
                "src": {
                    "a.rs": "",
                    "b.rs": "",
                },
            },
            "dir2": {
                "src": {
                    "c.rs": "",
                    "d.rs": "",
                }
            },
            "dir3": {
                "deps": {},
                "src": {
                    "e.rs": "",
                    "f.rs": "",
                },
            }
        }),
    )
    .await;

    // These symlinks point to directories outside of the worktree's root, dir1.
    fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
        .await;
    fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
        .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root/dir1"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // The symlinked directories are not scanned by default.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );

        assert_eq!(
            tree.entry_for_path("deps/dep-dir2").unwrap().kind,
            EntryKind::UnloadedDir
        );
    });

    // Expand one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
    })
    .recv()
    .await;

    // The expanded directory's contents are loaded. Subdirectories are
    // not scanned yet.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("deps/dep-dir3/deps"), true),
                (Path::new("deps/dep-dir3/src"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );
    });

    // Expand a subdirectory of one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
    })
    .recv()
    .await;

    // The expanded subdirectory's contents are loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("deps/dep-dir3/deps"), true),
                (Path::new("deps/dep-dir3/src"), true),
                (Path::new("deps/dep-dir3/src/e.rs"), true),
                (Path::new("deps/dep-dir3/src/f.rs"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );
    });
}

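// Verifies that opening a buffer nested inside an unexpanded gitignored
// directory loads only the directories needed to reach that file.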
#[gpui::test]
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "one": {
                "node_modules": {
                    "a": {
                        "a1.js": "a1",
                        "a2.js": "a2",
                    },
                    "b": {
                        "b1.js": "b1",
                        "b2.js": "b2",
                    },
                },
            },
            "two": {
                "x.js": "",
                "y.js": "",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );
    });

    // Open a file that is nested inside of a gitignored directory that
    // has not yet been expanded.
    let prev_read_dir_count = fs.read_dir_call_count();
    let buffer = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, cx| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("one/node_modules/a"), true),
                (Path::new("one/node_modules/b"), true),
                (Path::new("one/node_modules/b/b1.js"), true),
                (Path::new("one/node_modules/b/b2.js"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );

        assert_eq!(
            buffer.read(cx).file().unwrap().path().as_ref(),
            Path::new("one/node_modules/b/b1.js")
        );

        // Only the newly-expanded directories are scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
    });

    // Open another file in a different subdirectory of the same
    // gitignored directory.
    let prev_read_dir_count = fs.read_dir_call_count();
    let buffer = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, cx| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("one/node_modules/a"), true),
                (Path::new("one/node_modules/a/a1.js"), true),
                (Path::new("one/node_modules/a/a2.js"), true),
                (Path::new("one/node_modules/b"), true),
                (Path::new("one/node_modules/b/b1.js"), true),
                (Path::new("one/node_modules/b/b2.js"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );

        assert_eq!(
            buffer.read(cx).file().unwrap().path().as_ref(),
            Path::new("one/node_modules/a/a2.js")
        );

        // Only the newly-expanded directory is scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
    });
}

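// Verifies that when a .gitignore change causes a directory to stop being
// ignored, its previously-unloaded contents are scanned exactly once.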
#[gpui::test]
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "a": {
                "a.js": "",
            },
            "b": {
                "b.js": "",
            },
            "node_modules": {
                "c": {
                    "c.js": "",
                },
                "d": {
                    "d.js": "",
                    "e": {
                        "e1.js": "",
                        "e2.js": "",
                    },
                    "f": {
                        "f1.js": "",
                        "f2.js": "",
                    }
                },
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Open a file within the gitignored directory, forcing some of its
    // subdirectories to be read, but not all.
    let read_dir_count_1 = fs.read_dir_call_count();
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
    })
    .recv()
    .await;

    // Those subdirectories are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("a"), false),
                (Path::new("a/a.js"), false),
                (Path::new("b"), false),
                (Path::new("b/b.js"), false),
                (Path::new("node_modules"), true),
                (Path::new("node_modules/c"), true),
                (Path::new("node_modules/d"), true),
                (Path::new("node_modules/d/d.js"), true),
                (Path::new("node_modules/d/e"), true),
                (Path::new("node_modules/d/f"), true),
            ]
        );
    });
    let read_dir_count_2 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);

    // Update the gitignore so that node_modules is no longer ignored,
    // but a subdirectory is ignored
    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
        .await
        .unwrap();
    cx.foreground().run_until_parked();

    // All of the directories that are no longer ignored are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("a"), false),
                (Path::new("a/a.js"), false),
                (Path::new("b"), false),
                (Path::new("b/b.js"), false),
                // This directory is no longer ignored
                (Path::new("node_modules"), false),
                (Path::new("node_modules/c"), false),
                (Path::new("node_modules/c/c.js"), false),
                (Path::new("node_modules/d"), false),
                (Path::new("node_modules/d/d.js"), false),
                // This subdirectory is now ignored
                (Path::new("node_modules/d/e"), true),
                (Path::new("node_modules/d/f"), false),
                (Path::new("node_modules/d/f/f1.js"), false),
                (Path::new("node_modules/d/f/f2.js"), false),
            ]
        );
    });

    // Each of the newly-loaded directories is scanned only once.
    let read_dir_count_3 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
}

#[gpui::test]
async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
    // .gitignores are handled explicitly by Zed and do not use the git
    // machinery that the git_tests module checks
    let parent_dir = temp_tree(json!({
        ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
        "tree": {
            ".git": {},
            ".gitignore": "ignored-dir\n",
            "tracked-dir": {
                "tracked-file1": "",
                "ancestor-ignored-file1": "",
            },
            "ignored-dir": {
                "ignored-file1": ""
            }
        }
    }));
    let dir = parent_dir.path().join("tree");

    let tree = Worktree::local(
        build_client(cx),
        dir.as_path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        assert!(
            !tree
                .entry_for_path("tracked-dir/tracked-file1")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("ignored-dir/ignored-file1")
                .unwrap()
                .is_ignored
        );
    });

    std::fs::write(dir.join("tracked-dir/tracked-file2"), "").unwrap();
    std::fs::write(dir.join("tracked-dir/ancestor-ignored-file2"), "").unwrap();
    std::fs::write(dir.join("ignored-dir/ignored-file2"), "").unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| {
        let tree = tree.read(cx);
        assert!(
            !tree
                .entry_for_path("tracked-dir/tracked-file2")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("ignored-dir/ignored-file2")
                .unwrap()
                .is_ignored
        );
        assert!(tree.entry_for_path(".git").unwrap().is_ignored);
    });
}

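// Verifies that files written via `write_file` appear in the snapshot with the
// correct ignore status.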
#[gpui::test]
async fn test_write_file(cx: &mut TestAppContext) {
    let dir = temp_tree(json!({
        ".git": {},
        ".gitignore": "ignored-dir\n",
        "tracked-dir": {},
        "ignored-dir": {}
    }));

    let tree = Worktree::local(
        build_client(cx),
        dir.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.update(cx, |tree, cx| {
        tree.as_local().unwrap().write_file(
            Path::new("tracked-dir/file.txt"),
            "hello".into(),
            Default::default(),
            cx,
        )
    })
    .await
    .unwrap();
    tree.update(cx, |tree, cx| {
        tree.as_local().unwrap().write_file(
            Path::new("ignored-dir/file.txt"),
            "world".into(),
            Default::default(),
            cx,
        )
    })
    .await
    .unwrap();

    tree.read_with(cx, |tree, _| {
        let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
        let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
        assert!(!tracked.is_ignored);
        assert!(ignored.is_ignored);
    });
}

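// Verifies that a directory created while the initial scan is still running is
// reflected both in the local snapshot and in the updates observed remotely.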
#[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "b": {},
            "c": {},
            "d": {},
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        "/root".as_ref(),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let snapshot1 = tree.update(cx, |tree, cx| {
        let tree = tree.as_local_mut().unwrap();
        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
        let _ = tree.observe_updates(0, cx, {
            let snapshot = snapshot.clone();
            move |update| {
                snapshot.lock().apply_remote_update(update).unwrap();
                async { true }
            }
        });
        snapshot
    });

    let entry = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("a/e".as_ref(), true, cx)
        })
        .await
        .unwrap();
    assert!(entry.is_dir());

    cx.foreground().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
    });

    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
    assert_eq!(
        snapshot1.lock().entries(true).collect::<Vec<_>>(),
        snapshot2.entries(true).collect::<Vec<_>>()
    );
}

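// Randomized test: applies worktree mutations while the initial scan is still
// running, then checks that replaying the observed updates onto earlier
// snapshots reproduces the final snapshot.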
#[gpui::test(iterations = 100)]
async fn test_random_worktree_operations_during_initial_scan(
    cx: &mut TestAppContext,
    mut rng: StdRng,
) {
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(5);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new("/test");
    let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        build_client(cx),
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    for _ in 0..operations {
        worktree
            .update(cx, |worktree, cx| {
                randomly_mutate_worktree(worktree, &mut rng, cx)
            })
            .await
            .log_err();
        worktree.read_with(cx, |tree, _| {
            tree.as_local().unwrap().snapshot().check_invariants(true)
        });

        if rng.gen_bool(0.6) {
            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
        }
    }

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    cx.foreground().run_until_parked();

    let final_snapshot = worktree.read_with(cx, |tree, _| {
        let tree = tree.as_local().unwrap();
        let snapshot = tree.snapshot();
        snapshot.check_invariants(true);
        snapshot
    });

    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
        let mut updated_snapshot = snapshot.clone();
        for update in updates.lock().iter() {
            if update.scan_id >= updated_snapshot.scan_id() as u64 {
                updated_snapshot
                    .apply_remote_update(update.clone())
                    .unwrap();
            }
        }

        assert_eq!(
            updated_snapshot.entries(true).collect::<Vec<_>>(),
            final_snapshot.entries(true).collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
        );
    }
}

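// Randomized test: interleaves worktree and filesystem mutations with batched
// fs events, checks snapshot invariants, rescans from scratch, and verifies
// that replaying observed updates converges on the final snapshot.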
#[gpui::test(iterations = 100)]
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(40);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new("/test");
    let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        build_client(cx),
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    fs.as_fake().pause_events();
    let mut snapshots = Vec::new();
    let mut mutations_len = operations;
    while mutations_len > 1 {
        if rng.gen_bool(0.2) {
            worktree
                .update(cx, |worktree, cx| {
                    randomly_mutate_worktree(worktree, &mut rng, cx)
                })
                .await
                .log_err();
        } else {
            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
        }

        let buffered_event_count = fs.as_fake().buffered_event_count();
        if buffered_event_count > 0 && rng.gen_bool(0.3) {
            let len = rng.gen_range(0..=buffered_event_count);
            log::info!("flushing {} events", len);
            fs.as_fake().flush_events(len);
        } else {
            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
            mutations_len -= 1;
        }

        cx.foreground().run_until_parked();
        if rng.gen_bool(0.2) {
            log::info!("storing snapshot {}", snapshots.len());
            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
            snapshots.push(snapshot);
        }
    }

    log::info!("quiescing");
    fs.as_fake().flush_events(usize::MAX);
    cx.foreground().run_until_parked();

    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    snapshot.check_invariants(true);
    let expanded_paths = snapshot
        .expanded_entries()
        .map(|e| e.path.clone())
        .collect::<Vec<_>>();

    {
        let new_worktree = Worktree::local(
            build_client(cx),
            root_dir,
            true,
            fs.clone(),
            Default::default(),
            &mut cx.to_async(),
        )
        .await
        .unwrap();
        new_worktree
            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
            .await;
        new_worktree
            .update(cx, |tree, _| {
                tree.as_local_mut()
                    .unwrap()
                    .refresh_entries_for_paths(expanded_paths)
            })
            .recv()
            .await;
        let new_snapshot =
            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        assert_eq!(
            snapshot.entries_without_ids(true),
            new_snapshot.entries_without_ids(true)
        );
    }

    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
        for update in updates.lock().iter() {
            if update.scan_id >= prev_snapshot.scan_id() as u64 {
                prev_snapshot.apply_remote_update(update.clone()).unwrap();
            }
        }

        assert_eq!(
            prev_snapshot
                .entries(true)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            snapshot
                .entries(true)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }

    fn ignore_pending_dir(entry: &Entry) -> Entry {
        let mut entry = entry.clone();
        if entry.kind.is_dir() {
            entry.kind = EntryKind::Dir
        }
        entry
    }
}

// The worktree's `UpdatedEntries` event can be used to follow along with
// all changes to the worktree's snapshot.
fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
    let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
    cx.subscribe(&cx.handle(), move |tree, _, event, _| {
        if let Event::UpdatedEntries(changes) = event {
            for (path, _, change_type) in changes.iter() {
                let entry = tree.entry_for_path(&path).cloned();
                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
                    Ok(ix) | Err(ix) => ix,
                };
                match change_type {
                    PathChange::Loaded => entries.insert(ix, entry.unwrap()),
                    PathChange::Added => entries.insert(ix, entry.unwrap()),
                    PathChange::Removed => drop(entries.remove(ix)),
                    PathChange::Updated => {
                        let entry = entry.unwrap();
                        let existing_entry = entries.get_mut(ix).unwrap();
                        assert_eq!(existing_entry.path, entry.path);
                        *existing_entry = entry;
                    }
                    PathChange::AddedOrUpdated => {
                        let entry = entry.unwrap();
                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
                            *entries.get_mut(ix).unwrap() = entry;
                        } else {
                            entries.insert(ix, entry);
                        }
                    }
                }
            }

            let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
        }
    })
    .detach();
}

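// Applies a random operation to the worktree: deleting, renaming, creating, or
// overwriting an entry.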
fn randomly_mutate_worktree(
    worktree: &mut Worktree,
    rng: &mut impl Rng,
    cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> {
    log::info!("mutating worktree");
    let worktree = worktree.as_local_mut().unwrap();
    let snapshot = worktree.snapshot();
    let entry = snapshot.entries(false).choose(rng).unwrap();

    match rng.gen_range(0_u32..100) {
        0..=33 if entry.path.as_ref() != Path::new("") => {
            log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
            worktree.delete_entry(entry.id, cx).unwrap()
        }
        ..=66 if entry.path.as_ref() != Path::new("") => {
            let other_entry = snapshot.entries(false).choose(rng).unwrap();
            let new_parent_path = if other_entry.is_dir() {
                other_entry.path.clone()
            } else {
                other_entry.path.parent().unwrap().into()
            };
            let mut new_path = new_parent_path.join(random_filename(rng));
            if new_path.starts_with(&entry.path) {
                new_path = random_filename(rng).into();
            }

            log::info!(
                "renaming entry {:?} ({}) to {:?}",
                entry.path,
                entry.id.0,
                new_path
            );
            let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
            cx.foreground().spawn(async move {
                task.await?;
                Ok(())
            })
        }
        _ => {
            let task = if entry.is_dir() {
                let child_path = entry.path.join(random_filename(rng));
                let is_dir = rng.gen_bool(0.3);
                log::info!(
                    "creating {} at {:?}",
                    if is_dir { "dir" } else { "file" },
                    child_path,
                );
                worktree.create_entry(child_path, is_dir, cx)
            } else {
                log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
                worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
            };
            cx.foreground().spawn(async move {
                task.await?;
                Ok(())
            })
        }
    }
}

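// Applies a random mutation to the fake filesystem: creating files and
// directories, writing .gitignore files, renaming, or deleting entries.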
async fn randomly_mutate_fs(
    fs: &Arc<dyn Fs>,
    root_path: &Path,
    insertion_probability: f64,
    rng: &mut impl Rng,
) {
    log::info!("mutating fs");
    let mut files = Vec::new();
    let mut dirs = Vec::new();
    for path in fs.as_fake().paths(false) {
        if path.starts_with(root_path) {
            if fs.is_file(&path).await {
                files.push(path);
            } else {
                dirs.push(path);
            }
        }
    }

    if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
        let path = dirs.choose(rng).unwrap();
        let new_path = path.join(random_filename(rng));

        if rng.gen() {
            log::info!(
                "creating dir {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_dir(&new_path).await.unwrap();
        } else {
            log::info!(
                "creating file {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_file(&new_path, Default::default()).await.unwrap();
        }
    } else if rng.gen_bool(0.05) {
        let ignore_dir_path = dirs.choose(rng).unwrap();
        let ignore_path = ignore_dir_path.join(&*GITIGNORE);

        let subdirs = dirs
            .iter()
            .filter(|d| d.starts_with(&ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let subfiles = files
            .iter()
            .filter(|d| d.starts_with(&ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let files_to_ignore = {
            let len = rng.gen_range(0..=subfiles.len());
            subfiles.choose_multiple(rng, len)
        };
        let dirs_to_ignore = {
            let len = rng.gen_range(0..subdirs.len());
            subdirs.choose_multiple(rng, len)
        };

        let mut ignore_contents = String::new();
        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
            writeln!(
                ignore_contents,
                "{}",
                path_to_ignore
                    .strip_prefix(&ignore_dir_path)
                    .unwrap()
                    .to_str()
                    .unwrap()
            )
            .unwrap();
        }
        log::info!(
            "creating gitignore {:?} with contents:\n{}",
            ignore_path.strip_prefix(&root_path).unwrap(),
            ignore_contents
        );
        fs.save(
            &ignore_path,
            &ignore_contents.as_str().into(),
            Default::default(),
        )
        .await
        .unwrap();
    } else {
        let old_path = {
            let file_path = files.choose(rng);
            let dir_path = dirs[1..].choose(rng);
            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
        };

        let is_rename = rng.gen();
        if is_rename {
            let new_path_parent = dirs
                .iter()
                .filter(|d| !d.starts_with(old_path))
                .choose(rng)
                .unwrap();

            let overwrite_existing_dir =
                !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
            let new_path = if overwrite_existing_dir {
                fs.remove_dir(
                    &new_path_parent,
                    RemoveOptions {
                        recursive: true,
                        ignore_if_not_exists: true,
                    },
                )
                .await
                .unwrap();
                new_path_parent.to_path_buf()
            } else {
                new_path_parent.join(random_filename(rng))
            };

            log::info!(
                "renaming {:?} to {}{:?}",
                old_path.strip_prefix(&root_path).unwrap(),
                if overwrite_existing_dir {
                    "overwrite "
                } else {
                    ""
                },
                new_path.strip_prefix(&root_path).unwrap()
            );
            fs.rename(
                &old_path,
                &new_path,
                fs::RenameOptions {
                    overwrite: true,
                    ignore_if_exists: true,
                },
            )
            .await
            .unwrap();
        } else if fs.is_file(&old_path).await {
            log::info!(
                "deleting file {:?}",
                old_path.strip_prefix(&root_path).unwrap()
            );
            fs.remove_file(old_path, Default::default()).await.unwrap();
        } else {
            log::info!(
                "deleting dir {:?}",
                old_path.strip_prefix(&root_path).unwrap()
            );
            fs.remove_dir(
                &old_path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await
            .unwrap();
        }
    }
}

fn random_filename(rng: &mut impl Rng) -> String {
    (0..6)
        .map(|_| rng.sample(rand::distributions::Alphanumeric))
        .map(char::from)
        .collect()
}

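// Verifies that git statuses are preserved when a repository's work directory
// is renamed.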
#[gpui::test]
async fn test_rename_work_directory(cx: &mut TestAppContext) {
    let root = temp_tree(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    let tree = Worktree::local(
        build_client(cx),
        root_path,
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").ok();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.flush_fs_events(cx).await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        let (work_dir, _) = tree.repositories().next().unwrap();
        assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
        assert_eq!(
            tree.status_for_file(Path::new("projects/project1/a")),
            Some(GitFileStatus::Modified)
        );
        assert_eq!(
            tree.status_for_file(Path::new("projects/project1/b")),
            Some(GitFileStatus::Added)
        );
    });

    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .ok();
    tree.flush_fs_events(cx).await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        let (work_dir, _) = tree.repositories().next().unwrap();
        assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
        assert_eq!(
            tree.status_for_file(Path::new("projects/project2/a")),
            Some(GitFileStatus::Modified)
        );
        assert_eq!(
            tree.status_for_file(Path::new("projects/project2/b")),
            Some(GitFileStatus::Added)
        );
    });
}

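// Verifies that paths resolve to the innermost containing git repository, and
// that removing a repository's .git directory is detected.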
#[gpui::test]
async fn test_git_repository_for_path(cx: &mut TestAppContext) {
    let root = temp_tree(json!({
        "c.txt": "",
        "dir1": {
            ".git": {},
            "deps": {
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": ""
                    }
                }
            },
            "src": {
                "b.txt": ""
            }
        },
    }));

    let tree = Worktree::local(
        build_client(cx),
        root.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _cx| {
        let tree = tree.as_local().unwrap();

        assert!(tree.repository_for_path("c.txt".as_ref()).is_none());

        let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
        assert_eq!(
            entry
                .work_directory(tree)
                .map(|directory| directory.as_ref().to_owned()),
            Some(Path::new("dir1").to_owned())
        );

        let entry = tree
            .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
            .unwrap();
        assert_eq!(
            entry
                .work_directory(tree)
                .map(|directory| directory.as_ref().to_owned()),
            Some(Path::new("dir1/deps/dep1").to_owned())
        );

        let entries = tree.files(false, 0);

        let paths_with_repos = tree
            .entries_with_repositories(entries)
            .map(|(entry, repo)| {
                (
                    entry.path.as_ref(),
                    repo.and_then(|repo| {
                        repo.work_directory(&tree)
                            .map(|work_directory| work_directory.0.to_path_buf())
                    }),
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            paths_with_repos,
            &[
                (Path::new("c.txt"), None),
                (
                    Path::new("dir1/deps/dep1/src/a.txt"),
                    Some(Path::new("dir1/deps/dep1").into())
                ),
                (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
            ]
        );
    });

    let repo_update_events = Arc::new(Mutex::new(vec![]));
    tree.update(cx, |_, cx| {
        let repo_update_events = repo_update_events.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedGitRepositories(update) = event {
                repo_update_events.lock().push(update.clone());
            }
        })
        .detach();
    });

    std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
    tree.flush_fs_events(cx).await;

    assert_eq!(
        repo_update_events.lock()[0]
            .iter()
            .map(|e| e.0.clone())
            .collect::<Vec<Arc<Path>>>(),
        vec![Path::new("dir1").into()]
    );

    std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _cx| {
        let tree = tree.as_local().unwrap();

        assert!(tree
            .repository_for_path("dir1/src/b.txt".as_ref())
            .is_none());
    });
}

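// Exercises a sequence of git operations (add, commit, reset, stash, file and
// directory renames) and checks the statuses reported by the worktree after
// each step.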
#[gpui::test]
async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
    const IGNORE_RULE: &'static str = "**/target";

    let root = temp_tree(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));

    let tree = Worktree::local(
        build_client(cx),
        root.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    const A_TXT: &'static str = "a.txt";
    const B_TXT: &'static str = "b.txt";
    const E_TXT: &'static str = "c/d/e.txt";
    const F_TXT: &'static str = "f.txt";
    const DOTGITIGNORE: &'static str = ".gitignore";
    const BUILD_FILE: &'static str = "target/build_file";
    let project_path: &Path = &Path::new("project");

    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(Path::new(A_TXT), &repo);
    git_add(Path::new(E_TXT), &repo);
    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Initial commit", &repo);

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that the right git state is observed on startup
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(snapshot.repositories().count(), 1);
        let (dir, _) = snapshot.repositories().next().unwrap();
        assert_eq!(dir.as_ref(), Path::new("project"));

        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );
    });

    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(project_path.join(A_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    git_add(Path::new(A_TXT), &repo);
    git_add(Path::new(B_TXT), &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that repo only changes are tracked
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );

        assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
    });

    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that more complex repo changes are tracked
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(E_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &'static str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
            Some(GitFileStatus::Added)
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(
                project_path
                    .join(Path::new(renamed_dir_name))
                    .join(RENAMED_FILE)
            ),
            Some(GitFileStatus::Added)
        );
    });
}

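// Verifies that git statuses propagate from files to their ancestor
// directories, surfacing the most severe status among descendants.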
#[gpui::test]
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".git": {},
            "a": {
                "b": {
                    "c1.txt": "",
                    "c2.txt": "",
                },
                "d": {
                    "e1.txt": "",
                    "e2.txt": "",
                    "e3.txt": "",
                }
            },
            "f": {
                "no-status.txt": ""
            },
            "g": {
                "h1.txt": "",
                "h2.txt": ""
            },

        }),
    )
    .await;

    fs.set_status_for_repo_via_git_operation(
        &Path::new("/root/.git"),
        &[
            (Path::new("a/b/c1.txt"), GitFileStatus::Added),
            (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
            (Path::new("g/h2.txt"), GitFileStatus::Conflict),
        ],
    );

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    cx.foreground().run_until_parked();
    let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new(""), Some(GitFileStatus::Conflict)),
            (Path::new("a"), Some(GitFileStatus::Modified)),
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
            (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f/no-status.txt"), None),
        ],
    );

    #[track_caller]
    fn check_propagated_statuses(
        snapshot: &Snapshot,
        expected_statuses: &[(&Path, Option<GitFileStatus>)],
    ) {
        let mut entries = expected_statuses
            .iter()
            .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
            .collect::<Vec<_>>();
        snapshot.propagate_git_statuses(&mut entries);
        assert_eq!(
            entries
                .iter()
                .map(|e| (e.path.as_ref(), e.git_status))
                .collect::<Vec<_>>(),
            expected_statuses
        );
    }
}

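// Builds a test client whose HTTP requests all return 404 responses.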
fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
    let http_client = FakeHttpClient::with_404_response();
    cx.read(|cx| Client::new(http_client, cx))
}

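// Helpers that drive a real git2 repository for the RealFs-based tests above.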
#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
    git2::Repository::init(path).expect("Failed to initialize git repository")
}

#[track_caller]
fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
    let path = path.as_ref();
    let mut index = repo.index().expect("Failed to get index");
    index.add_path(path).expect("Failed to add file to index");
    index.write().expect("Failed to write index");
}

#[track_caller]
fn git_remove_index(path: &Path, repo: &git2::Repository) {
    let mut index = repo.index().expect("Failed to get index");
    index
        .remove_path(path)
        .expect("Failed to remove file from index");
    index.write().expect("Failed to write index");
}

#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    let oid = repo.index().unwrap().write_tree().unwrap();
    let tree = repo.find_tree(oid).unwrap();
    if let Some(head) = repo.head().ok() {
        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();

        let parent_commit = parent_obj.as_commit().unwrap();

        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[parent_commit],
        )
        .expect("Failed to commit with parent");
    } else {
        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
            .expect("Failed to commit");
    }
}

#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}

#[track_caller]
fn git_reset(offset: usize, repo: &git2::Repository) {
    let head = repo.head().expect("Couldn't get repo head");
    let object = head.peel(git2::ObjectType::Commit).unwrap();
    let commit = object.as_commit().unwrap();
    let new_head = commit
        .parents()
        .inspect(|parent| {
            parent.message();
        })
        .skip(offset)
        .next()
        .expect("Not enough history");
    repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
        .expect("Could not reset");
}

#[allow(dead_code)]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}