use crate::{
    worktree::{Event, Snapshot, WorktreeHandle},
    Entry, EntryKind, PathChange, Worktree,
};
use anyhow::Result;
use client::Client;
use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
use git::GITIGNORE;
use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
use parking_lot::Mutex;
use postage::stream::Stream;
use pretty_assertions::assert_eq;
use rand::prelude::*;
use serde_json::json;
use std::{
    env,
    fmt::Write,
    path::{Path, PathBuf},
    sync::Arc,
};
use util::{http::FakeHttpClient, test::temp_tree, ResultExt};

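// Verifies basic worktree traversal: `entries(false)` skips gitignored
// entries, while `entries(true)` includes them.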
#[gpui::test]
async fn test_traversal(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "a/b\n",
            "a": {
                "b": "",
                "c": "",
            }
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new(".gitignore"),
                Path::new("a"),
                Path::new("a/c"),
            ]
        );
        assert_eq!(
            tree.entries(true)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new(".gitignore"),
                Path::new("a"),
                Path::new("a/b"),
                Path::new("a/c"),
            ]
        );
    })
}

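// Verifies that `descendent_entries` honors its directory and ignored-entry
// flags, both before and after a gitignored directory is expanded.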
#[gpui::test]
async fn test_descendent_entries(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "a": "",
            "b": {
                "c": {
                    "d": ""
                },
                "e": {}
            },
            "f": "",
            "g": {
                "h": {}
            },
            "i": {
                "j": {
                    "k": ""
                },
                "l": {}
            },
            ".gitignore": "i/j\n",
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.descendent_entries(false, false, Path::new("b"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("b/c/d")]
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("b"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new("b"),
                Path::new("b/c"),
                Path::new("b/c/d"),
                Path::new("b/e"),
            ]
        );

        assert_eq!(
            tree.descendent_entries(false, false, Path::new("g"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            Vec::<PathBuf>::new()
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("g"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("g"), Path::new("g/h")]
        );
    });

    // Expand gitignored directory.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("i/j").into()])
    })
    .recv()
    .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.descendent_entries(false, false, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            Vec::<PathBuf>::new()
        );
        assert_eq!(
            tree.descendent_entries(false, true, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("i/j/k")]
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("i"), Path::new("i/l")]
        );
    })
}

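// Symlinks that point back into their own ancestors must not send the scanner
// into a loop; the symlinked directories show up as entries but are not followed.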
#[gpui::test(iterations = 10)]
async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "lib": {
                "a": {
                    "a.txt": ""
                },
                "b": {
                    "b.txt": ""
                }
            }
        }),
    )
    .await;
    fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
    fs.insert_symlink("/root/lib/b/lib", "..".into()).await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new("lib"),
                Path::new("lib/a"),
                Path::new("lib/a/a.txt"),
                Path::new("lib/a/lib"),
                Path::new("lib/b"),
                Path::new("lib/b/b.txt"),
                Path::new("lib/b/lib"),
            ]
        );
    });

    fs.rename(
        Path::new("/root/lib/a/lib"),
        Path::new("/root/lib/a/lib-2"),
        Default::default(),
    )
    .await
    .unwrap();
    executor.run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new("lib"),
                Path::new("lib/a"),
                Path::new("lib/a/a.txt"),
                Path::new("lib/a/lib-2"),
                Path::new("lib/b"),
                Path::new("lib/b/b.txt"),
                Path::new("lib/b/lib"),
            ]
        );
    });
}

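// Symlinks that lead outside the worktree root are marked `is_external` and
// their contents are only scanned when they are explicitly expanded.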
#[gpui::test]
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir1": {
                "deps": {
                    // symlinks here
                },
                "src": {
                    "a.rs": "",
                    "b.rs": "",
                },
            },
            "dir2": {
                "src": {
                    "c.rs": "",
                    "d.rs": "",
                }
            },
            "dir3": {
                "deps": {},
                "src": {
                    "e.rs": "",
                    "f.rs": "",
                },
            }
        }),
    )
    .await;

    // These symlinks point to directories outside of the worktree's root, dir1.
    fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
        .await;
    fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
        .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root/dir1"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // The symlinked directories are not scanned by default.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );

        assert_eq!(
            tree.entry_for_path("deps/dep-dir2").unwrap().kind,
            EntryKind::PendingDir
        );
    });

    // Expand one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
    })
    .recv()
    .await;

    // The expanded directory's contents are loaded. Subdirectories are
    // not scanned yet.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("deps/dep-dir3/deps"), true),
                (Path::new("deps/dep-dir3/src"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );
    });

    // Expand a subdirectory of one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
    })
    .recv()
    .await;

    // The expanded subdirectory's contents are loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("deps/dep-dir3/deps"), true),
                (Path::new("deps/dep-dir3/src"), true),
                (Path::new("deps/dep-dir3/src/e.rs"), true),
                (Path::new("deps/dep-dir3/src/f.rs"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );
    });
}

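// Opening a buffer deep inside an unexpanded gitignored directory loads only
// the directories along that path, leaving sibling directories unscanned.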
#[gpui::test]
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "one": {
                "node_modules": {
                    "a": {
                        "a1.js": "a1",
                        "a2.js": "a2",
                    },
                    "b": {
                        "b1.js": "b1",
                        "b2.js": "b2",
                    },
                },
            },
            "two": {
                "x.js": "",
                "y.js": "",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );
    });

    // Open a file that is nested inside of a gitignored directory that
    // has not yet been expanded.
    let prev_read_dir_count = fs.read_dir_call_count();
    let buffer = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, cx| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("one/node_modules/a"), true),
                (Path::new("one/node_modules/b"), true),
                (Path::new("one/node_modules/b/b1.js"), true),
                (Path::new("one/node_modules/b/b2.js"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );

        assert_eq!(
            buffer.read(cx).file().unwrap().path().as_ref(),
            Path::new("one/node_modules/b/b1.js")
        );

        // Only the newly-expanded directories are scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
    });

    // Open another file in a different subdirectory of the same
    // gitignored directory.
    let prev_read_dir_count = fs.read_dir_call_count();
    let buffer = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, cx| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("one/node_modules/a"), true),
                (Path::new("one/node_modules/a/a1.js"), true),
                (Path::new("one/node_modules/a/a2.js"), true),
                (Path::new("one/node_modules/b"), true),
                (Path::new("one/node_modules/b/b1.js"), true),
                (Path::new("one/node_modules/b/b2.js"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );

        assert_eq!(
            buffer.read(cx).file().unwrap().path().as_ref(),
            Path::new("one/node_modules/a/a2.js")
        );

        // Only the newly-expanded directory is scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
    });
}

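// When a .gitignore change un-ignores a directory, its previously unloaded
// contents are scanned, and each newly-loaded directory is read exactly once.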
#[gpui::test]
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "a": {
                "a.js": "",
            },
            "b": {
                "b.js": "",
            },
            "node_modules": {
                "c": {
                    "c.js": "",
                },
                "d": {
                    "d.js": "",
                    "e": {
                        "e1.js": "",
                        "e2.js": "",
                    },
                    "f": {
                        "f1.js": "",
                        "f2.js": "",
                    }
                },
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Open a file within the gitignored directory, forcing some of its
    // subdirectories to be read, but not all.
    let read_dir_count_1 = fs.read_dir_call_count();
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
    })
    .recv()
    .await;

    // Those subdirectories are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("a"), false),
                (Path::new("a/a.js"), false),
                (Path::new("b"), false),
                (Path::new("b/b.js"), false),
                (Path::new("node_modules"), true),
                (Path::new("node_modules/c"), true),
                (Path::new("node_modules/d"), true),
                (Path::new("node_modules/d/d.js"), true),
                (Path::new("node_modules/d/e"), true),
                (Path::new("node_modules/d/f"), true),
            ]
        );
    });
    let read_dir_count_2 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);

    // Update the gitignore so that node_modules is no longer ignored,
    // but a subdirectory is ignored
    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
        .await
        .unwrap();
    cx.foreground().run_until_parked();

    // All of the directories that are no longer ignored are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("a"), false),
                (Path::new("a/a.js"), false),
                (Path::new("b"), false),
                (Path::new("b/b.js"), false),
                (Path::new("node_modules"), false),
                (Path::new("node_modules/c"), false),
                (Path::new("node_modules/c/c.js"), false),
                (Path::new("node_modules/d"), false),
                (Path::new("node_modules/d/d.js"), false),
                // This subdirectory is now ignored
                (Path::new("node_modules/d/e"), true),
                (Path::new("node_modules/d/f"), false),
                (Path::new("node_modules/d/f/f1.js"), false),
                (Path::new("node_modules/d/f/f2.js"), false),
            ]
        );
    });

    // Each of the newly-loaded directories is scanned only once.
    let read_dir_count_3 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_3 - read_dir_count_2, 4);
}

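// Ignore rules from the worktree's own .gitignore and from an ancestor
// directory's .gitignore are both applied, including to files created later.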
#[gpui::test]
async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
    // .gitignores are handled explicitly by Zed and do not use the git
    // machinery that the git_tests module checks
    let parent_dir = temp_tree(json!({
        ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
        "tree": {
            ".git": {},
            ".gitignore": "ignored-dir\n",
            "tracked-dir": {
                "tracked-file1": "",
                "ancestor-ignored-file1": "",
            },
            "ignored-dir": {
                "ignored-file1": ""
            }
        }
    }));
    let dir = parent_dir.path().join("tree");

    let tree = Worktree::local(
        build_client(cx),
        dir.as_path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        assert!(
            !tree
                .entry_for_path("tracked-dir/tracked-file1")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("ignored-dir/ignored-file1")
                .unwrap()
                .is_ignored
        );
    });

    std::fs::write(dir.join("tracked-dir/tracked-file2"), "").unwrap();
    std::fs::write(dir.join("tracked-dir/ancestor-ignored-file2"), "").unwrap();
    std::fs::write(dir.join("ignored-dir/ignored-file2"), "").unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| {
        let tree = tree.read(cx);
        assert!(
            !tree
                .entry_for_path("tracked-dir/tracked-file2")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("ignored-dir/ignored-file2")
                .unwrap()
                .is_ignored
        );
        assert!(tree.entry_for_path(".git").unwrap().is_ignored);
    });
}

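// Files written through the worktree appear in the snapshot with the correct
// ignore state.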
#[gpui::test]
async fn test_write_file(cx: &mut TestAppContext) {
    let dir = temp_tree(json!({
        ".git": {},
        ".gitignore": "ignored-dir\n",
        "tracked-dir": {},
        "ignored-dir": {}
    }));

    let tree = Worktree::local(
        build_client(cx),
        dir.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.update(cx, |tree, cx| {
        tree.as_local().unwrap().write_file(
            Path::new("tracked-dir/file.txt"),
            "hello".into(),
            Default::default(),
            cx,
        )
    })
    .await
    .unwrap();
    tree.update(cx, |tree, cx| {
        tree.as_local().unwrap().write_file(
            Path::new("ignored-dir/file.txt"),
            "world".into(),
            Default::default(),
            cx,
        )
    })
    .await
    .unwrap();

    tree.read_with(cx, |tree, _| {
        let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
        let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
        assert!(!tracked.is_ignored);
        assert!(ignored.is_ignored);
    });
}

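// Creating a directory while the initial scan is still running must yield the
// same snapshot locally as for an observer applying the streamed updates.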
#[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "b": {},
            "c": {},
            "d": {},
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        "/root".as_ref(),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let snapshot1 = tree.update(cx, |tree, cx| {
        let tree = tree.as_local_mut().unwrap();
        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
        let _ = tree.observe_updates(0, cx, {
            let snapshot = snapshot.clone();
            move |update| {
                snapshot.lock().apply_remote_update(update).unwrap();
                async { true }
            }
        });
        snapshot
    });

    let entry = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("a/e".as_ref(), true, cx)
        })
        .await
        .unwrap();
    assert!(entry.is_dir());

    cx.foreground().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
    });

    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
    assert_eq!(
        snapshot1.lock().entries(true).collect::<Vec<_>>(),
        snapshot2.entries(true).collect::<Vec<_>>()
    );
}

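// Randomized test: mutate the worktree while its initial scan is in progress,
// then verify that snapshots taken along the way converge to the final state
// once the observed updates are replayed onto them.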
#[gpui::test(iterations = 100)]
async fn test_random_worktree_operations_during_initial_scan(
    cx: &mut TestAppContext,
    mut rng: StdRng,
) {
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(5);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new("/test");
    let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        build_client(cx),
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    for _ in 0..operations {
        worktree
            .update(cx, |worktree, cx| {
                randomly_mutate_worktree(worktree, &mut rng, cx)
            })
            .await
            .log_err();
        worktree.read_with(cx, |tree, _| {
            tree.as_local().unwrap().snapshot().check_invariants()
        });

        if rng.gen_bool(0.6) {
            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
        }
    }

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    cx.foreground().run_until_parked();

    let final_snapshot = worktree.read_with(cx, |tree, _| {
        let tree = tree.as_local().unwrap();
        let snapshot = tree.snapshot();
        snapshot.check_invariants();
        snapshot
    });

    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
        let mut updated_snapshot = snapshot.clone();
        for update in updates.lock().iter() {
            if update.scan_id >= updated_snapshot.scan_id() as u64 {
                updated_snapshot
                    .apply_remote_update(update.clone())
                    .unwrap();
            }
        }

        assert_eq!(
            updated_snapshot.entries(true).collect::<Vec<_>>(),
            final_snapshot.entries(true).collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
        );
    }
}

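// Randomized test: interleave worktree and filesystem mutations with partially
// flushed FS events, then check snapshot invariants and that stored snapshots
// converge to the final state when the observed updates are replayed.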
#[gpui::test(iterations = 100)]
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(40);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new("/test");
    let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        build_client(cx),
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    fs.as_fake().pause_events();
    let mut snapshots = Vec::new();
    let mut mutations_len = operations;
    while mutations_len > 1 {
        if rng.gen_bool(0.2) {
            worktree
                .update(cx, |worktree, cx| {
                    randomly_mutate_worktree(worktree, &mut rng, cx)
                })
                .await
                .log_err();
        } else {
            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
        }

        let buffered_event_count = fs.as_fake().buffered_event_count();
        if buffered_event_count > 0 && rng.gen_bool(0.3) {
            let len = rng.gen_range(0..=buffered_event_count);
            log::info!("flushing {} events", len);
            fs.as_fake().flush_events(len);
        } else {
            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
            mutations_len -= 1;
        }

        cx.foreground().run_until_parked();
        if rng.gen_bool(0.2) {
            log::info!("storing snapshot {}", snapshots.len());
            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
            snapshots.push(snapshot);
        }
    }

    log::info!("quiescing");
    fs.as_fake().flush_events(usize::MAX);
    cx.foreground().run_until_parked();

    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    snapshot.check_invariants();
    let expanded_paths = snapshot
        .expanded_entries()
        .map(|e| e.path.clone())
        .collect::<Vec<_>>();

    {
        let new_worktree = Worktree::local(
            build_client(cx),
            root_dir,
            true,
            fs.clone(),
            Default::default(),
            &mut cx.to_async(),
        )
        .await
        .unwrap();
        new_worktree
            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
            .await;
        new_worktree
            .update(cx, |tree, _| {
                tree.as_local_mut()
                    .unwrap()
                    .refresh_entries_for_paths(expanded_paths)
            })
            .recv()
            .await;
        let new_snapshot =
            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        assert_eq!(
            snapshot.entries_without_ids(true),
            new_snapshot.entries_without_ids(true)
        );
    }

    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
        for update in updates.lock().iter() {
            if update.scan_id >= prev_snapshot.scan_id() as u64 {
                prev_snapshot.apply_remote_update(update.clone()).unwrap();
            }
        }

        assert_eq!(
            prev_snapshot
                .entries(true)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            snapshot
                .entries(true)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }

    fn ignore_pending_dir(entry: &Entry) -> Entry {
        let mut entry = entry.clone();
        if entry.kind == EntryKind::PendingDir {
            entry.kind = EntryKind::Dir
        }
        entry
    }
}

// The worktree's `UpdatedEntries` event can be used to follow along with
// all changes to the worktree's snapshot.
fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
    let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
    cx.subscribe(&cx.handle(), move |tree, _, event, _| {
        if let Event::UpdatedEntries(changes) = event {
            for (path, _, change_type) in changes.iter() {
                let entry = tree.entry_for_path(&path).cloned();
                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
                    Ok(ix) | Err(ix) => ix,
                };
                match change_type {
                    PathChange::Loaded => entries.insert(ix, entry.unwrap()),
                    PathChange::Added => entries.insert(ix, entry.unwrap()),
                    PathChange::Removed => drop(entries.remove(ix)),
                    PathChange::Updated => {
                        let entry = entry.unwrap();
                        let existing_entry = entries.get_mut(ix).unwrap();
                        assert_eq!(existing_entry.path, entry.path);
                        *existing_entry = entry;
                    }
                    PathChange::AddedOrUpdated => {
                        let entry = entry.unwrap();
                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
                            *entries.get_mut(ix).unwrap() = entry;
                        } else {
                            entries.insert(ix, entry);
                        }
                    }
                }
            }

            let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
        }
    })
    .detach();
}

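// Performs one random worktree mutation: deleting, renaming, creating, or
// overwriting an entry.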
fn randomly_mutate_worktree(
    worktree: &mut Worktree,
    rng: &mut impl Rng,
    cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> {
    log::info!("mutating worktree");
    let worktree = worktree.as_local_mut().unwrap();
    let snapshot = worktree.snapshot();
    let entry = snapshot.entries(false).choose(rng).unwrap();

    match rng.gen_range(0_u32..100) {
        0..=33 if entry.path.as_ref() != Path::new("") => {
            log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
            worktree.delete_entry(entry.id, cx).unwrap()
        }
        ..=66 if entry.path.as_ref() != Path::new("") => {
            let other_entry = snapshot.entries(false).choose(rng).unwrap();
            let new_parent_path = if other_entry.is_dir() {
                other_entry.path.clone()
            } else {
                other_entry.path.parent().unwrap().into()
            };
            let mut new_path = new_parent_path.join(random_filename(rng));
            if new_path.starts_with(&entry.path) {
                new_path = random_filename(rng).into();
            }

            log::info!(
                "renaming entry {:?} ({}) to {:?}",
                entry.path,
                entry.id.0,
                new_path
            );
            let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
            cx.foreground().spawn(async move {
                task.await?;
                Ok(())
            })
        }
        _ => {
            let task = if entry.is_dir() {
                let child_path = entry.path.join(random_filename(rng));
                let is_dir = rng.gen_bool(0.3);
                log::info!(
                    "creating {} at {:?}",
                    if is_dir { "dir" } else { "file" },
                    child_path,
                );
                worktree.create_entry(child_path, is_dir, cx)
            } else {
                log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
                worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
            };
            cx.foreground().spawn(async move {
                task.await?;
                Ok(())
            })
        }
    }
}

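// Performs one random filesystem mutation: creating a file or directory,
// writing a .gitignore, renaming, or deleting an existing path.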
async fn randomly_mutate_fs(
    fs: &Arc<dyn Fs>,
    root_path: &Path,
    insertion_probability: f64,
    rng: &mut impl Rng,
) {
    log::info!("mutating fs");
    let mut files = Vec::new();
    let mut dirs = Vec::new();
    for path in fs.as_fake().paths(false) {
        if path.starts_with(root_path) {
            if fs.is_file(&path).await {
                files.push(path);
            } else {
                dirs.push(path);
            }
        }
    }

    if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
        let path = dirs.choose(rng).unwrap();
        let new_path = path.join(random_filename(rng));

        if rng.gen() {
            log::info!(
                "creating dir {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_dir(&new_path).await.unwrap();
        } else {
            log::info!(
                "creating file {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_file(&new_path, Default::default()).await.unwrap();
        }
    } else if rng.gen_bool(0.05) {
        let ignore_dir_path = dirs.choose(rng).unwrap();
        let ignore_path = ignore_dir_path.join(&*GITIGNORE);

        let subdirs = dirs
            .iter()
            .filter(|d| d.starts_with(&ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let subfiles = files
            .iter()
            .filter(|d| d.starts_with(&ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let files_to_ignore = {
            let len = rng.gen_range(0..=subfiles.len());
            subfiles.choose_multiple(rng, len)
        };
        let dirs_to_ignore = {
            let len = rng.gen_range(0..subdirs.len());
            subdirs.choose_multiple(rng, len)
        };

        let mut ignore_contents = String::new();
        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
            writeln!(
                ignore_contents,
                "{}",
                path_to_ignore
                    .strip_prefix(&ignore_dir_path)
                    .unwrap()
                    .to_str()
                    .unwrap()
            )
            .unwrap();
        }
        log::info!(
            "creating gitignore {:?} with contents:\n{}",
            ignore_path.strip_prefix(&root_path).unwrap(),
            ignore_contents
        );
        fs.save(
            &ignore_path,
            &ignore_contents.as_str().into(),
            Default::default(),
        )
        .await
        .unwrap();
    } else {
        let old_path = {
            let file_path = files.choose(rng);
            let dir_path = dirs[1..].choose(rng);
            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
        };

        let is_rename = rng.gen();
        if is_rename {
            let new_path_parent = dirs
                .iter()
                .filter(|d| !d.starts_with(old_path))
                .choose(rng)
                .unwrap();

            let overwrite_existing_dir =
                !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
            let new_path = if overwrite_existing_dir {
                fs.remove_dir(
                    &new_path_parent,
                    RemoveOptions {
                        recursive: true,
                        ignore_if_not_exists: true,
                    },
                )
                .await
                .unwrap();
                new_path_parent.to_path_buf()
            } else {
                new_path_parent.join(random_filename(rng))
            };

            log::info!(
                "renaming {:?} to {}{:?}",
                old_path.strip_prefix(&root_path).unwrap(),
                if overwrite_existing_dir {
                    "overwrite "
                } else {
                    ""
                },
                new_path.strip_prefix(&root_path).unwrap()
            );
            fs.rename(
                &old_path,
                &new_path,
                fs::RenameOptions {
                    overwrite: true,
                    ignore_if_exists: true,
                },
            )
            .await
            .unwrap();
        } else if fs.is_file(&old_path).await {
            log::info!(
                "deleting file {:?}",
                old_path.strip_prefix(&root_path).unwrap()
            );
            fs.remove_file(old_path, Default::default()).await.unwrap();
        } else {
            log::info!(
                "deleting dir {:?}",
                old_path.strip_prefix(&root_path).unwrap()
            );
            fs.remove_dir(
                &old_path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await
            .unwrap();
        }
    }
}

fn random_filename(rng: &mut impl Rng) -> String {
    (0..6)
        .map(|_| rng.sample(rand::distributions::Alphanumeric))
        .map(char::from)
        .collect()
}

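// Renaming a repository's work directory keeps the repository and its file
// statuses associated with the new location.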
#[gpui::test]
async fn test_rename_work_directory(cx: &mut TestAppContext) {
    let root = temp_tree(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },
    }));
    let root_path = root.path();

    let tree = Worktree::local(
        build_client(cx),
        root_path,
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").ok();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.flush_fs_events(cx).await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        let (work_dir, _) = tree.repositories().next().unwrap();
        assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
        assert_eq!(
            tree.status_for_file(Path::new("projects/project1/a")),
            Some(GitFileStatus::Modified)
        );
        assert_eq!(
            tree.status_for_file(Path::new("projects/project1/b")),
            Some(GitFileStatus::Added)
        );
    });

    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .ok();
    tree.flush_fs_events(cx).await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        let (work_dir, _) = tree.repositories().next().unwrap();
        assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
        assert_eq!(
            tree.status_for_file(Path::new("projects/project2/a")),
            Some(GitFileStatus::Modified)
        );
        assert_eq!(
            tree.status_for_file(Path::new("projects/project2/b")),
            Some(GitFileStatus::Added)
        );
    });
}

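// `repository_for_path` resolves each path to its innermost containing
// repository, and repository events fire when .git contents change.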
#[gpui::test]
async fn test_git_repository_for_path(cx: &mut TestAppContext) {
    let root = temp_tree(json!({
        "c.txt": "",
        "dir1": {
            ".git": {},
            "deps": {
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": ""
                    }
                }
            },
            "src": {
                "b.txt": ""
            }
        },
    }));

    let tree = Worktree::local(
        build_client(cx),
        root.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _cx| {
        let tree = tree.as_local().unwrap();

        assert!(tree.repository_for_path("c.txt".as_ref()).is_none());

        let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
        assert_eq!(
            entry
                .work_directory(tree)
                .map(|directory| directory.as_ref().to_owned()),
            Some(Path::new("dir1").to_owned())
        );

        let entry = tree
            .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
            .unwrap();
        assert_eq!(
            entry
                .work_directory(tree)
                .map(|directory| directory.as_ref().to_owned()),
            Some(Path::new("dir1/deps/dep1").to_owned())
        );

        let entries = tree.files(false, 0);

        let paths_with_repos = tree
            .entries_with_repositories(entries)
            .map(|(entry, repo)| {
                (
                    entry.path.as_ref(),
                    repo.and_then(|repo| {
                        repo.work_directory(&tree)
                            .map(|work_directory| work_directory.0.to_path_buf())
                    }),
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            paths_with_repos,
            &[
                (Path::new("c.txt"), None),
                (
                    Path::new("dir1/deps/dep1/src/a.txt"),
                    Some(Path::new("dir1/deps/dep1").into())
                ),
                (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
            ]
        );
    });

    let repo_update_events = Arc::new(Mutex::new(vec![]));
    tree.update(cx, |_, cx| {
        let repo_update_events = repo_update_events.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedGitRepositories(update) = event {
                repo_update_events.lock().push(update.clone());
            }
        })
        .detach();
    });

    std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
    tree.flush_fs_events(cx).await;

    assert_eq!(
        repo_update_events.lock()[0]
            .iter()
            .map(|e| e.0.clone())
            .collect::<Vec<Arc<Path>>>(),
        vec![Path::new("dir1").into()]
    );

    std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _cx| {
        let tree = tree.as_local().unwrap();

        assert!(tree
            .repository_for_path("dir1/src/b.txt".as_ref())
            .is_none());
    });
}

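// Exercises git status tracking against a real repository: adds, commits,
// resets, stashes, ignore-rule changes, and directory renames.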
#[gpui::test]
async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
    const IGNORE_RULE: &'static str = "**/target";

    let root = temp_tree(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },
    }));

    let tree = Worktree::local(
        build_client(cx),
        root.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    const A_TXT: &'static str = "a.txt";
    const B_TXT: &'static str = "b.txt";
    const E_TXT: &'static str = "c/d/e.txt";
    const F_TXT: &'static str = "f.txt";
    const DOTGITIGNORE: &'static str = ".gitignore";
    const BUILD_FILE: &'static str = "target/build_file";
    let project_path: &Path = &Path::new("project");

    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(Path::new(A_TXT), &repo);
    git_add(Path::new(E_TXT), &repo);
    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Initial commit", &repo);

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that the right git state is observed on startup
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(snapshot.repositories().count(), 1);
        let (dir, _) = snapshot.repositories().next().unwrap();
        assert_eq!(dir.as_ref(), Path::new("project"));

        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );
    });

    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(project_path.join(A_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    git_add(Path::new(A_TXT), &repo);
    git_add(Path::new(B_TXT), &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that repo only changes are tracked
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );

        assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
    });

    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that more complex repo changes are tracked
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(E_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &'static str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
            Some(GitFileStatus::Added)
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(
                project_path
                    .join(Path::new(renamed_dir_name))
                    .join(RENAMED_FILE)
            ),
            Some(GitFileStatus::Added)
        );
    });
}

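// Directory entries take on the most significant status of their descendants
// (conflict over modified over added) when statuses are propagated.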
#[gpui::test]
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".git": {},
            "a": {
                "b": {
                    "c1.txt": "",
                    "c2.txt": "",
                },
                "d": {
                    "e1.txt": "",
                    "e2.txt": "",
                    "e3.txt": "",
                }
            },
            "f": {
                "no-status.txt": ""
            },
            "g": {
                "h1.txt": "",
                "h2.txt": ""
            },
        }),
    )
    .await;

    fs.set_status_for_repo_via_git_operation(
        &Path::new("/root/.git"),
        &[
            (Path::new("a/b/c1.txt"), GitFileStatus::Added),
            (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
            (Path::new("g/h2.txt"), GitFileStatus::Conflict),
        ],
    );

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    cx.foreground().run_until_parked();
    let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new(""), Some(GitFileStatus::Conflict)),
            (Path::new("a"), Some(GitFileStatus::Modified)),
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
            (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f/no-status.txt"), None),
        ],
    );

    #[track_caller]
    fn check_propagated_statuses(
        snapshot: &Snapshot,
        expected_statuses: &[(&Path, Option<GitFileStatus>)],
    ) {
        let mut entries = expected_statuses
            .iter()
            .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
            .collect::<Vec<_>>();
        snapshot.propagate_git_statuses(&mut entries);
        assert_eq!(
            entries
                .iter()
                .map(|e| (e.path.as_ref(), e.git_status))
                .collect::<Vec<_>>(),
            expected_statuses
        );
    }
}

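// Builds a client whose HTTP requests all return 404 responses.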
fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
    let http_client = FakeHttpClient::with_404_response();
    cx.read(|cx| Client::new(http_client, cx))
}

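// The helpers below drive a real repository through git2 so the git-related
// tests above can exercise Zed's status tracking.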
#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
    git2::Repository::init(path).expect("Failed to initialize git repository")
}

#[track_caller]
fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
    let path = path.as_ref();
    let mut index = repo.index().expect("Failed to get index");
    index.add_path(path).expect("Failed to add path");
    index.write().expect("Failed to write index");
}

#[track_caller]
fn git_remove_index(path: &Path, repo: &git2::Repository) {
    let mut index = repo.index().expect("Failed to get index");
    index.remove_path(path).expect("Failed to remove path");
    index.write().expect("Failed to write index");
}

#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    let oid = repo.index().unwrap().write_tree().unwrap();
    let tree = repo.find_tree(oid).unwrap();
    if let Some(head) = repo.head().ok() {
        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();

        let parent_commit = parent_obj.as_commit().unwrap();

        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[parent_commit],
        )
        .expect("Failed to commit with parent");
    } else {
        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
            .expect("Failed to commit");
    }
}

#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}

#[track_caller]
fn git_reset(offset: usize, repo: &git2::Repository) {
    let head = repo.head().expect("Couldn't get repo head");
    let object = head.peel(git2::ObjectType::Commit).unwrap();
    let commit = object.as_commit().unwrap();
    let new_head = commit
        .parents()
        .inspect(|parent| {
            parent.message();
        })
        .skip(offset)
        .next()
        .expect("Not enough history");
    repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
        .expect("Could not reset");
}

#[allow(dead_code)]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}