use crate::{
    worktree::{Event, Snapshot, WorktreeHandle},
    Entry, EntryKind, PathChange, Worktree,
};
use anyhow::Result;
use client::Client;
use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
use git::GITIGNORE;
use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
use parking_lot::Mutex;
use postage::stream::Stream;
use pretty_assertions::assert_eq;
use rand::prelude::*;
use serde_json::json;
use std::{
    env,
    fmt::Write,
    mem,
    path::{Path, PathBuf},
    sync::Arc,
};
use util::{http::FakeHttpClient, test::temp_tree, ResultExt};

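// Verifies basic worktree traversal: entries(false) skips gitignored files,
// while entries(true) includes them.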
#[gpui::test]
async fn test_traversal(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "a/b\n",
            "a": {
                "b": "",
                "c": "",
            }
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new(".gitignore"),
                Path::new("a"),
                Path::new("a/c"),
            ]
        );
        assert_eq!(
            tree.entries(true)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new(".gitignore"),
                Path::new("a"),
                Path::new("a/b"),
                Path::new("a/c"),
            ]
        );
    })
}

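// Verifies descendent_entries with and without directories and ignored
// entries included, before and after expanding a gitignored directory.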
#[gpui::test]
async fn test_descendent_entries(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "a": "",
            "b": {
                "c": {
                    "d": ""
                },
                "e": {}
            },
            "f": "",
            "g": {
                "h": {}
            },
            "i": {
                "j": {
                    "k": ""
                },
                "l": {}
            },
            ".gitignore": "i/j\n",
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.descendent_entries(false, false, Path::new("b"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("b/c/d")]
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("b"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new("b"),
                Path::new("b/c"),
                Path::new("b/c/d"),
                Path::new("b/e"),
            ]
        );

        assert_eq!(
            tree.descendent_entries(false, false, Path::new("g"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            Vec::<PathBuf>::new()
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("g"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("g"), Path::new("g/h")]
        );
    });

    // Expand the gitignored directory.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("i/j").into()])
    })
    .recv()
    .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.descendent_entries(false, false, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            Vec::<PathBuf>::new()
        );
        assert_eq!(
            tree.descendent_entries(false, true, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("i/j/k")]
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("i"), Path::new("i/l")]
        );
    })
}

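// Symlinks that form a cycle (each one pointing back at its parent) should be
// listed as entries but not followed during scanning.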
#[gpui::test(iterations = 10)]
async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "lib": {
                "a": {
                    "a.txt": ""
                },
                "b": {
                    "b.txt": ""
                }
            }
        }),
    )
    .await;
    fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
    fs.insert_symlink("/root/lib/b/lib", "..".into()).await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new("lib"),
                Path::new("lib/a"),
                Path::new("lib/a/a.txt"),
                Path::new("lib/a/lib"),
                Path::new("lib/b"),
                Path::new("lib/b/b.txt"),
                Path::new("lib/b/lib"),
            ]
        );
    });

    fs.rename(
        Path::new("/root/lib/a/lib"),
        Path::new("/root/lib/a/lib-2"),
        Default::default(),
    )
    .await
    .unwrap();
    executor.run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new("lib"),
                Path::new("lib/a"),
                Path::new("lib/a/a.txt"),
                Path::new("lib/a/lib-2"),
                Path::new("lib/b"),
                Path::new("lib/b/b.txt"),
                Path::new("lib/b/lib"),
            ]
        );
    });
}

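// Symlinks that point outside the worktree root are marked as external, and
// their contents are only loaded when explicitly expanded.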
#[gpui::test]
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir1": {
                "deps": {
                    // symlinks here
                },
                "src": {
                    "a.rs": "",
                    "b.rs": "",
                },
            },
            "dir2": {
                "src": {
                    "c.rs": "",
                    "d.rs": "",
                }
            },
            "dir3": {
                "deps": {},
                "src": {
                    "e.rs": "",
                    "f.rs": "",
                },
            }
        }),
    )
    .await;

    // These symlinks point to directories outside of the worktree's root, dir1.
    fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
        .await;
    fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
        .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root/dir1"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    let tree_updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |_, cx| {
        let tree_updates = tree_updates.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedEntries(update) = event {
                tree_updates.lock().extend(
                    update
                        .iter()
                        .map(|(path, _, change)| (path.clone(), *change)),
                );
            }
        })
        .detach();
    });

    // The symlinked directories are not scanned by default.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );

        assert_eq!(
            tree.entry_for_path("deps/dep-dir2").unwrap().kind,
            EntryKind::UnloadedDir
        );
    });

    // Expand one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
    })
    .recv()
    .await;

    // The expanded directory's contents are loaded. Subdirectories are
    // not scanned yet.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("deps/dep-dir3/deps"), true),
                (Path::new("deps/dep-dir3/src"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );
    });
    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
            (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
            (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
        ]
    );

    // Expand a subdirectory of one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
    })
    .recv()
    .await;

    // The expanded subdirectory's contents are loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("deps/dep-dir3/deps"), true),
                (Path::new("deps/dep-dir3/src"), true),
                (Path::new("deps/dep-dir3/src/e.rs"), true),
                (Path::new("deps/dep-dir3/src/f.rs"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );
    });

    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
            (
                Path::new("deps/dep-dir3/src/e.rs").into(),
                PathChange::Loaded
            ),
            (
                Path::new("deps/dep-dir3/src/f.rs").into(),
                PathChange::Loaded
            )
        ]
    );
}

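// Opening a buffer deep inside an unexpanded, gitignored directory should
// lazily load just the directories needed to reach that file.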
#[gpui::test]
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "one": {
                "node_modules": {
                    "a": {
                        "a1.js": "a1",
                        "a2.js": "a2",
                    },
                    "b": {
                        "b1.js": "b1",
                        "b2.js": "b2",
                    },
                },
            },
            "two": {
                "x.js": "",
                "y.js": "",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );
    });

    // Open a file that is nested inside of a gitignored directory that
    // has not yet been expanded.
    let prev_read_dir_count = fs.read_dir_call_count();
    let buffer = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, cx| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("one/node_modules/a"), true),
                (Path::new("one/node_modules/b"), true),
                (Path::new("one/node_modules/b/b1.js"), true),
                (Path::new("one/node_modules/b/b2.js"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );

        assert_eq!(
            buffer.read(cx).file().unwrap().path().as_ref(),
            Path::new("one/node_modules/b/b1.js")
        );

        // Only the newly-expanded directories are scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
    });

    // Open another file in a different subdirectory of the same
    // gitignored directory.
    let prev_read_dir_count = fs.read_dir_call_count();
    let buffer = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, cx| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("one/node_modules/a"), true),
                (Path::new("one/node_modules/a/a1.js"), true),
                (Path::new("one/node_modules/a/a2.js"), true),
                (Path::new("one/node_modules/b"), true),
                (Path::new("one/node_modules/b/b1.js"), true),
                (Path::new("one/node_modules/b/b2.js"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );

        assert_eq!(
            buffer.read(cx).file().unwrap().path().as_ref(),
            Path::new("one/node_modules/a/a2.js")
        );

        // Only the newly-expanded directory is scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
    });
}

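// When a .gitignore change un-ignores a directory, its unloaded contents
// should be scanned, and each newly-loaded directory should be read only once.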
#[gpui::test]
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "a": {
                "a.js": "",
            },
            "b": {
                "b.js": "",
            },
            "node_modules": {
                "c": {
                    "c.js": "",
                },
                "d": {
                    "d.js": "",
                    "e": {
                        "e1.js": "",
                        "e2.js": "",
                    },
                    "f": {
                        "f1.js": "",
                        "f2.js": "",
                    }
                },
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Open a file within the gitignored directory, forcing some of its
    // subdirectories to be read, but not all.
    let read_dir_count_1 = fs.read_dir_call_count();
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
    })
    .recv()
    .await;

    // Those subdirectories are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("a"), false),
                (Path::new("a/a.js"), false),
                (Path::new("b"), false),
                (Path::new("b/b.js"), false),
                (Path::new("node_modules"), true),
                (Path::new("node_modules/c"), true),
                (Path::new("node_modules/d"), true),
                (Path::new("node_modules/d/d.js"), true),
                (Path::new("node_modules/d/e"), true),
                (Path::new("node_modules/d/f"), true),
            ]
        );
    });
    let read_dir_count_2 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);

    // Update the gitignore so that node_modules is no longer ignored,
    // but one of its subdirectories is.
    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
        .await
        .unwrap();
    cx.foreground().run_until_parked();

    // All of the directories that are no longer ignored are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("a"), false),
                (Path::new("a/a.js"), false),
                (Path::new("b"), false),
                (Path::new("b/b.js"), false),
                // This directory is no longer ignored
                (Path::new("node_modules"), false),
                (Path::new("node_modules/c"), false),
                (Path::new("node_modules/c/c.js"), false),
                (Path::new("node_modules/d"), false),
                (Path::new("node_modules/d/d.js"), false),
                // This subdirectory is now ignored
                (Path::new("node_modules/d/e"), true),
                (Path::new("node_modules/d/f"), false),
                (Path::new("node_modules/d/f/f1.js"), false),
                (Path::new("node_modules/d/f/f2.js"), false),
            ]
        );
    });

    // Each of the newly-loaded directories is scanned only once.
    let read_dir_count_3 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
}

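// Files created after the initial scan should pick up ignore rules from the
// worktree's own .gitignore as well as .gitignores in ancestor directories.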
#[gpui::test]
async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
    // .gitignores are handled explicitly by Zed and do not use the git
    // machinery that the git_tests module checks.
    let parent_dir = temp_tree(json!({
        ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
        "tree": {
            ".git": {},
            ".gitignore": "ignored-dir\n",
            "tracked-dir": {
                "tracked-file1": "",
                "ancestor-ignored-file1": "",
            },
            "ignored-dir": {
                "ignored-file1": ""
            }
        }
    }));
    let dir = parent_dir.path().join("tree");

    let tree = Worktree::local(
        build_client(cx),
        dir.as_path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        assert!(
            !tree
                .entry_for_path("tracked-dir/tracked-file1")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("ignored-dir/ignored-file1")
                .unwrap()
                .is_ignored
        );
    });

    std::fs::write(dir.join("tracked-dir/tracked-file2"), "").unwrap();
    std::fs::write(dir.join("tracked-dir/ancestor-ignored-file2"), "").unwrap();
    std::fs::write(dir.join("ignored-dir/ignored-file2"), "").unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| {
        let tree = tree.read(cx);
        assert!(
            !tree
                .entry_for_path("tracked-dir/tracked-file2")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("ignored-dir/ignored-file2")
                .unwrap()
                .is_ignored
        );
        assert!(tree.entry_for_path(".git").unwrap().is_ignored);
    });
}

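// Writing files through the worktree should create entries with the correct
// ignored status in both tracked and ignored directories.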
#[gpui::test]
async fn test_write_file(cx: &mut TestAppContext) {
    let dir = temp_tree(json!({
        ".git": {},
        ".gitignore": "ignored-dir\n",
        "tracked-dir": {},
        "ignored-dir": {}
    }));

    let tree = Worktree::local(
        build_client(cx),
        dir.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.update(cx, |tree, cx| {
        tree.as_local().unwrap().write_file(
            Path::new("tracked-dir/file.txt"),
            "hello".into(),
            Default::default(),
            cx,
        )
    })
    .await
    .unwrap();
    tree.update(cx, |tree, cx| {
        tree.as_local().unwrap().write_file(
            Path::new("ignored-dir/file.txt"),
            "world".into(),
            Default::default(),
            cx,
        )
    })
    .await
    .unwrap();

    tree.read_with(cx, |tree, _| {
        let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
        let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
        assert!(!tracked.is_ignored);
        assert!(ignored.is_ignored);
    });
}

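// Creating a directory while the initial scan is still in progress should
// keep snapshots built from observed updates in sync with the local snapshot.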
#[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "b": {},
            "c": {},
            "d": {},
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        "/root".as_ref(),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let snapshot1 = tree.update(cx, |tree, cx| {
        let tree = tree.as_local_mut().unwrap();
        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
        let _ = tree.observe_updates(0, cx, {
            let snapshot = snapshot.clone();
            move |update| {
                snapshot.lock().apply_remote_update(update).unwrap();
                async { true }
            }
        });
        snapshot
    });

    let entry = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("a/e".as_ref(), true, cx)
        })
        .await
        .unwrap();
    assert!(entry.is_dir());

    cx.foreground().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
    });

    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
    assert_eq!(
        snapshot1.lock().entries(true).collect::<Vec<_>>(),
        snapshot2.entries(true).collect::<Vec<_>>()
    );
}

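// Randomized test: mutate the worktree during the initial scan and verify that
// earlier snapshots patched with the observed updates converge on the final state.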
#[gpui::test(iterations = 100)]
async fn test_random_worktree_operations_during_initial_scan(
    cx: &mut TestAppContext,
    mut rng: StdRng,
) {
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(5);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new("/test");
    let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        build_client(cx),
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    for _ in 0..operations {
        worktree
            .update(cx, |worktree, cx| {
                randomly_mutate_worktree(worktree, &mut rng, cx)
            })
            .await
            .log_err();
        worktree.read_with(cx, |tree, _| {
            tree.as_local().unwrap().snapshot().check_invariants(true)
        });

        if rng.gen_bool(0.6) {
            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
        }
    }

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    cx.foreground().run_until_parked();

    let final_snapshot = worktree.read_with(cx, |tree, _| {
        let tree = tree.as_local().unwrap();
        let snapshot = tree.snapshot();
        snapshot.check_invariants(true);
        snapshot
    });

    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
        let mut updated_snapshot = snapshot.clone();
        for update in updates.lock().iter() {
            if update.scan_id >= updated_snapshot.scan_id() as u64 {
                updated_snapshot
                    .apply_remote_update(update.clone())
                    .unwrap();
            }
        }

        assert_eq!(
            updated_snapshot.entries(true).collect::<Vec<_>>(),
            final_snapshot.entries(true).collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
        );
    }
}

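// Randomized test: interleave worktree and filesystem mutations, then verify
// snapshot invariants, rescan consistency, and reconstruction of snapshots
// from observed updates.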
#[gpui::test(iterations = 100)]
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(40);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new("/test");
    let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        build_client(cx),
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    fs.as_fake().pause_events();
    let mut snapshots = Vec::new();
    let mut mutations_len = operations;
    while mutations_len > 1 {
        if rng.gen_bool(0.2) {
            worktree
                .update(cx, |worktree, cx| {
                    randomly_mutate_worktree(worktree, &mut rng, cx)
                })
                .await
                .log_err();
        } else {
            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
        }

        let buffered_event_count = fs.as_fake().buffered_event_count();
        if buffered_event_count > 0 && rng.gen_bool(0.3) {
            let len = rng.gen_range(0..=buffered_event_count);
            log::info!("flushing {} events", len);
            fs.as_fake().flush_events(len);
        } else {
            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
            mutations_len -= 1;
        }

        cx.foreground().run_until_parked();
        if rng.gen_bool(0.2) {
            log::info!("storing snapshot {}", snapshots.len());
            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
            snapshots.push(snapshot);
        }
    }

    log::info!("quiescing");
    fs.as_fake().flush_events(usize::MAX);
    cx.foreground().run_until_parked();

    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    snapshot.check_invariants(true);
    let expanded_paths = snapshot
        .expanded_entries()
        .map(|e| e.path.clone())
        .collect::<Vec<_>>();

    {
        let new_worktree = Worktree::local(
            build_client(cx),
            root_dir,
            true,
            fs.clone(),
            Default::default(),
            &mut cx.to_async(),
        )
        .await
        .unwrap();
        new_worktree
            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
            .await;
        new_worktree
            .update(cx, |tree, _| {
                tree.as_local_mut()
                    .unwrap()
                    .refresh_entries_for_paths(expanded_paths)
            })
            .recv()
            .await;
        let new_snapshot =
            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        assert_eq!(
            snapshot.entries_without_ids(true),
            new_snapshot.entries_without_ids(true)
        );
    }

    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
        for update in updates.lock().iter() {
            if update.scan_id >= prev_snapshot.scan_id() as u64 {
                prev_snapshot.apply_remote_update(update.clone()).unwrap();
            }
        }

        assert_eq!(
            prev_snapshot
                .entries(true)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            snapshot
                .entries(true)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }

    fn ignore_pending_dir(entry: &Entry) -> Entry {
        let mut entry = entry.clone();
        if entry.kind.is_dir() {
            entry.kind = EntryKind::Dir
        }
        entry
    }
}

// The worktree's `UpdatedEntries` event can be used to follow along with
// all changes to the worktree's snapshot.
fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
    let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
    cx.subscribe(&cx.handle(), move |tree, _, event, _| {
        if let Event::UpdatedEntries(changes) = event {
            for (path, _, change_type) in changes.iter() {
                let entry = tree.entry_for_path(&path).cloned();
                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
                    Ok(ix) | Err(ix) => ix,
                };
                match change_type {
                    PathChange::Added => entries.insert(ix, entry.unwrap()),
                    PathChange::Removed => drop(entries.remove(ix)),
                    PathChange::Updated => {
                        let entry = entry.unwrap();
                        let existing_entry = entries.get_mut(ix).unwrap();
                        assert_eq!(existing_entry.path, entry.path);
                        *existing_entry = entry;
                    }
                    PathChange::AddedOrUpdated | PathChange::Loaded => {
                        let entry = entry.unwrap();
                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
                            *entries.get_mut(ix).unwrap() = entry;
                        } else {
                            entries.insert(ix, entry);
                        }
                    }
                }
            }

            let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
        }
    })
    .detach();
}

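// Applies a random operation to the worktree: delete an entry, rename it,
// create a new file or directory, or overwrite an existing file.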
fn randomly_mutate_worktree(
    worktree: &mut Worktree,
    rng: &mut impl Rng,
    cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> {
    log::info!("mutating worktree");
    let worktree = worktree.as_local_mut().unwrap();
    let snapshot = worktree.snapshot();
    let entry = snapshot.entries(false).choose(rng).unwrap();

    match rng.gen_range(0_u32..100) {
        0..=33 if entry.path.as_ref() != Path::new("") => {
            log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
            worktree.delete_entry(entry.id, cx).unwrap()
        }
        ..=66 if entry.path.as_ref() != Path::new("") => {
            let other_entry = snapshot.entries(false).choose(rng).unwrap();
            let new_parent_path = if other_entry.is_dir() {
                other_entry.path.clone()
            } else {
                other_entry.path.parent().unwrap().into()
            };
            let mut new_path = new_parent_path.join(random_filename(rng));
            if new_path.starts_with(&entry.path) {
                new_path = random_filename(rng).into();
            }

            log::info!(
                "renaming entry {:?} ({}) to {:?}",
                entry.path,
                entry.id.0,
                new_path
            );
            let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
            cx.foreground().spawn(async move {
                task.await?;
                Ok(())
            })
        }
        _ => {
            let task = if entry.is_dir() {
                let child_path = entry.path.join(random_filename(rng));
                let is_dir = rng.gen_bool(0.3);
                log::info!(
                    "creating {} at {:?}",
                    if is_dir { "dir" } else { "file" },
                    child_path,
                );
                worktree.create_entry(child_path, is_dir, cx)
            } else {
                log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
                worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
            };
            cx.foreground().spawn(async move {
                task.await?;
                Ok(())
            })
        }
    }
}

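// Applies a random mutation to the fake filesystem: create a file or
// directory, write a .gitignore, rename an entry, or delete one.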
async fn randomly_mutate_fs(
    fs: &Arc<dyn Fs>,
    root_path: &Path,
    insertion_probability: f64,
    rng: &mut impl Rng,
) {
    log::info!("mutating fs");
    let mut files = Vec::new();
    let mut dirs = Vec::new();
    for path in fs.as_fake().paths(false) {
        if path.starts_with(root_path) {
            if fs.is_file(&path).await {
                files.push(path);
            } else {
                dirs.push(path);
            }
        }
    }

    if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
        let path = dirs.choose(rng).unwrap();
        let new_path = path.join(random_filename(rng));

        if rng.gen() {
            log::info!(
                "creating dir {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_dir(&new_path).await.unwrap();
        } else {
            log::info!(
                "creating file {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_file(&new_path, Default::default()).await.unwrap();
        }
    } else if rng.gen_bool(0.05) {
        let ignore_dir_path = dirs.choose(rng).unwrap();
        let ignore_path = ignore_dir_path.join(&*GITIGNORE);

        let subdirs = dirs
            .iter()
            .filter(|d| d.starts_with(&ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let subfiles = files
            .iter()
            .filter(|d| d.starts_with(&ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let files_to_ignore = {
            let len = rng.gen_range(0..=subfiles.len());
            subfiles.choose_multiple(rng, len)
        };
        let dirs_to_ignore = {
            let len = rng.gen_range(0..subdirs.len());
            subdirs.choose_multiple(rng, len)
        };

        let mut ignore_contents = String::new();
        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
            writeln!(
                ignore_contents,
                "{}",
                path_to_ignore
                    .strip_prefix(&ignore_dir_path)
                    .unwrap()
                    .to_str()
                    .unwrap()
            )
            .unwrap();
        }
        log::info!(
            "creating gitignore {:?} with contents:\n{}",
            ignore_path.strip_prefix(&root_path).unwrap(),
            ignore_contents
        );
        fs.save(
            &ignore_path,
            &ignore_contents.as_str().into(),
            Default::default(),
        )
        .await
        .unwrap();
    } else {
        let old_path = {
            let file_path = files.choose(rng);
            let dir_path = dirs[1..].choose(rng);
            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
        };

        let is_rename = rng.gen();
        if is_rename {
            let new_path_parent = dirs
                .iter()
                .filter(|d| !d.starts_with(old_path))
                .choose(rng)
                .unwrap();

            let overwrite_existing_dir =
                !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
            let new_path = if overwrite_existing_dir {
                fs.remove_dir(
                    &new_path_parent,
                    RemoveOptions {
                        recursive: true,
                        ignore_if_not_exists: true,
                    },
                )
                .await
                .unwrap();
                new_path_parent.to_path_buf()
            } else {
                new_path_parent.join(random_filename(rng))
            };

            log::info!(
                "renaming {:?} to {}{:?}",
                old_path.strip_prefix(&root_path).unwrap(),
                if overwrite_existing_dir {
                    "overwrite "
                } else {
                    ""
                },
                new_path.strip_prefix(&root_path).unwrap()
            );
            fs.rename(
                &old_path,
                &new_path,
                fs::RenameOptions {
                    overwrite: true,
                    ignore_if_exists: true,
                },
            )
            .await
            .unwrap();
        } else if fs.is_file(&old_path).await {
            log::info!(
                "deleting file {:?}",
                old_path.strip_prefix(&root_path).unwrap()
            );
            fs.remove_file(old_path, Default::default()).await.unwrap();
        } else {
            log::info!(
                "deleting dir {:?}",
                old_path.strip_prefix(&root_path).unwrap()
            );
            fs.remove_dir(
                &old_path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await
            .unwrap();
        }
    }
}

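// Generates a random six-character alphanumeric filename.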
fn random_filename(rng: &mut impl Rng) -> String {
    (0..6)
        .map(|_| rng.sample(rand::distributions::Alphanumeric))
        .map(char::from)
        .collect()
}

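// Renaming a repository's work directory should carry the existing git
// statuses over to the new paths.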
#[gpui::test]
async fn test_rename_work_directory(cx: &mut TestAppContext) {
    let root = temp_tree(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },
    }));
    let root_path = root.path();

    let tree = Worktree::local(
        build_client(cx),
        root_path,
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").ok();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.flush_fs_events(cx).await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        let (work_dir, _) = tree.repositories().next().unwrap();
        assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
        assert_eq!(
            tree.status_for_file(Path::new("projects/project1/a")),
            Some(GitFileStatus::Modified)
        );
        assert_eq!(
            tree.status_for_file(Path::new("projects/project1/b")),
            Some(GitFileStatus::Added)
        );
    });

    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .ok();
    tree.flush_fs_events(cx).await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        let (work_dir, _) = tree.repositories().next().unwrap();
        assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
        assert_eq!(
            tree.status_for_file(Path::new("projects/project2/a")),
            Some(GitFileStatus::Modified)
        );
        assert_eq!(
            tree.status_for_file(Path::new("projects/project2/b")),
            Some(GitFileStatus::Added)
        );
    });
}

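// Paths should resolve to their nearest enclosing git repository, and
// repository updates should be reported when a .git directory changes or is
// removed.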
#[gpui::test]
async fn test_git_repository_for_path(cx: &mut TestAppContext) {
    let root = temp_tree(json!({
        "c.txt": "",
        "dir1": {
            ".git": {},
            "deps": {
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": ""
                    }
                }
            },
            "src": {
                "b.txt": ""
            }
        },
    }));

    let tree = Worktree::local(
        build_client(cx),
        root.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _cx| {
        let tree = tree.as_local().unwrap();

        assert!(tree.repository_for_path("c.txt".as_ref()).is_none());

        let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
        assert_eq!(
            entry
                .work_directory(tree)
                .map(|directory| directory.as_ref().to_owned()),
            Some(Path::new("dir1").to_owned())
        );

        let entry = tree
            .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
            .unwrap();
        assert_eq!(
            entry
                .work_directory(tree)
                .map(|directory| directory.as_ref().to_owned()),
            Some(Path::new("dir1/deps/dep1").to_owned())
        );

        let entries = tree.files(false, 0);

        let paths_with_repos = tree
            .entries_with_repositories(entries)
            .map(|(entry, repo)| {
                (
                    entry.path.as_ref(),
                    repo.and_then(|repo| {
                        repo.work_directory(&tree)
                            .map(|work_directory| work_directory.0.to_path_buf())
                    }),
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            paths_with_repos,
            &[
                (Path::new("c.txt"), None),
                (
                    Path::new("dir1/deps/dep1/src/a.txt"),
                    Some(Path::new("dir1/deps/dep1").into())
                ),
                (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
            ]
        );
    });

    let repo_update_events = Arc::new(Mutex::new(vec![]));
    tree.update(cx, |_, cx| {
        let repo_update_events = repo_update_events.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedGitRepositories(update) = event {
                repo_update_events.lock().push(update.clone());
            }
        })
        .detach();
    });

    std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
    tree.flush_fs_events(cx).await;

    assert_eq!(
        repo_update_events.lock()[0]
            .iter()
            .map(|e| e.0.clone())
            .collect::<Vec<Arc<Path>>>(),
        vec![Path::new("dir1").into()]
    );

    std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _cx| {
        let tree = tree.as_local().unwrap();

        assert!(tree
            .repository_for_path("dir1/src/b.txt".as_ref())
            .is_none());
    });
}

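// Exercises git status tracking against a real repository: staged and
// unstaged changes, ignore rules, commits, resets, stashes, and renames.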
#[gpui::test]
async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
    const IGNORE_RULE: &'static str = "**/target";

    let root = temp_tree(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },
    }));

    let tree = Worktree::local(
        build_client(cx),
        root.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    const A_TXT: &'static str = "a.txt";
    const B_TXT: &'static str = "b.txt";
    const E_TXT: &'static str = "c/d/e.txt";
    const F_TXT: &'static str = "f.txt";
    const DOTGITIGNORE: &'static str = ".gitignore";
    const BUILD_FILE: &'static str = "target/build_file";
    let project_path: &Path = &Path::new("project");

    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(Path::new(A_TXT), &repo);
    git_add(Path::new(E_TXT), &repo);
    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Initial commit", &repo);

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that the right git state is observed on startup
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(snapshot.repositories().count(), 1);
        let (dir, _) = snapshot.repositories().next().unwrap();
        assert_eq!(dir.as_ref(), Path::new("project"));

        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );
    });

    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(project_path.join(A_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    git_add(Path::new(A_TXT), &repo);
    git_add(Path::new(B_TXT), &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that repo only changes are tracked
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );

        assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
    });

    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that more complex repo changes are tracked
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(E_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &'static str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
            Some(GitFileStatus::Added)
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(
                project_path
                    .join(Path::new(renamed_dir_name))
                    .join(RENAMED_FILE)
            ),
            Some(GitFileStatus::Added)
        );
    });
}

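// Git statuses should propagate to ancestor directories, with conflicts
// taking precedence over modifications and modifications over additions.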
#[gpui::test]
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".git": {},
            "a": {
                "b": {
                    "c1.txt": "",
                    "c2.txt": "",
                },
                "d": {
                    "e1.txt": "",
                    "e2.txt": "",
                    "e3.txt": "",
                }
            },
            "f": {
                "no-status.txt": ""
            },
            "g": {
                "h1.txt": "",
                "h2.txt": ""
            },
        }),
    )
    .await;

    fs.set_status_for_repo_via_git_operation(
        &Path::new("/root/.git"),
        &[
            (Path::new("a/b/c1.txt"), GitFileStatus::Added),
            (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
            (Path::new("g/h2.txt"), GitFileStatus::Conflict),
        ],
    );

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    cx.foreground().run_until_parked();
    let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new(""), Some(GitFileStatus::Conflict)),
            (Path::new("a"), Some(GitFileStatus::Modified)),
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
            (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f/no-status.txt"), None),
        ],
    );

    #[track_caller]
    fn check_propagated_statuses(
        snapshot: &Snapshot,
        expected_statuses: &[(&Path, Option<GitFileStatus>)],
    ) {
        let mut entries = expected_statuses
            .iter()
            .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
            .collect::<Vec<_>>();
        snapshot.propagate_git_statuses(&mut entries);
        assert_eq!(
            entries
                .iter()
                .map(|e| (e.path.as_ref(), e.git_status))
                .collect::<Vec<_>>(),
            expected_statuses
        );
    }
}

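// Builds a test client backed by an HTTP client that always responds with 404.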
fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
    let http_client = FakeHttpClient::with_404_response();
    cx.read(|cx| Client::new(http_client, cx))
}

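// The helpers below drive a real git repository via git2 for the tests above.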
#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
    git2::Repository::init(path).expect("Failed to initialize git repository")
}

#[track_caller]
fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
    let path = path.as_ref();
    let mut index = repo.index().expect("Failed to get index");
    index.add_path(path).expect("Failed to add file");
    index.write().expect("Failed to write index");
}

#[track_caller]
fn git_remove_index(path: &Path, repo: &git2::Repository) {
    let mut index = repo.index().expect("Failed to get index");
    index
        .remove_path(path)
        .expect("Failed to remove file from index");
    index.write().expect("Failed to write index");
}

#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    let oid = repo.index().unwrap().write_tree().unwrap();
    let tree = repo.find_tree(oid).unwrap();
    if let Some(head) = repo.head().ok() {
        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();

        let parent_commit = parent_obj.as_commit().unwrap();

        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[parent_commit],
        )
        .expect("Failed to commit with parent");
    } else {
        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
            .expect("Failed to commit");
    }
}

#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}

#[track_caller]
fn git_reset(offset: usize, repo: &git2::Repository) {
    let head = repo.head().expect("Couldn't get repo head");
    let object = head.peel(git2::ObjectType::Commit).unwrap();
    let commit = object.as_commit().unwrap();
    let new_head = commit
        .parents()
        .inspect(|parent| {
            parent.message();
        })
        .skip(offset)
        .next()
        .expect("Not enough history");
    repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
        .expect("Could not reset");
}

#[allow(dead_code)]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}