use crate::{
    worktree::{Event, Snapshot, WorktreeHandle},
    Entry, EntryKind, PathChange, Worktree,
};
use anyhow::Result;
use client::Client;
use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
use git::GITIGNORE;
use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
use parking_lot::Mutex;
use postage::stream::Stream;
use pretty_assertions::assert_eq;
use rand::prelude::*;
use serde_json::json;
use std::{
    env,
    fmt::Write,
    mem,
    path::{Path, PathBuf},
    sync::Arc,
};
use util::{http::FakeHttpClient, test::temp_tree, ResultExt};

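// Verifies basic traversal: `entries(false)` omits gitignored paths, while `entries(true)` includes them.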
#[gpui::test]
async fn test_traversal(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "a/b\n",
            "a": {
                "b": "",
                "c": "",
            }
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new(".gitignore"),
                Path::new("a"),
                Path::new("a/c"),
            ]
        );
        assert_eq!(
            tree.entries(true)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new(".gitignore"),
                Path::new("a"),
                Path::new("a/b"),
                Path::new("a/c"),
            ]
        );
    })
}

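// Verifies `descendent_entries` filtering by directories and ignored entries, before and after expanding a gitignored directory.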
#[gpui::test]
async fn test_descendent_entries(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "a": "",
            "b": {
                "c": {
                    "d": ""
                },
                "e": {}
            },
            "f": "",
            "g": {
                "h": {}
            },
            "i": {
                "j": {
                    "k": ""
                },
                "l": {

                }
            },
            ".gitignore": "i/j\n",
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.descendent_entries(false, false, Path::new("b"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("b/c/d"),]
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("b"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new("b"),
                Path::new("b/c"),
                Path::new("b/c/d"),
                Path::new("b/e"),
            ]
        );

        assert_eq!(
            tree.descendent_entries(false, false, Path::new("g"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            Vec::<PathBuf>::new()
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("g"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("g"), Path::new("g/h"),]
        );
    });

    // Expand gitignored directory.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("i/j").into()])
    })
    .recv()
    .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.descendent_entries(false, false, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            Vec::<PathBuf>::new()
        );
        assert_eq!(
            tree.descendent_entries(false, true, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("i/j/k")]
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("i"), Path::new("i/l"),]
        );
    })
}

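// Symlinks that form a cycle (each directory linking back to its parent) should appear as entries without being followed endlessly.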
#[gpui::test(iterations = 10)]
async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "lib": {
                "a": {
                    "a.txt": ""
                },
                "b": {
                    "b.txt": ""
                }
            }
        }),
    )
    .await;
    fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
    fs.insert_symlink("/root/lib/b/lib", "..".into()).await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new("lib"),
                Path::new("lib/a"),
                Path::new("lib/a/a.txt"),
                Path::new("lib/a/lib"),
                Path::new("lib/b"),
                Path::new("lib/b/b.txt"),
                Path::new("lib/b/lib"),
            ]
        );
    });

    fs.rename(
        Path::new("/root/lib/a/lib"),
        Path::new("/root/lib/a/lib-2"),
        Default::default(),
    )
    .await
    .unwrap();
    executor.run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new("lib"),
                Path::new("lib/a"),
                Path::new("lib/a/a.txt"),
                Path::new("lib/a/lib-2"),
                Path::new("lib/b"),
                Path::new("lib/b/b.txt"),
                Path::new("lib/b/lib"),
            ]
        );
    });
}

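// Symlinks that point outside the worktree root are marked `is_external` and are only loaded when explicitly expanded.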
#[gpui::test]
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir1": {
                "deps": {
                    // symlinks here
                },
                "src": {
                    "a.rs": "",
                    "b.rs": "",
                },
            },
            "dir2": {
                "src": {
                    "c.rs": "",
                    "d.rs": "",
                }
            },
            "dir3": {
                "deps": {},
                "src": {
                    "e.rs": "",
                    "f.rs": "",
                },
            }
        }),
    )
    .await;

    // These symlinks point to directories outside of the worktree's root, dir1.
    fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
        .await;
    fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
        .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root/dir1"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    let tree_updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |_, cx| {
        let tree_updates = tree_updates.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedEntries(update) = event {
                tree_updates.lock().extend(
                    update
                        .iter()
                        .map(|(path, _, change)| (path.clone(), *change)),
                );
            }
        })
        .detach();
    });

    // The symlinked directories are not scanned by default.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );

        assert_eq!(
            tree.entry_for_path("deps/dep-dir2").unwrap().kind,
            EntryKind::UnloadedDir
        );
    });

    // Expand one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
    })
    .recv()
    .await;

    // The expanded directory's contents are loaded. Subdirectories are
    // not scanned yet.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("deps/dep-dir3/deps"), true),
                (Path::new("deps/dep-dir3/src"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );
    });
    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
            (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
            (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
        ]
    );

    // Expand a subdirectory of one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
    })
    .recv()
    .await;

    // The expanded subdirectory's contents are loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("deps/dep-dir3/deps"), true),
                (Path::new("deps/dep-dir3/src"), true),
                (Path::new("deps/dep-dir3/src/e.rs"), true),
                (Path::new("deps/dep-dir3/src/f.rs"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );
    });

    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
            (
                Path::new("deps/dep-dir3/src/e.rs").into(),
                PathChange::Loaded
            ),
            (
                Path::new("deps/dep-dir3/src/f.rs").into(),
                PathChange::Loaded
            )
        ]
    );
}

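// Opening a buffer deep inside an unexpanded gitignored directory lazily scans only the directories needed to reach it.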
#[gpui::test]
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "one": {
                "node_modules": {
                    "a": {
                        "a1.js": "a1",
                        "a2.js": "a2",
                    },
                    "b": {
                        "b1.js": "b1",
                        "b2.js": "b2",
                    },
                },
            },
            "two": {
                "x.js": "",
                "y.js": "",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );
    });

    // Open a file that is nested inside of a gitignored directory that
    // has not yet been expanded.
    let prev_read_dir_count = fs.read_dir_call_count();
    let buffer = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, cx| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("one/node_modules/a"), true),
                (Path::new("one/node_modules/b"), true),
                (Path::new("one/node_modules/b/b1.js"), true),
                (Path::new("one/node_modules/b/b2.js"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );

        assert_eq!(
            buffer.read(cx).file().unwrap().path().as_ref(),
            Path::new("one/node_modules/b/b1.js")
        );

        // Only the newly-expanded directories are scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
    });

    // Open another file in a different subdirectory of the same
    // gitignored directory.
    let prev_read_dir_count = fs.read_dir_call_count();
    let buffer = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, cx| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("one/node_modules/a"), true),
                (Path::new("one/node_modules/a/a1.js"), true),
                (Path::new("one/node_modules/a/a2.js"), true),
                (Path::new("one/node_modules/b"), true),
                (Path::new("one/node_modules/b/b1.js"), true),
                (Path::new("one/node_modules/b/b2.js"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );

        assert_eq!(
            buffer.read(cx).file().unwrap().path().as_ref(),
            Path::new("one/node_modules/a/a2.js")
        );

        // Only the newly-expanded directory is scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
    });
}

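// When a .gitignore change un-ignores a directory, its previously unloaded contents are scanned and ignore flags are recomputed.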
#[gpui::test]
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "a": {
                "a.js": "",
            },
            "b": {
                "b.js": "",
            },
            "node_modules": {
                "c": {
                    "c.js": "",
                },
                "d": {
                    "d.js": "",
                    "e": {
                        "e1.js": "",
                        "e2.js": "",
                    },
                    "f": {
                        "f1.js": "",
                        "f2.js": "",
                    }
                },
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Open a file within the gitignored directory, forcing some of its
    // subdirectories to be read, but not all.
    let read_dir_count_1 = fs.read_dir_call_count();
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
    })
    .recv()
    .await;

    // Those subdirectories are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("a"), false),
                (Path::new("a/a.js"), false),
                (Path::new("b"), false),
                (Path::new("b/b.js"), false),
                (Path::new("node_modules"), true),
                (Path::new("node_modules/c"), true),
                (Path::new("node_modules/d"), true),
                (Path::new("node_modules/d/d.js"), true),
                (Path::new("node_modules/d/e"), true),
                (Path::new("node_modules/d/f"), true),
            ]
        );
    });
    let read_dir_count_2 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);

    // Update the gitignore so that node_modules is no longer ignored,
    // but a subdirectory is ignored
    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
        .await
        .unwrap();
    cx.foreground().run_until_parked();

    // All of the directories that are no longer ignored are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("a"), false),
                (Path::new("a/a.js"), false),
                (Path::new("b"), false),
                (Path::new("b/b.js"), false),
                // This directory is no longer ignored
                (Path::new("node_modules"), false),
                (Path::new("node_modules/c"), false),
                (Path::new("node_modules/c/c.js"), false),
                (Path::new("node_modules/d"), false),
                (Path::new("node_modules/d/d.js"), false),
                // This subdirectory is now ignored
                (Path::new("node_modules/d/e"), true),
                (Path::new("node_modules/d/f"), false),
                (Path::new("node_modules/d/f/f1.js"), false),
                (Path::new("node_modules/d/f/f2.js"), false),
            ]
        );
    });

    // Each of the newly-loaded directories is scanned only once.
    let read_dir_count_3 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
}

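// Files created after the initial scan should inherit ignore status from both the worktree's own .gitignore and an ancestor .gitignore.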
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        "/root/tree".as_ref(),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        assert!(
            !tree
                .entry_for_path("tracked-dir/tracked-file1")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("ignored-dir/ignored-file1")
                .unwrap()
                .is_ignored
        );
    });

    fs.create_file(
        "/root/tree/tracked-dir/tracked-file2".as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        "/root/tree/ignored-dir/ignored-file2".as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.foreground().run_until_parked();
    cx.read(|cx| {
        let tree = tree.read(cx);
        assert!(
            !tree
                .entry_for_path("tracked-dir/tracked-file2")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("ignored-dir/ignored-file2")
                .unwrap()
                .is_ignored
        );
        assert!(tree.entry_for_path(".git").unwrap().is_ignored);
    });
}

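// Files written through the worktree (on the real filesystem) should get entries with the correct ignore status.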
#[gpui::test]
async fn test_write_file(cx: &mut TestAppContext) {
    let dir = temp_tree(json!({
        ".git": {},
        ".gitignore": "ignored-dir\n",
        "tracked-dir": {},
        "ignored-dir": {}
    }));

    let tree = Worktree::local(
        build_client(cx),
        dir.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.update(cx, |tree, cx| {
        tree.as_local().unwrap().write_file(
            Path::new("tracked-dir/file.txt"),
            "hello".into(),
            Default::default(),
            cx,
        )
    })
    .await
    .unwrap();
    tree.update(cx, |tree, cx| {
        tree.as_local().unwrap().write_file(
            Path::new("ignored-dir/file.txt"),
            "world".into(),
            Default::default(),
            cx,
        )
    })
    .await
    .unwrap();

    tree.read_with(cx, |tree, _| {
        let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
        let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
        assert!(!tracked.is_ignored);
        assert!(ignored.is_ignored);
    });
}

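// Creating an entry while the initial scan is still in progress should yield the same snapshot locally and via observed updates.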
#[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "b": {},
            "c": {},
            "d": {},
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        "/root".as_ref(),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let snapshot1 = tree.update(cx, |tree, cx| {
        let tree = tree.as_local_mut().unwrap();
        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
        let _ = tree.observe_updates(0, cx, {
            let snapshot = snapshot.clone();
            move |update| {
                snapshot.lock().apply_remote_update(update).unwrap();
                async { true }
            }
        });
        snapshot
    });

    let entry = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("a/e".as_ref(), true, cx)
        })
        .await
        .unwrap();
    assert!(entry.is_dir());

    cx.foreground().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
    });

    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
    assert_eq!(
        snapshot1.lock().entries(true).collect::<Vec<_>>(),
        snapshot2.entries(true).collect::<Vec<_>>()
    );
}

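// Randomized test: mutates the worktree during the initial scan and checks that replaying observed updates reproduces the final snapshot.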
#[gpui::test(iterations = 100)]
async fn test_random_worktree_operations_during_initial_scan(
    cx: &mut TestAppContext,
    mut rng: StdRng,
) {
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(5);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new("/test");
    let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        build_client(cx),
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    for _ in 0..operations {
        worktree
            .update(cx, |worktree, cx| {
                randomly_mutate_worktree(worktree, &mut rng, cx)
            })
            .await
            .log_err();
        worktree.read_with(cx, |tree, _| {
            tree.as_local().unwrap().snapshot().check_invariants(true)
        });

        if rng.gen_bool(0.6) {
            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
        }
    }

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    cx.foreground().run_until_parked();

    let final_snapshot = worktree.read_with(cx, |tree, _| {
        let tree = tree.as_local().unwrap();
        let snapshot = tree.snapshot();
        snapshot.check_invariants(true);
        snapshot
    });

    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
        let mut updated_snapshot = snapshot.clone();
        for update in updates.lock().iter() {
            if update.scan_id >= updated_snapshot.scan_id() as u64 {
                updated_snapshot
                    .apply_remote_update(update.clone())
                    .unwrap();
            }
        }

        assert_eq!(
            updated_snapshot.entries(true).collect::<Vec<_>>(),
            final_snapshot.entries(true).collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
        );
    }
}

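// Randomized test: interleaves worktree and filesystem mutations with batched fs events, then checks snapshot invariants and update convergence.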
#[gpui::test(iterations = 100)]
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(40);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new("/test");
    let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        build_client(cx),
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    fs.as_fake().pause_events();
    let mut snapshots = Vec::new();
    let mut mutations_len = operations;
    while mutations_len > 1 {
        if rng.gen_bool(0.2) {
            worktree
                .update(cx, |worktree, cx| {
                    randomly_mutate_worktree(worktree, &mut rng, cx)
                })
                .await
                .log_err();
        } else {
            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
        }

        let buffered_event_count = fs.as_fake().buffered_event_count();
        if buffered_event_count > 0 && rng.gen_bool(0.3) {
            let len = rng.gen_range(0..=buffered_event_count);
            log::info!("flushing {} events", len);
            fs.as_fake().flush_events(len);
        } else {
            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
            mutations_len -= 1;
        }

        cx.foreground().run_until_parked();
        if rng.gen_bool(0.2) {
            log::info!("storing snapshot {}", snapshots.len());
            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
            snapshots.push(snapshot);
        }
    }

    log::info!("quiescing");
    fs.as_fake().flush_events(usize::MAX);
    cx.foreground().run_until_parked();

    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    snapshot.check_invariants(true);
    let expanded_paths = snapshot
        .expanded_entries()
        .map(|e| e.path.clone())
        .collect::<Vec<_>>();

    {
        let new_worktree = Worktree::local(
            build_client(cx),
            root_dir,
            true,
            fs.clone(),
            Default::default(),
            &mut cx.to_async(),
        )
        .await
        .unwrap();
        new_worktree
            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
            .await;
        new_worktree
            .update(cx, |tree, _| {
                tree.as_local_mut()
                    .unwrap()
                    .refresh_entries_for_paths(expanded_paths)
            })
            .recv()
            .await;
        let new_snapshot =
            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        assert_eq!(
            snapshot.entries_without_ids(true),
            new_snapshot.entries_without_ids(true)
        );
    }

    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
        for update in updates.lock().iter() {
            if update.scan_id >= prev_snapshot.scan_id() as u64 {
                prev_snapshot.apply_remote_update(update.clone()).unwrap();
            }
        }

        assert_eq!(
            prev_snapshot
                .entries(true)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            snapshot
                .entries(true)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }

    fn ignore_pending_dir(entry: &Entry) -> Entry {
        let mut entry = entry.clone();
        if entry.kind.is_dir() {
            entry.kind = EntryKind::Dir
        }
        entry
    }
}

// The worktree's `UpdatedEntries` event can be used to follow along with
// all changes to the worktree's snapshot.
fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
    let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
    cx.subscribe(&cx.handle(), move |tree, _, event, _| {
        if let Event::UpdatedEntries(changes) = event {
            for (path, _, change_type) in changes.iter() {
                let entry = tree.entry_for_path(&path).cloned();
                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
                    Ok(ix) | Err(ix) => ix,
                };
                match change_type {
                    PathChange::Added => entries.insert(ix, entry.unwrap()),
                    PathChange::Removed => drop(entries.remove(ix)),
                    PathChange::Updated => {
                        let entry = entry.unwrap();
                        let existing_entry = entries.get_mut(ix).unwrap();
                        assert_eq!(existing_entry.path, entry.path);
                        *existing_entry = entry;
                    }
                    PathChange::AddedOrUpdated | PathChange::Loaded => {
                        let entry = entry.unwrap();
                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
                            *entries.get_mut(ix).unwrap() = entry;
                        } else {
                            entries.insert(ix, entry);
                        }
                    }
                }
            }

            let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
        }
    })
    .detach();
}

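// Applies one random operation to the worktree: delete an entry, rename it, or create/overwrite a file or directory.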
fn randomly_mutate_worktree(
    worktree: &mut Worktree,
    rng: &mut impl Rng,
    cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> {
    log::info!("mutating worktree");
    let worktree = worktree.as_local_mut().unwrap();
    let snapshot = worktree.snapshot();
    let entry = snapshot.entries(false).choose(rng).unwrap();

    match rng.gen_range(0_u32..100) {
        0..=33 if entry.path.as_ref() != Path::new("") => {
            log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
            worktree.delete_entry(entry.id, cx).unwrap()
        }
        ..=66 if entry.path.as_ref() != Path::new("") => {
            let other_entry = snapshot.entries(false).choose(rng).unwrap();
            let new_parent_path = if other_entry.is_dir() {
                other_entry.path.clone()
            } else {
                other_entry.path.parent().unwrap().into()
            };
            let mut new_path = new_parent_path.join(random_filename(rng));
            if new_path.starts_with(&entry.path) {
                new_path = random_filename(rng).into();
            }

            log::info!(
                "renaming entry {:?} ({}) to {:?}",
                entry.path,
                entry.id.0,
                new_path
            );
            let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
            cx.foreground().spawn(async move {
                task.await?;
                Ok(())
            })
        }
        _ => {
            let task = if entry.is_dir() {
                let child_path = entry.path.join(random_filename(rng));
                let is_dir = rng.gen_bool(0.3);
                log::info!(
                    "creating {} at {:?}",
                    if is_dir { "dir" } else { "file" },
                    child_path,
                );
                worktree.create_entry(child_path, is_dir, cx)
            } else {
                log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
                worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
            };
            cx.foreground().spawn(async move {
                task.await?;
                Ok(())
            })
        }
    }
}

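// Randomly creates, renames, deletes, or gitignores files and directories beneath `root_path` on the fake filesystem.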
async fn randomly_mutate_fs(
    fs: &Arc<dyn Fs>,
    root_path: &Path,
    insertion_probability: f64,
    rng: &mut impl Rng,
) {
    log::info!("mutating fs");
    let mut files = Vec::new();
    let mut dirs = Vec::new();
    for path in fs.as_fake().paths(false) {
        if path.starts_with(root_path) {
            if fs.is_file(&path).await {
                files.push(path);
            } else {
                dirs.push(path);
            }
        }
    }

    if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
        let path = dirs.choose(rng).unwrap();
        let new_path = path.join(random_filename(rng));

        if rng.gen() {
            log::info!(
                "creating dir {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_dir(&new_path).await.unwrap();
        } else {
            log::info!(
                "creating file {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_file(&new_path, Default::default()).await.unwrap();
        }
    } else if rng.gen_bool(0.05) {
        let ignore_dir_path = dirs.choose(rng).unwrap();
        let ignore_path = ignore_dir_path.join(&*GITIGNORE);

        let subdirs = dirs
            .iter()
            .filter(|d| d.starts_with(&ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let subfiles = files
            .iter()
            .filter(|d| d.starts_with(&ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let files_to_ignore = {
            let len = rng.gen_range(0..=subfiles.len());
            subfiles.choose_multiple(rng, len)
        };
        let dirs_to_ignore = {
            let len = rng.gen_range(0..subdirs.len());
            subdirs.choose_multiple(rng, len)
        };

        let mut ignore_contents = String::new();
        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
            writeln!(
                ignore_contents,
                "{}",
                path_to_ignore
                    .strip_prefix(&ignore_dir_path)
                    .unwrap()
                    .to_str()
                    .unwrap()
            )
            .unwrap();
        }
        log::info!(
            "creating gitignore {:?} with contents:\n{}",
            ignore_path.strip_prefix(&root_path).unwrap(),
            ignore_contents
        );
        fs.save(
            &ignore_path,
            &ignore_contents.as_str().into(),
            Default::default(),
        )
        .await
        .unwrap();
    } else {
        let old_path = {
            let file_path = files.choose(rng);
            let dir_path = dirs[1..].choose(rng);
            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
        };

        let is_rename = rng.gen();
        if is_rename {
            let new_path_parent = dirs
                .iter()
                .filter(|d| !d.starts_with(old_path))
                .choose(rng)
                .unwrap();

            let overwrite_existing_dir =
                !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
            let new_path = if overwrite_existing_dir {
                fs.remove_dir(
                    &new_path_parent,
                    RemoveOptions {
                        recursive: true,
                        ignore_if_not_exists: true,
                    },
                )
                .await
                .unwrap();
                new_path_parent.to_path_buf()
            } else {
                new_path_parent.join(random_filename(rng))
            };

            log::info!(
                "renaming {:?} to {}{:?}",
                old_path.strip_prefix(&root_path).unwrap(),
                if overwrite_existing_dir {
                    "overwrite "
                } else {
                    ""
                },
                new_path.strip_prefix(&root_path).unwrap()
            );
            fs.rename(
                &old_path,
                &new_path,
                fs::RenameOptions {
                    overwrite: true,
                    ignore_if_exists: true,
                },
            )
            .await
            .unwrap();
        } else if fs.is_file(&old_path).await {
            log::info!(
                "deleting file {:?}",
                old_path.strip_prefix(&root_path).unwrap()
            );
            fs.remove_file(old_path, Default::default()).await.unwrap();
        } else {
            log::info!(
                "deleting dir {:?}",
                old_path.strip_prefix(&root_path).unwrap()
            );
            fs.remove_dir(
                &old_path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await
            .unwrap();
        }
    }
}

fn random_filename(rng: &mut impl Rng) -> String {
    (0..6)
        .map(|_| rng.sample(rand::distributions::Alphanumeric))
        .map(char::from)
        .collect()
}

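// Renaming a repository's work directory should move the repository entry and keep file statuses under the new path.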
#[gpui::test]
async fn test_rename_work_directory(cx: &mut TestAppContext) {
    let root = temp_tree(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    let tree = Worktree::local(
        build_client(cx),
        root_path,
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").ok();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.flush_fs_events(cx).await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        let (work_dir, _) = tree.repositories().next().unwrap();
        assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
        assert_eq!(
            tree.status_for_file(Path::new("projects/project1/a")),
            Some(GitFileStatus::Modified)
        );
        assert_eq!(
            tree.status_for_file(Path::new("projects/project1/b")),
            Some(GitFileStatus::Added)
        );
    });

    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .ok();
    tree.flush_fs_events(cx).await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        let (work_dir, _) = tree.repositories().next().unwrap();
        assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
        assert_eq!(
            tree.status_for_file(Path::new("projects/project2/a")),
            Some(GitFileStatus::Modified)
        );
        assert_eq!(
            tree.status_for_file(Path::new("projects/project2/b")),
            Some(GitFileStatus::Added)
        );
    });
}

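// `repository_for_path` should resolve each path to its innermost containing repository and react to a .git directory being modified or removed.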
#[gpui::test]
async fn test_git_repository_for_path(cx: &mut TestAppContext) {
    let root = temp_tree(json!({
        "c.txt": "",
        "dir1": {
            ".git": {},
            "deps": {
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": ""
                    }
                }
            },
            "src": {
                "b.txt": ""
            }
        },
    }));

    let tree = Worktree::local(
        build_client(cx),
        root.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _cx| {
        let tree = tree.as_local().unwrap();

        assert!(tree.repository_for_path("c.txt".as_ref()).is_none());

        let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
        assert_eq!(
            entry
                .work_directory(tree)
                .map(|directory| directory.as_ref().to_owned()),
            Some(Path::new("dir1").to_owned())
        );

        let entry = tree
            .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
            .unwrap();
        assert_eq!(
            entry
                .work_directory(tree)
                .map(|directory| directory.as_ref().to_owned()),
            Some(Path::new("dir1/deps/dep1").to_owned())
        );

        let entries = tree.files(false, 0);

        let paths_with_repos = tree
            .entries_with_repositories(entries)
            .map(|(entry, repo)| {
                (
                    entry.path.as_ref(),
                    repo.and_then(|repo| {
                        repo.work_directory(&tree)
                            .map(|work_directory| work_directory.0.to_path_buf())
                    }),
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            paths_with_repos,
            &[
                (Path::new("c.txt"), None),
                (
                    Path::new("dir1/deps/dep1/src/a.txt"),
                    Some(Path::new("dir1/deps/dep1").into())
                ),
                (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
            ]
        );
    });

    let repo_update_events = Arc::new(Mutex::new(vec![]));
    tree.update(cx, |_, cx| {
        let repo_update_events = repo_update_events.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedGitRepositories(update) = event {
                repo_update_events.lock().push(update.clone());
            }
        })
        .detach();
    });

    std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
    tree.flush_fs_events(cx).await;

    assert_eq!(
        repo_update_events.lock()[0]
            .iter()
            .map(|e| e.0.clone())
            .collect::<Vec<Arc<Path>>>(),
        vec![Path::new("dir1").into()]
    );

    std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _cx| {
        let tree = tree.as_local().unwrap();

        assert!(tree
            .repository_for_path("dir1/src/b.txt".as_ref())
            .is_none());
    });
}

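// End-to-end git status tracking on the real filesystem: commits, resets, stashes, ignore-rule changes, and directory renames.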
#[gpui::test]
async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
    const IGNORE_RULE: &'static str = "**/target";

    let root = temp_tree(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));

    let tree = Worktree::local(
        build_client(cx),
        root.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    const A_TXT: &'static str = "a.txt";
    const B_TXT: &'static str = "b.txt";
    const E_TXT: &'static str = "c/d/e.txt";
    const F_TXT: &'static str = "f.txt";
    const DOTGITIGNORE: &'static str = ".gitignore";
    const BUILD_FILE: &'static str = "target/build_file";
    let project_path: &Path = &Path::new("project");

    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(Path::new(A_TXT), &repo);
    git_add(Path::new(E_TXT), &repo);
    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Initial commit", &repo);

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that the right git state is observed on startup.
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(snapshot.repositories().count(), 1);
        let (dir, _) = snapshot.repositories().next().unwrap();
        assert_eq!(dir.as_ref(), Path::new("project"));

        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );
    });

    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(project_path.join(A_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    git_add(Path::new(A_TXT), &repo);
    git_add(Path::new(B_TXT), &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that repo-only changes are tracked.
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );

        assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
    });

    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that more complex repo changes are tracked.
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(E_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &'static str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
            Some(GitFileStatus::Added)
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(
                project_path
                    .join(Path::new(renamed_dir_name))
                    .join(RENAMED_FILE)
            ),
            Some(GitFileStatus::Added)
        );
    });
}

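// Directory entries should report a status propagated from their descendants, with conflicts taking precedence over modifications and additions.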
#[gpui::test]
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".git": {},
            "a": {
                "b": {
                    "c1.txt": "",
                    "c2.txt": "",
                },
                "d": {
                    "e1.txt": "",
                    "e2.txt": "",
                    "e3.txt": "",
                }
            },
            "f": {
                "no-status.txt": ""
            },
            "g": {
                "h1.txt": "",
                "h2.txt": ""
            },

        }),
    )
    .await;

    fs.set_status_for_repo_via_git_operation(
        &Path::new("/root/.git"),
        &[
            (Path::new("a/b/c1.txt"), GitFileStatus::Added),
            (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
            (Path::new("g/h2.txt"), GitFileStatus::Conflict),
        ],
    );

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    cx.foreground().run_until_parked();
    let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new(""), Some(GitFileStatus::Conflict)),
            (Path::new("a"), Some(GitFileStatus::Modified)),
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
            (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f/no-status.txt"), None),
        ],
    );

    #[track_caller]
    fn check_propagated_statuses(
        snapshot: &Snapshot,
        expected_statuses: &[(&Path, Option<GitFileStatus>)],
    ) {
        let mut entries = expected_statuses
            .iter()
            .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
            .collect::<Vec<_>>();
        snapshot.propagate_git_statuses(&mut entries);
        assert_eq!(
            entries
                .iter()
                .map(|e| (e.path.as_ref(), e.git_status))
                .collect::<Vec<_>>(),
            expected_statuses
        );
    }
}

fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
    let http_client = FakeHttpClient::with_404_response();
    cx.read(|cx| Client::new(http_client, cx))
}

#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
    git2::Repository::init(path).expect("Failed to initialize git repository")
}

#[track_caller]
fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
    let path = path.as_ref();
    let mut index = repo.index().expect("Failed to get index");
    index.add_path(path).expect("Failed to add file");
    index.write().expect("Failed to write index");
}

#[track_caller]
fn git_remove_index(path: &Path, repo: &git2::Repository) {
    let mut index = repo.index().expect("Failed to get index");
    index.remove_path(path).expect("Failed to remove file");
    index.write().expect("Failed to write index");
}

#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    let oid = repo.index().unwrap().write_tree().unwrap();
    let tree = repo.find_tree(oid).unwrap();
    if let Some(head) = repo.head().ok() {
        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();

        let parent_commit = parent_obj.as_commit().unwrap();

        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[parent_commit],
        )
        .expect("Failed to commit with parent");
    } else {
        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
            .expect("Failed to commit");
    }
}

#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}

#[track_caller]
fn git_reset(offset: usize, repo: &git2::Repository) {
    let head = repo.head().expect("Couldn't get repo head");
    let object = head.peel(git2::ObjectType::Commit).unwrap();
    let commit = object.as_commit().unwrap();
    let new_head = commit
        .parents()
        .inspect(|parent| {
            parent.message();
        })
        .skip(offset)
        .next()
        .expect("Not enough history");
    repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
        .expect("Could not reset");
}

#[allow(dead_code)]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}