1use crate::{
2 project_settings::ProjectSettings,
3 worktree::{Event, Snapshot, WorktreeModelHandle},
4 Entry, EntryKind, PathChange, Project, Worktree,
5};
6use anyhow::Result;
7use client::Client;
8use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
9use git::GITIGNORE;
10use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
11use parking_lot::Mutex;
12use postage::stream::Stream;
13use pretty_assertions::assert_eq;
14use rand::prelude::*;
15use serde_json::json;
16use settings::SettingsStore;
17use std::{
18 env,
19 fmt::Write,
20 mem,
21 path::{Path, PathBuf},
22 sync::Arc,
23};
24use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
25
26#[gpui::test]
27async fn test_traversal(cx: &mut TestAppContext) {
28 let fs = FakeFs::new(cx.background());
29 fs.insert_tree(
30 "/root",
31 json!({
32 ".gitignore": "a/b\n",
33 "a": {
34 "b": "",
35 "c": "",
36 }
37 }),
38 )
39 .await;
40
41 let tree = Worktree::local(
42 build_client(cx),
43 Path::new("/root"),
44 true,
45 fs,
46 Default::default(),
47 &mut cx.to_async(),
48 )
49 .await
50 .unwrap();
51 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
52 .await;
53
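    // `a/b` is matched by the `.gitignore` above, so it only appears in the
    // second listing, which includes ignored entries.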
54 tree.read_with(cx, |tree, _| {
55 assert_eq!(
56 tree.entries(false)
57 .map(|entry| entry.path.as_ref())
58 .collect::<Vec<_>>(),
59 vec![
60 Path::new(""),
61 Path::new(".gitignore"),
62 Path::new("a"),
63 Path::new("a/c"),
64 ]
65 );
66 assert_eq!(
67 tree.entries(true)
68 .map(|entry| entry.path.as_ref())
69 .collect::<Vec<_>>(),
70 vec![
71 Path::new(""),
72 Path::new(".gitignore"),
73 Path::new("a"),
74 Path::new("a/b"),
75 Path::new("a/c"),
76 ]
77 );
78 })
79}
80
81#[gpui::test]
82async fn test_descendent_entries(cx: &mut TestAppContext) {
83 let fs = FakeFs::new(cx.background());
84 fs.insert_tree(
85 "/root",
86 json!({
87 "a": "",
88 "b": {
89 "c": {
90 "d": ""
91 },
92 "e": {}
93 },
94 "f": "",
95 "g": {
96 "h": {}
97 },
98 "i": {
99 "j": {
100 "k": ""
101 },
102 "l": {
103
104 }
105 },
106 ".gitignore": "i/j\n",
107 }),
108 )
109 .await;
110
111 let tree = Worktree::local(
112 build_client(cx),
113 Path::new("/root"),
114 true,
115 fs,
116 Default::default(),
117 &mut cx.to_async(),
118 )
119 .await
120 .unwrap();
121 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
122 .await;
123
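    // The two flags control whether directories and ignored entries are
    // included in the traversal, respectively.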
124 tree.read_with(cx, |tree, _| {
125 assert_eq!(
126 tree.descendent_entries(false, false, Path::new("b"))
127 .map(|entry| entry.path.as_ref())
128 .collect::<Vec<_>>(),
129 vec![Path::new("b/c/d"),]
130 );
131 assert_eq!(
132 tree.descendent_entries(true, false, Path::new("b"))
133 .map(|entry| entry.path.as_ref())
134 .collect::<Vec<_>>(),
135 vec![
136 Path::new("b"),
137 Path::new("b/c"),
138 Path::new("b/c/d"),
139 Path::new("b/e"),
140 ]
141 );
142
143 assert_eq!(
144 tree.descendent_entries(false, false, Path::new("g"))
145 .map(|entry| entry.path.as_ref())
146 .collect::<Vec<_>>(),
147 Vec::<PathBuf>::new()
148 );
149 assert_eq!(
150 tree.descendent_entries(true, false, Path::new("g"))
151 .map(|entry| entry.path.as_ref())
152 .collect::<Vec<_>>(),
153 vec![Path::new("g"), Path::new("g/h"),]
154 );
155 });
156
157 // Expand gitignored directory.
158 tree.read_with(cx, |tree, _| {
159 tree.as_local()
160 .unwrap()
161 .refresh_entries_for_paths(vec![Path::new("i/j").into()])
162 })
163 .recv()
164 .await;
165
166 tree.read_with(cx, |tree, _| {
167 assert_eq!(
168 tree.descendent_entries(false, false, Path::new("i"))
169 .map(|entry| entry.path.as_ref())
170 .collect::<Vec<_>>(),
171 Vec::<PathBuf>::new()
172 );
173 assert_eq!(
174 tree.descendent_entries(false, true, Path::new("i"))
175 .map(|entry| entry.path.as_ref())
176 .collect::<Vec<_>>(),
177 vec![Path::new("i/j/k")]
178 );
179 assert_eq!(
180 tree.descendent_entries(true, false, Path::new("i"))
181 .map(|entry| entry.path.as_ref())
182 .collect::<Vec<_>>(),
183 vec![Path::new("i"), Path::new("i/l"),]
184 );
185 })
186}
187
188#[gpui::test(iterations = 10)]
189async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
190 let fs = FakeFs::new(cx.background());
191 fs.insert_tree(
192 "/root",
193 json!({
194 "lib": {
195 "a": {
196 "a.txt": ""
197 },
198 "b": {
199 "b.txt": ""
200 }
201 }
202 }),
203 )
204 .await;
205 fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
206 fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
207
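    // Each symlink points back at an ancestor directory, creating a cycle. The
    // scan should still terminate, recording the symlinks as entries without
    // following them indefinitely.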
208 let tree = Worktree::local(
209 build_client(cx),
210 Path::new("/root"),
211 true,
212 fs.clone(),
213 Default::default(),
214 &mut cx.to_async(),
215 )
216 .await
217 .unwrap();
218
219 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
220 .await;
221
222 tree.read_with(cx, |tree, _| {
223 assert_eq!(
224 tree.entries(false)
225 .map(|entry| entry.path.as_ref())
226 .collect::<Vec<_>>(),
227 vec![
228 Path::new(""),
229 Path::new("lib"),
230 Path::new("lib/a"),
231 Path::new("lib/a/a.txt"),
232 Path::new("lib/a/lib"),
233 Path::new("lib/b"),
234 Path::new("lib/b/b.txt"),
235 Path::new("lib/b/lib"),
236 ]
237 );
238 });
239
240 fs.rename(
241 Path::new("/root/lib/a/lib"),
242 Path::new("/root/lib/a/lib-2"),
243 Default::default(),
244 )
245 .await
246 .unwrap();
247 executor.run_until_parked();
248 tree.read_with(cx, |tree, _| {
249 assert_eq!(
250 tree.entries(false)
251 .map(|entry| entry.path.as_ref())
252 .collect::<Vec<_>>(),
253 vec![
254 Path::new(""),
255 Path::new("lib"),
256 Path::new("lib/a"),
257 Path::new("lib/a/a.txt"),
258 Path::new("lib/a/lib-2"),
259 Path::new("lib/b"),
260 Path::new("lib/b/b.txt"),
261 Path::new("lib/b/lib"),
262 ]
263 );
264 });
265}
266
267#[gpui::test]
268async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
269 let fs = FakeFs::new(cx.background());
270 fs.insert_tree(
271 "/root",
272 json!({
273 "dir1": {
274 "deps": {
275 // symlinks here
276 },
277 "src": {
278 "a.rs": "",
279 "b.rs": "",
280 },
281 },
282 "dir2": {
283 "src": {
284 "c.rs": "",
285 "d.rs": "",
286 }
287 },
288 "dir3": {
289 "deps": {},
290 "src": {
291 "e.rs": "",
292 "f.rs": "",
293 },
294 }
295 }),
296 )
297 .await;
298
299 // These symlinks point to directories outside of the worktree's root, dir1.
300 fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
301 .await;
302 fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
303 .await;
304
305 let tree = Worktree::local(
306 build_client(cx),
307 Path::new("/root/dir1"),
308 true,
309 fs.clone(),
310 Default::default(),
311 &mut cx.to_async(),
312 )
313 .await
314 .unwrap();
315
316 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
317 .await;
318
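    // Record the UpdatedEntries events so we can verify which paths are
    // reported as the symlinked directories are expanded.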
319 let tree_updates = Arc::new(Mutex::new(Vec::new()));
320 tree.update(cx, |_, cx| {
321 let tree_updates = tree_updates.clone();
322 cx.subscribe(&tree, move |_, _, event, _| {
323 if let Event::UpdatedEntries(update) = event {
324 tree_updates.lock().extend(
325 update
326 .iter()
327 .map(|(path, _, change)| (path.clone(), *change)),
328 );
329 }
330 })
331 .detach();
332 });
333
334 // The symlinked directories are not scanned by default.
335 tree.read_with(cx, |tree, _| {
336 assert_eq!(
337 tree.entries(true)
338 .map(|entry| (entry.path.as_ref(), entry.is_external))
339 .collect::<Vec<_>>(),
340 vec![
341 (Path::new(""), false),
342 (Path::new("deps"), false),
343 (Path::new("deps/dep-dir2"), true),
344 (Path::new("deps/dep-dir3"), true),
345 (Path::new("src"), false),
346 (Path::new("src/a.rs"), false),
347 (Path::new("src/b.rs"), false),
348 ]
349 );
350
351 assert_eq!(
352 tree.entry_for_path("deps/dep-dir2").unwrap().kind,
353 EntryKind::UnloadedDir
354 );
355 });
356
357 // Expand one of the symlinked directories.
358 tree.read_with(cx, |tree, _| {
359 tree.as_local()
360 .unwrap()
361 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
362 })
363 .recv()
364 .await;
365
366 // The expanded directory's contents are loaded. Subdirectories are
367 // not scanned yet.
368 tree.read_with(cx, |tree, _| {
369 assert_eq!(
370 tree.entries(true)
371 .map(|entry| (entry.path.as_ref(), entry.is_external))
372 .collect::<Vec<_>>(),
373 vec![
374 (Path::new(""), false),
375 (Path::new("deps"), false),
376 (Path::new("deps/dep-dir2"), true),
377 (Path::new("deps/dep-dir3"), true),
378 (Path::new("deps/dep-dir3/deps"), true),
379 (Path::new("deps/dep-dir3/src"), true),
380 (Path::new("src"), false),
381 (Path::new("src/a.rs"), false),
382 (Path::new("src/b.rs"), false),
383 ]
384 );
385 });
386 assert_eq!(
387 mem::take(&mut *tree_updates.lock()),
388 &[
389 (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
390 (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
391 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
392 ]
393 );
394
395 // Expand a subdirectory of one of the symlinked directories.
396 tree.read_with(cx, |tree, _| {
397 tree.as_local()
398 .unwrap()
399 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
400 })
401 .recv()
402 .await;
403
404 // The expanded subdirectory's contents are loaded.
405 tree.read_with(cx, |tree, _| {
406 assert_eq!(
407 tree.entries(true)
408 .map(|entry| (entry.path.as_ref(), entry.is_external))
409 .collect::<Vec<_>>(),
410 vec![
411 (Path::new(""), false),
412 (Path::new("deps"), false),
413 (Path::new("deps/dep-dir2"), true),
414 (Path::new("deps/dep-dir3"), true),
415 (Path::new("deps/dep-dir3/deps"), true),
416 (Path::new("deps/dep-dir3/src"), true),
417 (Path::new("deps/dep-dir3/src/e.rs"), true),
418 (Path::new("deps/dep-dir3/src/f.rs"), true),
419 (Path::new("src"), false),
420 (Path::new("src/a.rs"), false),
421 (Path::new("src/b.rs"), false),
422 ]
423 );
424 });
425
426 assert_eq!(
427 mem::take(&mut *tree_updates.lock()),
428 &[
429 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
430 (
431 Path::new("deps/dep-dir3/src/e.rs").into(),
432 PathChange::Loaded
433 ),
434 (
435 Path::new("deps/dep-dir3/src/f.rs").into(),
436 PathChange::Loaded
437 )
438 ]
439 );
440}
441
442#[gpui::test]
443async fn test_open_gitignored_files(cx: &mut TestAppContext) {
444 let fs = FakeFs::new(cx.background());
445 fs.insert_tree(
446 "/root",
447 json!({
448 ".gitignore": "node_modules\n",
449 "one": {
450 "node_modules": {
451 "a": {
452 "a1.js": "a1",
453 "a2.js": "a2",
454 },
455 "b": {
456 "b1.js": "b1",
457 "b2.js": "b2",
458 },
459 "c": {
460 "c1.js": "c1",
461 "c2.js": "c2",
462 }
463 },
464 },
465 "two": {
466 "x.js": "",
467 "y.js": "",
468 },
469 }),
470 )
471 .await;
472
473 let tree = Worktree::local(
474 build_client(cx),
475 Path::new("/root"),
476 true,
477 fs.clone(),
478 Default::default(),
479 &mut cx.to_async(),
480 )
481 .await
482 .unwrap();
483
484 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
485 .await;
486
487 tree.read_with(cx, |tree, _| {
488 assert_eq!(
489 tree.entries(true)
490 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
491 .collect::<Vec<_>>(),
492 vec![
493 (Path::new(""), false),
494 (Path::new(".gitignore"), false),
495 (Path::new("one"), false),
496 (Path::new("one/node_modules"), true),
497 (Path::new("two"), false),
498 (Path::new("two/x.js"), false),
499 (Path::new("two/y.js"), false),
500 ]
501 );
502 });
503
504 // Open a file that is nested inside of a gitignored directory that
505 // has not yet been expanded.
506 let prev_read_dir_count = fs.read_dir_call_count();
507 let buffer = tree
508 .update(cx, |tree, cx| {
509 tree.as_local_mut()
510 .unwrap()
511 .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
512 })
513 .await
514 .unwrap();
515
516 tree.read_with(cx, |tree, cx| {
517 assert_eq!(
518 tree.entries(true)
519 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
520 .collect::<Vec<_>>(),
521 vec![
522 (Path::new(""), false),
523 (Path::new(".gitignore"), false),
524 (Path::new("one"), false),
525 (Path::new("one/node_modules"), true),
526 (Path::new("one/node_modules/a"), true),
527 (Path::new("one/node_modules/b"), true),
528 (Path::new("one/node_modules/b/b1.js"), true),
529 (Path::new("one/node_modules/b/b2.js"), true),
530 (Path::new("one/node_modules/c"), true),
531 (Path::new("two"), false),
532 (Path::new("two/x.js"), false),
533 (Path::new("two/y.js"), false),
534 ]
535 );
536
537 assert_eq!(
538 buffer.read(cx).file().unwrap().path().as_ref(),
539 Path::new("one/node_modules/b/b1.js")
540 );
541
542 // Only the newly-expanded directories are scanned.
543 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
544 });
545
546 // Open another file in a different subdirectory of the same
547 // gitignored directory.
548 let prev_read_dir_count = fs.read_dir_call_count();
549 let buffer = tree
550 .update(cx, |tree, cx| {
551 tree.as_local_mut()
552 .unwrap()
553 .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
554 })
555 .await
556 .unwrap();
557
558 tree.read_with(cx, |tree, cx| {
559 assert_eq!(
560 tree.entries(true)
561 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
562 .collect::<Vec<_>>(),
563 vec![
564 (Path::new(""), false),
565 (Path::new(".gitignore"), false),
566 (Path::new("one"), false),
567 (Path::new("one/node_modules"), true),
568 (Path::new("one/node_modules/a"), true),
569 (Path::new("one/node_modules/a/a1.js"), true),
570 (Path::new("one/node_modules/a/a2.js"), true),
571 (Path::new("one/node_modules/b"), true),
572 (Path::new("one/node_modules/b/b1.js"), true),
573 (Path::new("one/node_modules/b/b2.js"), true),
574 (Path::new("one/node_modules/c"), true),
575 (Path::new("two"), false),
576 (Path::new("two/x.js"), false),
577 (Path::new("two/y.js"), false),
578 ]
579 );
580
581 assert_eq!(
582 buffer.read(cx).file().unwrap().path().as_ref(),
583 Path::new("one/node_modules/a/a2.js")
584 );
585
586 // Only the newly-expanded directory is scanned.
587 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
588 });
589
590 // No work happens when files and directories change within an unloaded directory.
591 let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
592 fs.create_dir("/root/one/node_modules/c/lib".as_ref())
593 .await
594 .unwrap();
595 cx.foreground().run_until_parked();
596 assert_eq!(
597 fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
598 0
599 );
600}
601
602#[gpui::test]
603async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
604 let fs = FakeFs::new(cx.background());
605 fs.insert_tree(
606 "/root",
607 json!({
608 ".gitignore": "node_modules\n",
609 "a": {
610 "a.js": "",
611 },
612 "b": {
613 "b.js": "",
614 },
615 "node_modules": {
616 "c": {
617 "c.js": "",
618 },
619 "d": {
620 "d.js": "",
621 "e": {
622 "e1.js": "",
623 "e2.js": "",
624 },
625 "f": {
626 "f1.js": "",
627 "f2.js": "",
628 }
629 },
630 },
631 }),
632 )
633 .await;
634
635 let tree = Worktree::local(
636 build_client(cx),
637 Path::new("/root"),
638 true,
639 fs.clone(),
640 Default::default(),
641 &mut cx.to_async(),
642 )
643 .await
644 .unwrap();
645
646 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
647 .await;
648
    // Load a path deep inside the gitignored directory, forcing some of its
    // subdirectories to be read, but not all.
651 let read_dir_count_1 = fs.read_dir_call_count();
652 tree.read_with(cx, |tree, _| {
653 tree.as_local()
654 .unwrap()
655 .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
656 })
657 .recv()
658 .await;
659
660 // Those subdirectories are now loaded.
661 tree.read_with(cx, |tree, _| {
662 assert_eq!(
663 tree.entries(true)
664 .map(|e| (e.path.as_ref(), e.is_ignored))
665 .collect::<Vec<_>>(),
666 &[
667 (Path::new(""), false),
668 (Path::new(".gitignore"), false),
669 (Path::new("a"), false),
670 (Path::new("a/a.js"), false),
671 (Path::new("b"), false),
672 (Path::new("b/b.js"), false),
673 (Path::new("node_modules"), true),
674 (Path::new("node_modules/c"), true),
675 (Path::new("node_modules/d"), true),
676 (Path::new("node_modules/d/d.js"), true),
677 (Path::new("node_modules/d/e"), true),
678 (Path::new("node_modules/d/f"), true),
679 ]
680 );
681 });
682 let read_dir_count_2 = fs.read_dir_call_count();
683 assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
684
    // Update the gitignore so that node_modules is no longer ignored,
    // but one of its subdirectories now is.
687 fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
688 .await
689 .unwrap();
690 cx.foreground().run_until_parked();
691
692 // All of the directories that are no longer ignored are now loaded.
693 tree.read_with(cx, |tree, _| {
694 assert_eq!(
695 tree.entries(true)
696 .map(|e| (e.path.as_ref(), e.is_ignored))
697 .collect::<Vec<_>>(),
698 &[
699 (Path::new(""), false),
700 (Path::new(".gitignore"), false),
701 (Path::new("a"), false),
702 (Path::new("a/a.js"), false),
703 (Path::new("b"), false),
704 (Path::new("b/b.js"), false),
705 // This directory is no longer ignored
706 (Path::new("node_modules"), false),
707 (Path::new("node_modules/c"), false),
708 (Path::new("node_modules/c/c.js"), false),
709 (Path::new("node_modules/d"), false),
710 (Path::new("node_modules/d/d.js"), false),
711 // This subdirectory is now ignored
712 (Path::new("node_modules/d/e"), true),
713 (Path::new("node_modules/d/f"), false),
714 (Path::new("node_modules/d/f/f1.js"), false),
715 (Path::new("node_modules/d/f/f2.js"), false),
716 ]
717 );
718 });
719
720 // Each of the newly-loaded directories is scanned only once.
721 let read_dir_count_3 = fs.read_dir_call_count();
722 assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
723}
724
725#[gpui::test(iterations = 10)]
726async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
727 let fs = FakeFs::new(cx.background());
728 fs.insert_tree(
729 "/root",
730 json!({
731 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
732 "tree": {
733 ".git": {},
734 ".gitignore": "ignored-dir\n",
735 "tracked-dir": {
736 "tracked-file1": "",
737 "ancestor-ignored-file1": "",
738 },
739 "ignored-dir": {
740 "ignored-file1": ""
741 }
742 }
743 }),
744 )
745 .await;
746
747 let tree = Worktree::local(
748 build_client(cx),
749 "/root/tree".as_ref(),
750 true,
751 fs.clone(),
752 Default::default(),
753 &mut cx.to_async(),
754 )
755 .await
756 .unwrap();
757 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
758 .await;
759
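    // Load the gitignored directory so that its entries are included in the snapshot.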
760 tree.read_with(cx, |tree, _| {
761 tree.as_local()
762 .unwrap()
763 .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
764 })
765 .recv()
766 .await;
767
768 cx.read(|cx| {
769 let tree = tree.read(cx);
770 assert!(
771 !tree
772 .entry_for_path("tracked-dir/tracked-file1")
773 .unwrap()
774 .is_ignored
775 );
776 assert!(
777 tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
778 .unwrap()
779 .is_ignored
780 );
781 assert!(
782 tree.entry_for_path("ignored-dir/ignored-file1")
783 .unwrap()
784 .is_ignored
785 );
786 });
787
788 fs.create_file(
789 "/root/tree/tracked-dir/tracked-file2".as_ref(),
790 Default::default(),
791 )
792 .await
793 .unwrap();
794 fs.create_file(
795 "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
796 Default::default(),
797 )
798 .await
799 .unwrap();
800 fs.create_file(
801 "/root/tree/ignored-dir/ignored-file2".as_ref(),
802 Default::default(),
803 )
804 .await
805 .unwrap();
806
807 cx.foreground().run_until_parked();
808 cx.read(|cx| {
809 let tree = tree.read(cx);
810 assert!(
811 !tree
812 .entry_for_path("tracked-dir/tracked-file2")
813 .unwrap()
814 .is_ignored
815 );
816 assert!(
817 tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
818 .unwrap()
819 .is_ignored
820 );
821 assert!(
822 tree.entry_for_path("ignored-dir/ignored-file2")
823 .unwrap()
824 .is_ignored
825 );
826 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
827 });
828}
829
830#[gpui::test]
831async fn test_write_file(cx: &mut TestAppContext) {
832 let dir = temp_tree(json!({
833 ".git": {},
834 ".gitignore": "ignored-dir\n",
835 "tracked-dir": {},
836 "ignored-dir": {}
837 }));
838
839 let tree = Worktree::local(
840 build_client(cx),
841 dir.path(),
842 true,
843 Arc::new(RealFs),
844 Default::default(),
845 &mut cx.to_async(),
846 )
847 .await
848 .unwrap();
849 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
850 .await;
851 tree.flush_fs_events(cx).await;
852
853 tree.update(cx, |tree, cx| {
854 tree.as_local().unwrap().write_file(
855 Path::new("tracked-dir/file.txt"),
856 "hello".into(),
857 Default::default(),
858 cx,
859 )
860 })
861 .await
862 .unwrap();
863 tree.update(cx, |tree, cx| {
864 tree.as_local().unwrap().write_file(
865 Path::new("ignored-dir/file.txt"),
866 "world".into(),
867 Default::default(),
868 cx,
869 )
870 })
871 .await
872 .unwrap();
873
874 tree.read_with(cx, |tree, _| {
875 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
876 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
877 assert!(!tracked.is_ignored);
878 assert!(ignored.is_ignored);
879 });
880}
881
882#[gpui::test]
883async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) {
884 let dir = temp_tree(json!({
885 ".git": {},
886 ".gitignore": "**/target\n/node_modules\n",
887 "target": {},
888 "node_modules": {
889 ".DS_Store": "",
890 "prettier": {
891 "package.json": "{}",
892 },
893 },
894 "src": {
895 ".DS_Store": "",
896 "foo": {
897 "foo.rs": "mod another;\n",
898 "another.rs": "// another",
899 },
900 "bar": {
901 "bar.rs": "// bar",
902 },
903 "lib.rs": "mod foo;\nmod bar;\n",
904 },
905 ".DS_Store": "",
906 }));
907 cx.update(|cx| {
908 cx.set_global(SettingsStore::test(cx));
909 Project::init_settings(cx);
910 cx.update_global::<SettingsStore, _, _>(|store, cx| {
911 store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
912 project_settings.scan_exclude_files =
913 vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()];
914 });
915 });
916 });
917
918 let tree = Worktree::local(
919 build_client(cx),
920 dir.path(),
921 true,
922 Arc::new(RealFs),
923 Default::default(),
924 &mut cx.to_async(),
925 )
926 .await
927 .unwrap();
928 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
929 .await;
930 tree.flush_fs_events(cx).await;
931
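    // A minimal check of the intended behavior, assuming that entries matched by
    // the `scan_exclude_files` patterns above are omitted from the scan entirely:
    // excluded paths should be absent, while the rest of the tree is present.
    tree.read_with(cx, |tree, _| {
        let paths = tree
            .entries(true)
            .map(|entry| entry.path.as_ref())
            .collect::<Vec<_>>();
        assert!(!paths.contains(&Path::new("src/.DS_Store")));
        assert!(!paths.contains(&Path::new("src/foo/foo.rs")));
        assert!(paths.contains(&Path::new("src/lib.rs")));
        assert!(paths.contains(&Path::new("src/bar/bar.rs")));
    });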
960}
961
962#[gpui::test(iterations = 30)]
963async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
964 let fs = FakeFs::new(cx.background());
965 fs.insert_tree(
966 "/root",
967 json!({
968 "b": {},
969 "c": {},
970 "d": {},
971 }),
972 )
973 .await;
974
975 let tree = Worktree::local(
976 build_client(cx),
977 "/root".as_ref(),
978 true,
979 fs,
980 Default::default(),
981 &mut cx.to_async(),
982 )
983 .await
984 .unwrap();
985
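    // Mirror the worktree's updates into a separate snapshot so that, at the
    // end of the test, we can verify the streamed updates produce the same
    // state as the worktree itself.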
986 let snapshot1 = tree.update(cx, |tree, cx| {
987 let tree = tree.as_local_mut().unwrap();
988 let snapshot = Arc::new(Mutex::new(tree.snapshot()));
989 let _ = tree.observe_updates(0, cx, {
990 let snapshot = snapshot.clone();
991 move |update| {
992 snapshot.lock().apply_remote_update(update).unwrap();
993 async { true }
994 }
995 });
996 snapshot
997 });
998
999 let entry = tree
1000 .update(cx, |tree, cx| {
1001 tree.as_local_mut()
1002 .unwrap()
1003 .create_entry("a/e".as_ref(), true, cx)
1004 })
1005 .await
1006 .unwrap();
1007 assert!(entry.is_dir());
1008
1009 cx.foreground().run_until_parked();
1010 tree.read_with(cx, |tree, _| {
1011 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
1012 });
1013
1014 let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
1015 assert_eq!(
1016 snapshot1.lock().entries(true).collect::<Vec<_>>(),
1017 snapshot2.entries(true).collect::<Vec<_>>()
1018 );
1019}
1020
1021#[gpui::test]
1022async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
1023 let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
1024
1025 let fs_fake = FakeFs::new(cx.background());
1026 fs_fake
1027 .insert_tree(
1028 "/root",
1029 json!({
1030 "a": {},
1031 }),
1032 )
1033 .await;
1034
1035 let tree_fake = Worktree::local(
1036 client_fake,
1037 "/root".as_ref(),
1038 true,
1039 fs_fake,
1040 Default::default(),
1041 &mut cx.to_async(),
1042 )
1043 .await
1044 .unwrap();
1045
1046 let entry = tree_fake
1047 .update(cx, |tree, cx| {
1048 tree.as_local_mut()
1049 .unwrap()
1050 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1051 })
1052 .await
1053 .unwrap();
1054 assert!(entry.is_file());
1055
1056 cx.foreground().run_until_parked();
1057 tree_fake.read_with(cx, |tree, _| {
1058 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1059 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1060 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1061 });
1062
1063 let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
1064
1065 let fs_real = Arc::new(RealFs);
1066 let temp_root = temp_tree(json!({
1067 "a": {}
1068 }));
1069
1070 let tree_real = Worktree::local(
1071 client_real,
1072 temp_root.path(),
1073 true,
1074 fs_real,
1075 Default::default(),
1076 &mut cx.to_async(),
1077 )
1078 .await
1079 .unwrap();
1080
1081 let entry = tree_real
1082 .update(cx, |tree, cx| {
1083 tree.as_local_mut()
1084 .unwrap()
1085 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1086 })
1087 .await
1088 .unwrap();
1089 assert!(entry.is_file());
1090
1091 cx.foreground().run_until_parked();
1092 tree_real.read_with(cx, |tree, _| {
1093 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1094 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1095 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1096 });
1097
    // Smallest change: create a file in a directory that already exists.
1099 let entry = tree_real
1100 .update(cx, |tree, cx| {
1101 tree.as_local_mut()
1102 .unwrap()
1103 .create_entry("a/b/c/e.txt".as_ref(), false, cx)
1104 })
1105 .await
1106 .unwrap();
1107 assert!(entry.is_file());
1108
1109 cx.foreground().run_until_parked();
1110 tree_real.read_with(cx, |tree, _| {
1111 assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
1112 });
1113
    // Largest change: create a file along with all of its missing parent directories.
1115 let entry = tree_real
1116 .update(cx, |tree, cx| {
1117 tree.as_local_mut()
1118 .unwrap()
1119 .create_entry("d/e/f/g.txt".as_ref(), false, cx)
1120 })
1121 .await
1122 .unwrap();
1123 assert!(entry.is_file());
1124
1125 cx.foreground().run_until_parked();
1126 tree_real.read_with(cx, |tree, _| {
1127 assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
1128 assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
1129 assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
1130 assert!(tree.entry_for_path("d/").unwrap().is_dir());
1131 });
1132}
1133
1134#[gpui::test(iterations = 100)]
1135async fn test_random_worktree_operations_during_initial_scan(
1136 cx: &mut TestAppContext,
1137 mut rng: StdRng,
1138) {
1139 let operations = env::var("OPERATIONS")
1140 .map(|o| o.parse().unwrap())
1141 .unwrap_or(5);
1142 let initial_entries = env::var("INITIAL_ENTRIES")
1143 .map(|o| o.parse().unwrap())
1144 .unwrap_or(20);
1145
1146 let root_dir = Path::new("/test");
1147 let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
1148 fs.as_fake().insert_tree(root_dir, json!({})).await;
1149 for _ in 0..initial_entries {
1150 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1151 }
1152 log::info!("generated initial tree");
1153
1154 let worktree = Worktree::local(
1155 build_client(cx),
1156 root_dir,
1157 true,
1158 fs.clone(),
1159 Default::default(),
1160 &mut cx.to_async(),
1161 )
1162 .await
1163 .unwrap();
1164
1165 let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
1166 let updates = Arc::new(Mutex::new(Vec::new()));
1167 worktree.update(cx, |tree, cx| {
1168 check_worktree_change_events(tree, cx);
1169
1170 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1171 let updates = updates.clone();
1172 move |update| {
1173 updates.lock().push(update);
1174 async { true }
1175 }
1176 });
1177 });
1178
1179 for _ in 0..operations {
1180 worktree
1181 .update(cx, |worktree, cx| {
1182 randomly_mutate_worktree(worktree, &mut rng, cx)
1183 })
1184 .await
1185 .log_err();
1186 worktree.read_with(cx, |tree, _| {
1187 tree.as_local().unwrap().snapshot().check_invariants(true)
1188 });
1189
1190 if rng.gen_bool(0.6) {
1191 snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
1192 }
1193 }
1194
1195 worktree
1196 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1197 .await;
1198
1199 cx.foreground().run_until_parked();
1200
1201 let final_snapshot = worktree.read_with(cx, |tree, _| {
1202 let tree = tree.as_local().unwrap();
1203 let snapshot = tree.snapshot();
1204 snapshot.check_invariants(true);
1205 snapshot
1206 });
1207
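    // Applying the captured updates to each intermediate snapshot should
    // converge it to the worktree's final state.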
1208 for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
1209 let mut updated_snapshot = snapshot.clone();
1210 for update in updates.lock().iter() {
1211 if update.scan_id >= updated_snapshot.scan_id() as u64 {
1212 updated_snapshot
1213 .apply_remote_update(update.clone())
1214 .unwrap();
1215 }
1216 }
1217
1218 assert_eq!(
1219 updated_snapshot.entries(true).collect::<Vec<_>>(),
1220 final_snapshot.entries(true).collect::<Vec<_>>(),
1221 "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
1222 );
1223 }
1224}
1225
1226#[gpui::test(iterations = 100)]
1227async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
1228 let operations = env::var("OPERATIONS")
1229 .map(|o| o.parse().unwrap())
1230 .unwrap_or(40);
1231 let initial_entries = env::var("INITIAL_ENTRIES")
1232 .map(|o| o.parse().unwrap())
1233 .unwrap_or(20);
1234
1235 let root_dir = Path::new("/test");
1236 let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
1237 fs.as_fake().insert_tree(root_dir, json!({})).await;
1238 for _ in 0..initial_entries {
1239 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1240 }
1241 log::info!("generated initial tree");
1242
1243 let worktree = Worktree::local(
1244 build_client(cx),
1245 root_dir,
1246 true,
1247 fs.clone(),
1248 Default::default(),
1249 &mut cx.to_async(),
1250 )
1251 .await
1252 .unwrap();
1253
1254 let updates = Arc::new(Mutex::new(Vec::new()));
1255 worktree.update(cx, |tree, cx| {
1256 check_worktree_change_events(tree, cx);
1257
1258 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1259 let updates = updates.clone();
1260 move |update| {
1261 updates.lock().push(update);
1262 async { true }
1263 }
1264 });
1265 });
1266
1267 worktree
1268 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1269 .await;
1270
1271 fs.as_fake().pause_events();
1272 let mut snapshots = Vec::new();
1273 let mut mutations_len = operations;
1274 while mutations_len > 1 {
1275 if rng.gen_bool(0.2) {
1276 worktree
1277 .update(cx, |worktree, cx| {
1278 randomly_mutate_worktree(worktree, &mut rng, cx)
1279 })
1280 .await
1281 .log_err();
1282 } else {
1283 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1284 }
1285
1286 let buffered_event_count = fs.as_fake().buffered_event_count();
1287 if buffered_event_count > 0 && rng.gen_bool(0.3) {
1288 let len = rng.gen_range(0..=buffered_event_count);
1289 log::info!("flushing {} events", len);
1290 fs.as_fake().flush_events(len);
1291 } else {
1292 randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
1293 mutations_len -= 1;
1294 }
1295
1296 cx.foreground().run_until_parked();
1297 if rng.gen_bool(0.2) {
1298 log::info!("storing snapshot {}", snapshots.len());
1299 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1300 snapshots.push(snapshot);
1301 }
1302 }
1303
1304 log::info!("quiescing");
1305 fs.as_fake().flush_events(usize::MAX);
1306 cx.foreground().run_until_parked();
1307
1308 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1309 snapshot.check_invariants(true);
1310 let expanded_paths = snapshot
1311 .expanded_entries()
1312 .map(|e| e.path.clone())
1313 .collect::<Vec<_>>();
1314
1315 {
1316 let new_worktree = Worktree::local(
1317 build_client(cx),
1318 root_dir,
1319 true,
1320 fs.clone(),
1321 Default::default(),
1322 &mut cx.to_async(),
1323 )
1324 .await
1325 .unwrap();
1326 new_worktree
1327 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1328 .await;
1329 new_worktree
1330 .update(cx, |tree, _| {
1331 tree.as_local_mut()
1332 .unwrap()
1333 .refresh_entries_for_paths(expanded_paths)
1334 })
1335 .recv()
1336 .await;
1337 let new_snapshot =
1338 new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1339 assert_eq!(
1340 snapshot.entries_without_ids(true),
1341 new_snapshot.entries_without_ids(true)
1342 );
1343 }
1344
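    // As above, replaying the captured updates on top of each stored snapshot
    // should reproduce the final snapshot, treating pending directories as
    // plain directories.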
1345 for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
1346 for update in updates.lock().iter() {
1347 if update.scan_id >= prev_snapshot.scan_id() as u64 {
1348 prev_snapshot.apply_remote_update(update.clone()).unwrap();
1349 }
1350 }
1351
1352 assert_eq!(
1353 prev_snapshot
1354 .entries(true)
1355 .map(ignore_pending_dir)
1356 .collect::<Vec<_>>(),
1357 snapshot
1358 .entries(true)
1359 .map(ignore_pending_dir)
1360 .collect::<Vec<_>>(),
1361 "wrong updates after snapshot {i}: {updates:#?}",
1362 );
1363 }
1364
1365 fn ignore_pending_dir(entry: &Entry) -> Entry {
1366 let mut entry = entry.clone();
1367 if entry.kind.is_dir() {
1368 entry.kind = EntryKind::Dir
1369 }
1370 entry
1371 }
1372}
1373
1374// The worktree's `UpdatedEntries` event can be used to follow along with
1375// all changes to the worktree's snapshot.
1376fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
1377 let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
1378 cx.subscribe(&cx.handle(), move |tree, _, event, _| {
1379 if let Event::UpdatedEntries(changes) = event {
1380 for (path, _, change_type) in changes.iter() {
1381 let entry = tree.entry_for_path(&path).cloned();
1382 let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
1383 Ok(ix) | Err(ix) => ix,
1384 };
1385 match change_type {
1386 PathChange::Added => entries.insert(ix, entry.unwrap()),
1387 PathChange::Removed => drop(entries.remove(ix)),
1388 PathChange::Updated => {
1389 let entry = entry.unwrap();
1390 let existing_entry = entries.get_mut(ix).unwrap();
1391 assert_eq!(existing_entry.path, entry.path);
1392 *existing_entry = entry;
1393 }
1394 PathChange::AddedOrUpdated | PathChange::Loaded => {
1395 let entry = entry.unwrap();
1396 if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
1397 *entries.get_mut(ix).unwrap() = entry;
1398 } else {
1399 entries.insert(ix, entry);
1400 }
1401 }
1402 }
1403 }
1404
1405 let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
1406 assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
1407 }
1408 })
1409 .detach();
1410}
1411
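// Perform one random mutation through the worktree's API: delete an entry,
// rename it to a random location, or create/overwrite a file or directory.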
1412fn randomly_mutate_worktree(
1413 worktree: &mut Worktree,
1414 rng: &mut impl Rng,
1415 cx: &mut ModelContext<Worktree>,
1416) -> Task<Result<()>> {
1417 log::info!("mutating worktree");
1418 let worktree = worktree.as_local_mut().unwrap();
1419 let snapshot = worktree.snapshot();
1420 let entry = snapshot.entries(false).choose(rng).unwrap();
1421
1422 match rng.gen_range(0_u32..100) {
1423 0..=33 if entry.path.as_ref() != Path::new("") => {
1424 log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
1425 worktree.delete_entry(entry.id, cx).unwrap()
1426 }
1427 ..=66 if entry.path.as_ref() != Path::new("") => {
1428 let other_entry = snapshot.entries(false).choose(rng).unwrap();
1429 let new_parent_path = if other_entry.is_dir() {
1430 other_entry.path.clone()
1431 } else {
1432 other_entry.path.parent().unwrap().into()
1433 };
1434 let mut new_path = new_parent_path.join(random_filename(rng));
1435 if new_path.starts_with(&entry.path) {
1436 new_path = random_filename(rng).into();
1437 }
1438
1439 log::info!(
1440 "renaming entry {:?} ({}) to {:?}",
1441 entry.path,
1442 entry.id.0,
1443 new_path
1444 );
1445 let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
1446 cx.foreground().spawn(async move {
1447 task.await?;
1448 Ok(())
1449 })
1450 }
1451 _ => {
1452 let task = if entry.is_dir() {
1453 let child_path = entry.path.join(random_filename(rng));
1454 let is_dir = rng.gen_bool(0.3);
1455 log::info!(
1456 "creating {} at {:?}",
1457 if is_dir { "dir" } else { "file" },
1458 child_path,
1459 );
1460 worktree.create_entry(child_path, is_dir, cx)
1461 } else {
1462 log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
1463 worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
1464 };
1465 cx.foreground().spawn(async move {
1466 task.await?;
1467 Ok(())
1468 })
1469 }
1470 }
1471}
1472
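// Perform one random mutation directly on the filesystem: create a file or
// directory, write out a `.gitignore`, or rename/delete an existing path.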
1473async fn randomly_mutate_fs(
1474 fs: &Arc<dyn Fs>,
1475 root_path: &Path,
1476 insertion_probability: f64,
1477 rng: &mut impl Rng,
1478) {
1479 log::info!("mutating fs");
1480 let mut files = Vec::new();
1481 let mut dirs = Vec::new();
1482 for path in fs.as_fake().paths(false) {
1483 if path.starts_with(root_path) {
1484 if fs.is_file(&path).await {
1485 files.push(path);
1486 } else {
1487 dirs.push(path);
1488 }
1489 }
1490 }
1491
1492 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
1493 let path = dirs.choose(rng).unwrap();
1494 let new_path = path.join(random_filename(rng));
1495
1496 if rng.gen() {
1497 log::info!(
1498 "creating dir {:?}",
1499 new_path.strip_prefix(root_path).unwrap()
1500 );
1501 fs.create_dir(&new_path).await.unwrap();
1502 } else {
1503 log::info!(
1504 "creating file {:?}",
1505 new_path.strip_prefix(root_path).unwrap()
1506 );
1507 fs.create_file(&new_path, Default::default()).await.unwrap();
1508 }
1509 } else if rng.gen_bool(0.05) {
1510 let ignore_dir_path = dirs.choose(rng).unwrap();
1511 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
1512
1513 let subdirs = dirs
1514 .iter()
1515 .filter(|d| d.starts_with(&ignore_dir_path))
1516 .cloned()
1517 .collect::<Vec<_>>();
1518 let subfiles = files
1519 .iter()
1520 .filter(|d| d.starts_with(&ignore_dir_path))
1521 .cloned()
1522 .collect::<Vec<_>>();
1523 let files_to_ignore = {
1524 let len = rng.gen_range(0..=subfiles.len());
1525 subfiles.choose_multiple(rng, len)
1526 };
1527 let dirs_to_ignore = {
1528 let len = rng.gen_range(0..subdirs.len());
1529 subdirs.choose_multiple(rng, len)
1530 };
1531
1532 let mut ignore_contents = String::new();
1533 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
1534 writeln!(
1535 ignore_contents,
1536 "{}",
1537 path_to_ignore
1538 .strip_prefix(&ignore_dir_path)
1539 .unwrap()
1540 .to_str()
1541 .unwrap()
1542 )
1543 .unwrap();
1544 }
1545 log::info!(
1546 "creating gitignore {:?} with contents:\n{}",
1547 ignore_path.strip_prefix(&root_path).unwrap(),
1548 ignore_contents
1549 );
1550 fs.save(
1551 &ignore_path,
1552 &ignore_contents.as_str().into(),
1553 Default::default(),
1554 )
1555 .await
1556 .unwrap();
1557 } else {
1558 let old_path = {
1559 let file_path = files.choose(rng);
1560 let dir_path = dirs[1..].choose(rng);
1561 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
1562 };
1563
1564 let is_rename = rng.gen();
1565 if is_rename {
1566 let new_path_parent = dirs
1567 .iter()
1568 .filter(|d| !d.starts_with(old_path))
1569 .choose(rng)
1570 .unwrap();
1571
1572 let overwrite_existing_dir =
1573 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
1574 let new_path = if overwrite_existing_dir {
1575 fs.remove_dir(
1576 &new_path_parent,
1577 RemoveOptions {
1578 recursive: true,
1579 ignore_if_not_exists: true,
1580 },
1581 )
1582 .await
1583 .unwrap();
1584 new_path_parent.to_path_buf()
1585 } else {
1586 new_path_parent.join(random_filename(rng))
1587 };
1588
1589 log::info!(
1590 "renaming {:?} to {}{:?}",
1591 old_path.strip_prefix(&root_path).unwrap(),
1592 if overwrite_existing_dir {
1593 "overwrite "
1594 } else {
1595 ""
1596 },
1597 new_path.strip_prefix(&root_path).unwrap()
1598 );
1599 fs.rename(
1600 &old_path,
1601 &new_path,
1602 fs::RenameOptions {
1603 overwrite: true,
1604 ignore_if_exists: true,
1605 },
1606 )
1607 .await
1608 .unwrap();
1609 } else if fs.is_file(&old_path).await {
1610 log::info!(
1611 "deleting file {:?}",
1612 old_path.strip_prefix(&root_path).unwrap()
1613 );
1614 fs.remove_file(old_path, Default::default()).await.unwrap();
1615 } else {
1616 log::info!(
1617 "deleting dir {:?}",
1618 old_path.strip_prefix(&root_path).unwrap()
1619 );
1620 fs.remove_dir(
1621 &old_path,
1622 RemoveOptions {
1623 recursive: true,
1624 ignore_if_not_exists: true,
1625 },
1626 )
1627 .await
1628 .unwrap();
1629 }
1630 }
1631}
1632
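// Generate a random six-character alphanumeric file name.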
1633fn random_filename(rng: &mut impl Rng) -> String {
1634 (0..6)
1635 .map(|_| rng.sample(rand::distributions::Alphanumeric))
1636 .map(char::from)
1637 .collect()
1638}
1639
1640#[gpui::test]
1641async fn test_rename_work_directory(cx: &mut TestAppContext) {
1642 let root = temp_tree(json!({
1643 "projects": {
1644 "project1": {
1645 "a": "",
1646 "b": "",
1647 }
1648 },
1649
1650 }));
1651 let root_path = root.path();
1652
1653 let tree = Worktree::local(
1654 build_client(cx),
1655 root_path,
1656 true,
1657 Arc::new(RealFs),
1658 Default::default(),
1659 &mut cx.to_async(),
1660 )
1661 .await
1662 .unwrap();
1663
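    // Set up a repository inside project1: commit `a`, leave `b` untracked,
    // then modify `a` on disk.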
1664 let repo = git_init(&root_path.join("projects/project1"));
1665 git_add("a", &repo);
1666 git_commit("init", &repo);
1667 std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
1668
1669 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1670 .await;
1671
1672 tree.flush_fs_events(cx).await;
1673
1674 cx.read(|cx| {
1675 let tree = tree.read(cx);
1676 let (work_dir, _) = tree.repositories().next().unwrap();
1677 assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
1678 assert_eq!(
1679 tree.status_for_file(Path::new("projects/project1/a")),
1680 Some(GitFileStatus::Modified)
1681 );
1682 assert_eq!(
1683 tree.status_for_file(Path::new("projects/project1/b")),
1684 Some(GitFileStatus::Added)
1685 );
1686 });
1687
1688 std::fs::rename(
1689 root_path.join("projects/project1"),
1690 root_path.join("projects/project2"),
1691 )
1692 .ok();
1693 tree.flush_fs_events(cx).await;
1694
1695 cx.read(|cx| {
1696 let tree = tree.read(cx);
1697 let (work_dir, _) = tree.repositories().next().unwrap();
1698 assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
1699 assert_eq!(
1700 tree.status_for_file(Path::new("projects/project2/a")),
1701 Some(GitFileStatus::Modified)
1702 );
1703 assert_eq!(
1704 tree.status_for_file(Path::new("projects/project2/b")),
1705 Some(GitFileStatus::Added)
1706 );
1707 });
1708}
1709
1710#[gpui::test]
1711async fn test_git_repository_for_path(cx: &mut TestAppContext) {
1712 let root = temp_tree(json!({
1713 "c.txt": "",
1714 "dir1": {
1715 ".git": {},
1716 "deps": {
1717 "dep1": {
1718 ".git": {},
1719 "src": {
1720 "a.txt": ""
1721 }
1722 }
1723 },
1724 "src": {
1725 "b.txt": ""
1726 }
1727 },
1728 }));
1729
1730 let tree = Worktree::local(
1731 build_client(cx),
1732 root.path(),
1733 true,
1734 Arc::new(RealFs),
1735 Default::default(),
1736 &mut cx.to_async(),
1737 )
1738 .await
1739 .unwrap();
1740
1741 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1742 .await;
1743 tree.flush_fs_events(cx).await;
1744
1745 tree.read_with(cx, |tree, _cx| {
1746 let tree = tree.as_local().unwrap();
1747
1748 assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
1749
1750 let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
1751 assert_eq!(
1752 entry
1753 .work_directory(tree)
1754 .map(|directory| directory.as_ref().to_owned()),
1755 Some(Path::new("dir1").to_owned())
1756 );
1757
1758 let entry = tree
1759 .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
1760 .unwrap();
1761 assert_eq!(
1762 entry
1763 .work_directory(tree)
1764 .map(|directory| directory.as_ref().to_owned()),
1765 Some(Path::new("dir1/deps/dep1").to_owned())
1766 );
1767
1768 let entries = tree.files(false, 0);
1769
1770 let paths_with_repos = tree
1771 .entries_with_repositories(entries)
1772 .map(|(entry, repo)| {
1773 (
1774 entry.path.as_ref(),
1775 repo.and_then(|repo| {
1776 repo.work_directory(&tree)
1777 .map(|work_directory| work_directory.0.to_path_buf())
1778 }),
1779 )
1780 })
1781 .collect::<Vec<_>>();
1782
1783 assert_eq!(
1784 paths_with_repos,
1785 &[
1786 (Path::new("c.txt"), None),
1787 (
1788 Path::new("dir1/deps/dep1/src/a.txt"),
1789 Some(Path::new("dir1/deps/dep1").into())
1790 ),
1791 (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
1792 ]
1793 );
1794 });
1795
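    // Record UpdatedGitRepositories events so we can check which work
    // directories are reported when the repository changes on disk.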
1796 let repo_update_events = Arc::new(Mutex::new(vec![]));
1797 tree.update(cx, |_, cx| {
1798 let repo_update_events = repo_update_events.clone();
1799 cx.subscribe(&tree, move |_, _, event, _| {
1800 if let Event::UpdatedGitRepositories(update) = event {
1801 repo_update_events.lock().push(update.clone());
1802 }
1803 })
1804 .detach();
1805 });
1806
1807 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
1808 tree.flush_fs_events(cx).await;
1809
1810 assert_eq!(
1811 repo_update_events.lock()[0]
1812 .iter()
1813 .map(|e| e.0.clone())
1814 .collect::<Vec<Arc<Path>>>(),
1815 vec![Path::new("dir1").into()]
1816 );
1817
1818 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
1819 tree.flush_fs_events(cx).await;
1820
1821 tree.read_with(cx, |tree, _cx| {
1822 let tree = tree.as_local().unwrap();
1823
1824 assert!(tree
1825 .repository_for_path("dir1/src/b.txt".as_ref())
1826 .is_none());
1827 });
1828}
1829
1830#[gpui::test]
1831async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
1832 const IGNORE_RULE: &'static str = "**/target";
1833
1834 let root = temp_tree(json!({
1835 "project": {
1836 "a.txt": "a",
1837 "b.txt": "bb",
1838 "c": {
1839 "d": {
1840 "e.txt": "eee"
1841 }
1842 },
1843 "f.txt": "ffff",
1844 "target": {
1845 "build_file": "???"
1846 },
1847 ".gitignore": IGNORE_RULE
1848 },
1849
1850 }));
1851
1852 const A_TXT: &'static str = "a.txt";
1853 const B_TXT: &'static str = "b.txt";
1854 const E_TXT: &'static str = "c/d/e.txt";
1855 const F_TXT: &'static str = "f.txt";
1856 const DOTGITIGNORE: &'static str = ".gitignore";
1857 const BUILD_FILE: &'static str = "target/build_file";
1858 let project_path = Path::new("project");
1859
1860 // Set up git repository before creating the worktree.
1861 let work_dir = root.path().join("project");
1862 let mut repo = git_init(work_dir.as_path());
1863 repo.add_ignore_rule(IGNORE_RULE).unwrap();
1864 git_add(A_TXT, &repo);
1865 git_add(E_TXT, &repo);
1866 git_add(DOTGITIGNORE, &repo);
1867 git_commit("Initial commit", &repo);
1868
1869 let tree = Worktree::local(
1870 build_client(cx),
1871 root.path(),
1872 true,
1873 Arc::new(RealFs),
1874 Default::default(),
1875 &mut cx.to_async(),
1876 )
1877 .await
1878 .unwrap();
1879
1880 tree.flush_fs_events(cx).await;
1881 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1882 .await;
1883 deterministic.run_until_parked();
1884
1885 // Check that the right git state is observed on startup
1886 tree.read_with(cx, |tree, _cx| {
1887 let snapshot = tree.snapshot();
1888 assert_eq!(snapshot.repositories().count(), 1);
1889 let (dir, _) = snapshot.repositories().next().unwrap();
1890 assert_eq!(dir.as_ref(), Path::new("project"));
1891
1892 assert_eq!(
1893 snapshot.status_for_file(project_path.join(B_TXT)),
1894 Some(GitFileStatus::Added)
1895 );
1896 assert_eq!(
1897 snapshot.status_for_file(project_path.join(F_TXT)),
1898 Some(GitFileStatus::Added)
1899 );
1900 });
1901
1902 // Modify a file in the working copy.
1903 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
1904 tree.flush_fs_events(cx).await;
1905 deterministic.run_until_parked();
1906
1907 // The worktree detects that the file's git status has changed.
1908 tree.read_with(cx, |tree, _cx| {
1909 let snapshot = tree.snapshot();
1910 assert_eq!(
1911 snapshot.status_for_file(project_path.join(A_TXT)),
1912 Some(GitFileStatus::Modified)
1913 );
1914 });
1915
1916 // Create a commit in the git repository.
1917 git_add(A_TXT, &repo);
1918 git_add(B_TXT, &repo);
1919 git_commit("Committing modified and added", &repo);
1920 tree.flush_fs_events(cx).await;
1921 deterministic.run_until_parked();
1922
1923 // The worktree detects that the files' git status have changed.
1924 tree.read_with(cx, |tree, _cx| {
1925 let snapshot = tree.snapshot();
1926 assert_eq!(
1927 snapshot.status_for_file(project_path.join(F_TXT)),
1928 Some(GitFileStatus::Added)
1929 );
1930 assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
1931 assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
1932 });
1933
1934 // Modify files in the working copy and perform git operations on other files.
1935 git_reset(0, &repo);
1936 git_remove_index(Path::new(B_TXT), &repo);
1937 git_stash(&mut repo);
1938 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
1939 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
1940 tree.flush_fs_events(cx).await;
1941 deterministic.run_until_parked();
1942
1943 // Check that more complex repo changes are tracked
1944 tree.read_with(cx, |tree, _cx| {
1945 let snapshot = tree.snapshot();
1946
1947 assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
1948 assert_eq!(
1949 snapshot.status_for_file(project_path.join(B_TXT)),
1950 Some(GitFileStatus::Added)
1951 );
1952 assert_eq!(
1953 snapshot.status_for_file(project_path.join(E_TXT)),
1954 Some(GitFileStatus::Modified)
1955 );
1956 });
1957
1958 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
1959 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
1960 std::fs::write(
1961 work_dir.join(DOTGITIGNORE),
1962 [IGNORE_RULE, "f.txt"].join("\n"),
1963 )
1964 .unwrap();
1965
1966 git_add(Path::new(DOTGITIGNORE), &repo);
1967 git_commit("Committing modified git ignore", &repo);
1968
1969 tree.flush_fs_events(cx).await;
1970 deterministic.run_until_parked();
1971
1972 let mut renamed_dir_name = "first_directory/second_directory";
1973 const RENAMED_FILE: &'static str = "rf.txt";
1974
1975 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
1976 std::fs::write(
1977 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
1978 "new-contents",
1979 )
1980 .unwrap();
1981
1982 tree.flush_fs_events(cx).await;
1983 deterministic.run_until_parked();
1984
1985 tree.read_with(cx, |tree, _cx| {
1986 let snapshot = tree.snapshot();
1987 assert_eq!(
1988 snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
1989 Some(GitFileStatus::Added)
1990 );
1991 });
1992
1993 renamed_dir_name = "new_first_directory/second_directory";
1994
1995 std::fs::rename(
1996 work_dir.join("first_directory"),
1997 work_dir.join("new_first_directory"),
1998 )
1999 .unwrap();
2000
2001 tree.flush_fs_events(cx).await;
2002 deterministic.run_until_parked();
2003
2004 tree.read_with(cx, |tree, _cx| {
2005 let snapshot = tree.snapshot();
2006
2007 assert_eq!(
2008 snapshot.status_for_file(
2009 project_path
2010 .join(Path::new(renamed_dir_name))
2011 .join(RENAMED_FILE)
2012 ),
2013 Some(GitFileStatus::Added)
2014 );
2015 });
2016}
2017
2018#[gpui::test]
2019async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
2020 let fs = FakeFs::new(cx.background());
2021 fs.insert_tree(
2022 "/root",
2023 json!({
2024 ".git": {},
2025 "a": {
2026 "b": {
2027 "c1.txt": "",
2028 "c2.txt": "",
2029 },
2030 "d": {
2031 "e1.txt": "",
2032 "e2.txt": "",
2033 "e3.txt": "",
2034 }
2035 },
2036 "f": {
2037 "no-status.txt": ""
2038 },
2039 "g": {
2040 "h1.txt": "",
2041 "h2.txt": ""
2042 },
2043
2044 }),
2045 )
2046 .await;
2047
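    // Seed the fake repository with statuses for individual files. The
    // assertions below expect each directory to take on the most significant
    // status among its descendants (conflict over modified over added).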
2048 fs.set_status_for_repo_via_git_operation(
2049 &Path::new("/root/.git"),
2050 &[
2051 (Path::new("a/b/c1.txt"), GitFileStatus::Added),
2052 (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
2053 (Path::new("g/h2.txt"), GitFileStatus::Conflict),
2054 ],
2055 );
2056
2057 let tree = Worktree::local(
2058 build_client(cx),
2059 Path::new("/root"),
2060 true,
2061 fs.clone(),
2062 Default::default(),
2063 &mut cx.to_async(),
2064 )
2065 .await
2066 .unwrap();
2067
2068 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2069 .await;
2070
2071 cx.foreground().run_until_parked();
2072 let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
2073
2074 check_propagated_statuses(
2075 &snapshot,
2076 &[
2077 (Path::new(""), Some(GitFileStatus::Conflict)),
2078 (Path::new("a"), Some(GitFileStatus::Modified)),
2079 (Path::new("a/b"), Some(GitFileStatus::Added)),
2080 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
2081 (Path::new("a/b/c2.txt"), None),
2082 (Path::new("a/d"), Some(GitFileStatus::Modified)),
2083 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2084 (Path::new("f"), None),
2085 (Path::new("f/no-status.txt"), None),
2086 (Path::new("g"), Some(GitFileStatus::Conflict)),
2087 (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
2088 ],
2089 );
2090
2091 check_propagated_statuses(
2092 &snapshot,
2093 &[
2094 (Path::new("a/b"), Some(GitFileStatus::Added)),
2095 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
2096 (Path::new("a/b/c2.txt"), None),
2097 (Path::new("a/d"), Some(GitFileStatus::Modified)),
2098 (Path::new("a/d/e1.txt"), None),
2099 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2100 (Path::new("f"), None),
2101 (Path::new("f/no-status.txt"), None),
2102 (Path::new("g"), Some(GitFileStatus::Conflict)),
2103 ],
2104 );
2105
2106 check_propagated_statuses(
2107 &snapshot,
2108 &[
2109 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
2110 (Path::new("a/b/c2.txt"), None),
2111 (Path::new("a/d/e1.txt"), None),
2112 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2113 (Path::new("f/no-status.txt"), None),
2114 ],
2115 );
2116
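    // Look up each expected path's entry, let the snapshot propagate directory
    // statuses onto them, and compare against the expectations.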
2117 #[track_caller]
2118 fn check_propagated_statuses(
2119 snapshot: &Snapshot,
2120 expected_statuses: &[(&Path, Option<GitFileStatus>)],
2121 ) {
2122 let mut entries = expected_statuses
2123 .iter()
2124 .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
2125 .collect::<Vec<_>>();
2126 snapshot.propagate_git_statuses(&mut entries);
2127 assert_eq!(
2128 entries
2129 .iter()
2130 .map(|e| (e.path.as_ref(), e.git_status))
2131 .collect::<Vec<_>>(),
2132 expected_statuses
2133 );
2134 }
2135}
2136
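// Build a test client whose HTTP requests all receive 404 responses.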
2137fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
2138 let http_client = FakeHttpClient::with_404_response();
2139 cx.read(|cx| Client::new(http_client, cx))
2140}
2141
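// Initialize a new git repository at the given path using the `git2` crate.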
2142#[track_caller]
2143fn git_init(path: &Path) -> git2::Repository {
2144 git2::Repository::init(path).expect("Failed to initialize git repository")
2145}
2146
2147#[track_caller]
2148fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
2149 let path = path.as_ref();
2150 let mut index = repo.index().expect("Failed to get index");
    index.add_path(path).expect("Failed to add path to index");
2152 index.write().expect("Failed to write index");
2153}
2154
2155#[track_caller]
2156fn git_remove_index(path: &Path, repo: &git2::Repository) {
2157 let mut index = repo.index().expect("Failed to get index");
    index.remove_path(path).expect("Failed to remove path from index");
2159 index.write().expect("Failed to write index");
2160}
2161
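// Commit the current index, using HEAD as the parent when it exists;
// otherwise create the initial commit.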
2162#[track_caller]
2163fn git_commit(msg: &'static str, repo: &git2::Repository) {
2164 use git2::Signature;
2165
2166 let signature = Signature::now("test", "test@zed.dev").unwrap();
2167 let oid = repo.index().unwrap().write_tree().unwrap();
2168 let tree = repo.find_tree(oid).unwrap();
2169 if let Some(head) = repo.head().ok() {
2170 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
2171
2172 let parent_commit = parent_obj.as_commit().unwrap();
2173
2174 repo.commit(
2175 Some("HEAD"),
2176 &signature,
2177 &signature,
2178 msg,
2179 &tree,
2180 &[parent_commit],
2181 )
2182 .expect("Failed to commit with parent");
2183 } else {
2184 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
2185 .expect("Failed to commit");
2186 }
2187}
2188
2189#[track_caller]
2190fn git_stash(repo: &mut git2::Repository) {
2191 use git2::Signature;
2192
2193 let signature = Signature::now("test", "test@zed.dev").unwrap();
2194 repo.stash_save(&signature, "N/A", None)
2195 .expect("Failed to stash");
2196}
2197
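// Soft-reset HEAD to one of the current commit's parents (offset 0 selects the
// first parent).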
2198#[track_caller]
2199fn git_reset(offset: usize, repo: &git2::Repository) {
2200 let head = repo.head().expect("Couldn't get repo head");
2201 let object = head.peel(git2::ObjectType::Commit).unwrap();
2202 let commit = object.as_commit().unwrap();
    let new_head = commit
        .parents()
        .nth(offset)
        .expect("Not enough history");
2211 repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
2212 .expect("Could not reset");
2213}
2214
2215#[allow(dead_code)]
2216#[track_caller]
2217fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
2218 repo.statuses(None)
2219 .unwrap()
2220 .iter()
2221 .map(|status| (status.path().unwrap().to_string(), status.status()))
2222 .collect()
2223}