1use crate::{
2 project_settings::ProjectSettings,
3 worktree::{Event, Snapshot, WorktreeModelHandle},
4 Entry, EntryKind, PathChange, Project, Worktree,
5};
6use anyhow::Result;
7use client::Client;
8use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
9use git::GITIGNORE;
10use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
11use parking_lot::Mutex;
12use postage::stream::Stream;
13use pretty_assertions::assert_eq;
14use rand::prelude::*;
15use serde_json::json;
16use settings::SettingsStore;
17use std::{
18 env,
19 fmt::Write,
20 mem,
21 path::{Path, PathBuf},
22 sync::Arc,
23};
24use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
25
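// Verifies that `entries` respects the `include_ignored` flag when traversing a
// tree that contains a gitignored file.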
26#[gpui::test]
27async fn test_traversal(cx: &mut TestAppContext) {
28 let fs = FakeFs::new(cx.background());
29 fs.insert_tree(
30 "/root",
31 json!({
32 ".gitignore": "a/b\n",
33 "a": {
34 "b": "",
35 "c": "",
36 }
37 }),
38 )
39 .await;
40
41 let tree = Worktree::local(
42 build_client(cx),
43 Path::new("/root"),
44 true,
45 fs,
46 Default::default(),
47 &mut cx.to_async(),
48 )
49 .await
50 .unwrap();
51 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
52 .await;
53
54 tree.read_with(cx, |tree, _| {
55 assert_eq!(
56 tree.entries(false)
57 .map(|entry| entry.path.as_ref())
58 .collect::<Vec<_>>(),
59 vec![
60 Path::new(""),
61 Path::new(".gitignore"),
62 Path::new("a"),
63 Path::new("a/c"),
64 ]
65 );
66 assert_eq!(
67 tree.entries(true)
68 .map(|entry| entry.path.as_ref())
69 .collect::<Vec<_>>(),
70 vec![
71 Path::new(""),
72 Path::new(".gitignore"),
73 Path::new("a"),
74 Path::new("a/b"),
75 Path::new("a/c"),
76 ]
77 );
78 })
79}
80
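// Verifies that `descendent_entries` yields the right paths for the
// `include_dirs` and `include_ignored` flags, both before and after a
// gitignored directory is expanded.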
81#[gpui::test]
82async fn test_descendent_entries(cx: &mut TestAppContext) {
83 let fs = FakeFs::new(cx.background());
84 fs.insert_tree(
85 "/root",
86 json!({
87 "a": "",
88 "b": {
89 "c": {
90 "d": ""
91 },
92 "e": {}
93 },
94 "f": "",
95 "g": {
96 "h": {}
97 },
98 "i": {
99 "j": {
100 "k": ""
101 },
102 "l": {
103
104 }
105 },
106 ".gitignore": "i/j\n",
107 }),
108 )
109 .await;
110
111 let tree = Worktree::local(
112 build_client(cx),
113 Path::new("/root"),
114 true,
115 fs,
116 Default::default(),
117 &mut cx.to_async(),
118 )
119 .await
120 .unwrap();
121 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
122 .await;
123
124 tree.read_with(cx, |tree, _| {
125 assert_eq!(
126 tree.descendent_entries(false, false, Path::new("b"))
127 .map(|entry| entry.path.as_ref())
128 .collect::<Vec<_>>(),
129 vec![Path::new("b/c/d"),]
130 );
131 assert_eq!(
132 tree.descendent_entries(true, false, Path::new("b"))
133 .map(|entry| entry.path.as_ref())
134 .collect::<Vec<_>>(),
135 vec![
136 Path::new("b"),
137 Path::new("b/c"),
138 Path::new("b/c/d"),
139 Path::new("b/e"),
140 ]
141 );
142
143 assert_eq!(
144 tree.descendent_entries(false, false, Path::new("g"))
145 .map(|entry| entry.path.as_ref())
146 .collect::<Vec<_>>(),
147 Vec::<PathBuf>::new()
148 );
149 assert_eq!(
150 tree.descendent_entries(true, false, Path::new("g"))
151 .map(|entry| entry.path.as_ref())
152 .collect::<Vec<_>>(),
153 vec![Path::new("g"), Path::new("g/h"),]
154 );
155 });
156
157 // Expand gitignored directory.
158 tree.read_with(cx, |tree, _| {
159 tree.as_local()
160 .unwrap()
161 .refresh_entries_for_paths(vec![Path::new("i/j").into()])
162 })
163 .recv()
164 .await;
165
166 tree.read_with(cx, |tree, _| {
167 assert_eq!(
168 tree.descendent_entries(false, false, Path::new("i"))
169 .map(|entry| entry.path.as_ref())
170 .collect::<Vec<_>>(),
171 Vec::<PathBuf>::new()
172 );
173 assert_eq!(
174 tree.descendent_entries(false, true, Path::new("i"))
175 .map(|entry| entry.path.as_ref())
176 .collect::<Vec<_>>(),
177 vec![Path::new("i/j/k")]
178 );
179 assert_eq!(
180 tree.descendent_entries(true, false, Path::new("i"))
181 .map(|entry| entry.path.as_ref())
182 .collect::<Vec<_>>(),
183 vec![Path::new("i"), Path::new("i/l"),]
184 );
185 })
186}
187
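// Symlinks that point back into their own ancestors must not send the scanner
// into a loop; the symlink entries themselves still appear in the tree.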
188#[gpui::test(iterations = 10)]
189async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
190 let fs = FakeFs::new(cx.background());
191 fs.insert_tree(
192 "/root",
193 json!({
194 "lib": {
195 "a": {
196 "a.txt": ""
197 },
198 "b": {
199 "b.txt": ""
200 }
201 }
202 }),
203 )
204 .await;
205 fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
206 fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
207
208 let tree = Worktree::local(
209 build_client(cx),
210 Path::new("/root"),
211 true,
212 fs.clone(),
213 Default::default(),
214 &mut cx.to_async(),
215 )
216 .await
217 .unwrap();
218
219 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
220 .await;
221
222 tree.read_with(cx, |tree, _| {
223 assert_eq!(
224 tree.entries(false)
225 .map(|entry| entry.path.as_ref())
226 .collect::<Vec<_>>(),
227 vec![
228 Path::new(""),
229 Path::new("lib"),
230 Path::new("lib/a"),
231 Path::new("lib/a/a.txt"),
232 Path::new("lib/a/lib"),
233 Path::new("lib/b"),
234 Path::new("lib/b/b.txt"),
235 Path::new("lib/b/lib"),
236 ]
237 );
238 });
239
240 fs.rename(
241 Path::new("/root/lib/a/lib"),
242 Path::new("/root/lib/a/lib-2"),
243 Default::default(),
244 )
245 .await
246 .unwrap();
247 executor.run_until_parked();
248 tree.read_with(cx, |tree, _| {
249 assert_eq!(
250 tree.entries(false)
251 .map(|entry| entry.path.as_ref())
252 .collect::<Vec<_>>(),
253 vec![
254 Path::new(""),
255 Path::new("lib"),
256 Path::new("lib/a"),
257 Path::new("lib/a/a.txt"),
258 Path::new("lib/a/lib-2"),
259 Path::new("lib/b"),
260 Path::new("lib/b/b.txt"),
261 Path::new("lib/b/lib"),
262 ]
263 );
264 });
265}
266
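// Symlinks that resolve outside the worktree root are reported as external,
// unloaded directories and are only scanned on demand.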
267#[gpui::test]
268async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
269 let fs = FakeFs::new(cx.background());
270 fs.insert_tree(
271 "/root",
272 json!({
273 "dir1": {
274 "deps": {
275 // symlinks here
276 },
277 "src": {
278 "a.rs": "",
279 "b.rs": "",
280 },
281 },
282 "dir2": {
283 "src": {
284 "c.rs": "",
285 "d.rs": "",
286 }
287 },
288 "dir3": {
289 "deps": {},
290 "src": {
291 "e.rs": "",
292 "f.rs": "",
293 },
294 }
295 }),
296 )
297 .await;
298
299 // These symlinks point to directories outside of the worktree's root, dir1.
300 fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
301 .await;
302 fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
303 .await;
304
305 let tree = Worktree::local(
306 build_client(cx),
307 Path::new("/root/dir1"),
308 true,
309 fs.clone(),
310 Default::default(),
311 &mut cx.to_async(),
312 )
313 .await
314 .unwrap();
315
316 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
317 .await;
318
319 let tree_updates = Arc::new(Mutex::new(Vec::new()));
320 tree.update(cx, |_, cx| {
321 let tree_updates = tree_updates.clone();
322 cx.subscribe(&tree, move |_, _, event, _| {
323 if let Event::UpdatedEntries(update) = event {
324 tree_updates.lock().extend(
325 update
326 .iter()
327 .map(|(path, _, change)| (path.clone(), *change)),
328 );
329 }
330 })
331 .detach();
332 });
333
334 // The symlinked directories are not scanned by default.
335 tree.read_with(cx, |tree, _| {
336 assert_eq!(
337 tree.entries(true)
338 .map(|entry| (entry.path.as_ref(), entry.is_external))
339 .collect::<Vec<_>>(),
340 vec![
341 (Path::new(""), false),
342 (Path::new("deps"), false),
343 (Path::new("deps/dep-dir2"), true),
344 (Path::new("deps/dep-dir3"), true),
345 (Path::new("src"), false),
346 (Path::new("src/a.rs"), false),
347 (Path::new("src/b.rs"), false),
348 ]
349 );
350
351 assert_eq!(
352 tree.entry_for_path("deps/dep-dir2").unwrap().kind,
353 EntryKind::UnloadedDir
354 );
355 });
356
357 // Expand one of the symlinked directories.
358 tree.read_with(cx, |tree, _| {
359 tree.as_local()
360 .unwrap()
361 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
362 })
363 .recv()
364 .await;
365
366 // The expanded directory's contents are loaded. Subdirectories are
367 // not scanned yet.
368 tree.read_with(cx, |tree, _| {
369 assert_eq!(
370 tree.entries(true)
371 .map(|entry| (entry.path.as_ref(), entry.is_external))
372 .collect::<Vec<_>>(),
373 vec![
374 (Path::new(""), false),
375 (Path::new("deps"), false),
376 (Path::new("deps/dep-dir2"), true),
377 (Path::new("deps/dep-dir3"), true),
378 (Path::new("deps/dep-dir3/deps"), true),
379 (Path::new("deps/dep-dir3/src"), true),
380 (Path::new("src"), false),
381 (Path::new("src/a.rs"), false),
382 (Path::new("src/b.rs"), false),
383 ]
384 );
385 });
386 assert_eq!(
387 mem::take(&mut *tree_updates.lock()),
388 &[
389 (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
390 (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
391 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
392 ]
393 );
394
395 // Expand a subdirectory of one of the symlinked directories.
396 tree.read_with(cx, |tree, _| {
397 tree.as_local()
398 .unwrap()
399 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
400 })
401 .recv()
402 .await;
403
404 // The expanded subdirectory's contents are loaded.
405 tree.read_with(cx, |tree, _| {
406 assert_eq!(
407 tree.entries(true)
408 .map(|entry| (entry.path.as_ref(), entry.is_external))
409 .collect::<Vec<_>>(),
410 vec![
411 (Path::new(""), false),
412 (Path::new("deps"), false),
413 (Path::new("deps/dep-dir2"), true),
414 (Path::new("deps/dep-dir3"), true),
415 (Path::new("deps/dep-dir3/deps"), true),
416 (Path::new("deps/dep-dir3/src"), true),
417 (Path::new("deps/dep-dir3/src/e.rs"), true),
418 (Path::new("deps/dep-dir3/src/f.rs"), true),
419 (Path::new("src"), false),
420 (Path::new("src/a.rs"), false),
421 (Path::new("src/b.rs"), false),
422 ]
423 );
424 });
425
426 assert_eq!(
427 mem::take(&mut *tree_updates.lock()),
428 &[
429 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
430 (
431 Path::new("deps/dep-dir3/src/e.rs").into(),
432 PathChange::Loaded
433 ),
434 (
435 Path::new("deps/dep-dir3/src/f.rs").into(),
436 PathChange::Loaded
437 )
438 ]
439 );
440}
441
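// Opening a buffer deep inside an unexpanded gitignored directory should load
// only the directories on the path to that file.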
442#[gpui::test]
443async fn test_open_gitignored_files(cx: &mut TestAppContext) {
444 let fs = FakeFs::new(cx.background());
445 fs.insert_tree(
446 "/root",
447 json!({
448 ".gitignore": "node_modules\n",
449 "one": {
450 "node_modules": {
451 "a": {
452 "a1.js": "a1",
453 "a2.js": "a2",
454 },
455 "b": {
456 "b1.js": "b1",
457 "b2.js": "b2",
458 },
459 "c": {
460 "c1.js": "c1",
461 "c2.js": "c2",
462 }
463 },
464 },
465 "two": {
466 "x.js": "",
467 "y.js": "",
468 },
469 }),
470 )
471 .await;
472
473 let tree = Worktree::local(
474 build_client(cx),
475 Path::new("/root"),
476 true,
477 fs.clone(),
478 Default::default(),
479 &mut cx.to_async(),
480 )
481 .await
482 .unwrap();
483
484 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
485 .await;
486
487 tree.read_with(cx, |tree, _| {
488 assert_eq!(
489 tree.entries(true)
490 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
491 .collect::<Vec<_>>(),
492 vec![
493 (Path::new(""), false),
494 (Path::new(".gitignore"), false),
495 (Path::new("one"), false),
496 (Path::new("one/node_modules"), true),
497 (Path::new("two"), false),
498 (Path::new("two/x.js"), false),
499 (Path::new("two/y.js"), false),
500 ]
501 );
502 });
503
504 // Open a file that is nested inside of a gitignored directory that
505 // has not yet been expanded.
506 let prev_read_dir_count = fs.read_dir_call_count();
507 let buffer = tree
508 .update(cx, |tree, cx| {
509 tree.as_local_mut()
510 .unwrap()
511 .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
512 })
513 .await
514 .unwrap();
515
516 tree.read_with(cx, |tree, cx| {
517 assert_eq!(
518 tree.entries(true)
519 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
520 .collect::<Vec<_>>(),
521 vec![
522 (Path::new(""), false),
523 (Path::new(".gitignore"), false),
524 (Path::new("one"), false),
525 (Path::new("one/node_modules"), true),
526 (Path::new("one/node_modules/a"), true),
527 (Path::new("one/node_modules/b"), true),
528 (Path::new("one/node_modules/b/b1.js"), true),
529 (Path::new("one/node_modules/b/b2.js"), true),
530 (Path::new("one/node_modules/c"), true),
531 (Path::new("two"), false),
532 (Path::new("two/x.js"), false),
533 (Path::new("two/y.js"), false),
534 ]
535 );
536
537 assert_eq!(
538 buffer.read(cx).file().unwrap().path().as_ref(),
539 Path::new("one/node_modules/b/b1.js")
540 );
541
542 // Only the newly-expanded directories are scanned.
543 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
544 });
545
546 // Open another file in a different subdirectory of the same
547 // gitignored directory.
548 let prev_read_dir_count = fs.read_dir_call_count();
549 let buffer = tree
550 .update(cx, |tree, cx| {
551 tree.as_local_mut()
552 .unwrap()
553 .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
554 })
555 .await
556 .unwrap();
557
558 tree.read_with(cx, |tree, cx| {
559 assert_eq!(
560 tree.entries(true)
561 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
562 .collect::<Vec<_>>(),
563 vec![
564 (Path::new(""), false),
565 (Path::new(".gitignore"), false),
566 (Path::new("one"), false),
567 (Path::new("one/node_modules"), true),
568 (Path::new("one/node_modules/a"), true),
569 (Path::new("one/node_modules/a/a1.js"), true),
570 (Path::new("one/node_modules/a/a2.js"), true),
571 (Path::new("one/node_modules/b"), true),
572 (Path::new("one/node_modules/b/b1.js"), true),
573 (Path::new("one/node_modules/b/b2.js"), true),
574 (Path::new("one/node_modules/c"), true),
575 (Path::new("two"), false),
576 (Path::new("two/x.js"), false),
577 (Path::new("two/y.js"), false),
578 ]
579 );
580
581 assert_eq!(
582 buffer.read(cx).file().unwrap().path().as_ref(),
583 Path::new("one/node_modules/a/a2.js")
584 );
585
586 // Only the newly-expanded directory is scanned.
587 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
588 });
589
590 // No work happens when files and directories change within an unloaded directory.
591 let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
592 fs.create_dir("/root/one/node_modules/c/lib".as_ref())
593 .await
594 .unwrap();
595 cx.foreground().run_until_parked();
596 assert_eq!(
597 fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
598 0
599 );
600}
601
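// When a .gitignore change un-ignores a directory, its previously unloaded
// contents should be scanned, and each newly-loaded directory only once.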
602#[gpui::test]
603async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
604 let fs = FakeFs::new(cx.background());
605 fs.insert_tree(
606 "/root",
607 json!({
608 ".gitignore": "node_modules\n",
609 "a": {
610 "a.js": "",
611 },
612 "b": {
613 "b.js": "",
614 },
615 "node_modules": {
616 "c": {
617 "c.js": "",
618 },
619 "d": {
620 "d.js": "",
621 "e": {
622 "e1.js": "",
623 "e2.js": "",
624 },
625 "f": {
626 "f1.js": "",
627 "f2.js": "",
628 }
629 },
630 },
631 }),
632 )
633 .await;
634
635 let tree = Worktree::local(
636 build_client(cx),
637 Path::new("/root"),
638 true,
639 fs.clone(),
640 Default::default(),
641 &mut cx.to_async(),
642 )
643 .await
644 .unwrap();
645
646 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
647 .await;
648
649 // Open a file within the gitignored directory, forcing some of its
650 // subdirectories to be read, but not all.
651 let read_dir_count_1 = fs.read_dir_call_count();
652 tree.read_with(cx, |tree, _| {
653 tree.as_local()
654 .unwrap()
655 .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
656 })
657 .recv()
658 .await;
659
660 // Those subdirectories are now loaded.
661 tree.read_with(cx, |tree, _| {
662 assert_eq!(
663 tree.entries(true)
664 .map(|e| (e.path.as_ref(), e.is_ignored))
665 .collect::<Vec<_>>(),
666 &[
667 (Path::new(""), false),
668 (Path::new(".gitignore"), false),
669 (Path::new("a"), false),
670 (Path::new("a/a.js"), false),
671 (Path::new("b"), false),
672 (Path::new("b/b.js"), false),
673 (Path::new("node_modules"), true),
674 (Path::new("node_modules/c"), true),
675 (Path::new("node_modules/d"), true),
676 (Path::new("node_modules/d/d.js"), true),
677 (Path::new("node_modules/d/e"), true),
678 (Path::new("node_modules/d/f"), true),
679 ]
680 );
681 });
682 let read_dir_count_2 = fs.read_dir_call_count();
683 assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
684
    // Update the gitignore so that node_modules is no longer ignored,
    // but one of its subdirectories still is.
687 fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
688 .await
689 .unwrap();
690 cx.foreground().run_until_parked();
691
692 // All of the directories that are no longer ignored are now loaded.
693 tree.read_with(cx, |tree, _| {
694 assert_eq!(
695 tree.entries(true)
696 .map(|e| (e.path.as_ref(), e.is_ignored))
697 .collect::<Vec<_>>(),
698 &[
699 (Path::new(""), false),
700 (Path::new(".gitignore"), false),
701 (Path::new("a"), false),
702 (Path::new("a/a.js"), false),
703 (Path::new("b"), false),
704 (Path::new("b/b.js"), false),
705 // This directory is no longer ignored
706 (Path::new("node_modules"), false),
707 (Path::new("node_modules/c"), false),
708 (Path::new("node_modules/c/c.js"), false),
709 (Path::new("node_modules/d"), false),
710 (Path::new("node_modules/d/d.js"), false),
711 // This subdirectory is now ignored
712 (Path::new("node_modules/d/e"), true),
713 (Path::new("node_modules/d/f"), false),
714 (Path::new("node_modules/d/f/f1.js"), false),
715 (Path::new("node_modules/d/f/f2.js"), false),
716 ]
717 );
718 });
719
720 // Each of the newly-loaded directories is scanned only once.
721 let read_dir_count_3 = fs.read_dir_call_count();
722 assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
723}
724
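// Files created after the initial scan should pick up their ignore state from
// both the worktree's own .gitignore and .gitignore files in ancestor
// directories.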
725#[gpui::test(iterations = 10)]
726async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
727 let fs = FakeFs::new(cx.background());
728 fs.insert_tree(
729 "/root",
730 json!({
731 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
732 "tree": {
733 ".git": {},
734 ".gitignore": "ignored-dir\n",
735 "tracked-dir": {
736 "tracked-file1": "",
737 "ancestor-ignored-file1": "",
738 },
739 "ignored-dir": {
740 "ignored-file1": ""
741 }
742 }
743 }),
744 )
745 .await;
746
747 let tree = Worktree::local(
748 build_client(cx),
749 "/root/tree".as_ref(),
750 true,
751 fs.clone(),
752 Default::default(),
753 &mut cx.to_async(),
754 )
755 .await
756 .unwrap();
757 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
758 .await;
759
760 tree.read_with(cx, |tree, _| {
761 tree.as_local()
762 .unwrap()
763 .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
764 })
765 .recv()
766 .await;
767
768 cx.read(|cx| {
769 let tree = tree.read(cx);
770 assert!(
771 !tree
772 .entry_for_path("tracked-dir/tracked-file1")
773 .unwrap()
774 .is_ignored
775 );
776 assert!(
777 tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
778 .unwrap()
779 .is_ignored
780 );
781 assert!(
782 tree.entry_for_path("ignored-dir/ignored-file1")
783 .unwrap()
784 .is_ignored
785 );
786 });
787
788 fs.create_file(
789 "/root/tree/tracked-dir/tracked-file2".as_ref(),
790 Default::default(),
791 )
792 .await
793 .unwrap();
794 fs.create_file(
795 "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
796 Default::default(),
797 )
798 .await
799 .unwrap();
800 fs.create_file(
801 "/root/tree/ignored-dir/ignored-file2".as_ref(),
802 Default::default(),
803 )
804 .await
805 .unwrap();
806
807 cx.foreground().run_until_parked();
808 cx.read(|cx| {
809 let tree = tree.read(cx);
810 assert!(
811 !tree
812 .entry_for_path("tracked-dir/tracked-file2")
813 .unwrap()
814 .is_ignored
815 );
816 assert!(
817 tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
818 .unwrap()
819 .is_ignored
820 );
821 assert!(
822 tree.entry_for_path("ignored-dir/ignored-file2")
823 .unwrap()
824 .is_ignored
825 );
826 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
827 });
828}
829
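// Files written through the worktree should get entries with the correct
// ignore state in both tracked and gitignored directories.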
830#[gpui::test]
831async fn test_write_file(cx: &mut TestAppContext) {
832 let dir = temp_tree(json!({
833 ".git": {},
834 ".gitignore": "ignored-dir\n",
835 "tracked-dir": {},
836 "ignored-dir": {}
837 }));
838
839 let tree = Worktree::local(
840 build_client(cx),
841 dir.path(),
842 true,
843 Arc::new(RealFs),
844 Default::default(),
845 &mut cx.to_async(),
846 )
847 .await
848 .unwrap();
849 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
850 .await;
851 tree.flush_fs_events(cx).await;
852
853 tree.update(cx, |tree, cx| {
854 tree.as_local().unwrap().write_file(
855 Path::new("tracked-dir/file.txt"),
856 "hello".into(),
857 Default::default(),
858 cx,
859 )
860 })
861 .await
862 .unwrap();
863 tree.update(cx, |tree, cx| {
864 tree.as_local().unwrap().write_file(
865 Path::new("ignored-dir/file.txt"),
866 "world".into(),
867 Default::default(),
868 cx,
869 )
870 })
871 .await
872 .unwrap();
873
874 tree.read_with(cx, |tree, _| {
875 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
876 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
877 assert!(!tracked.is_ignored);
878 assert!(ignored.is_ignored);
879 });
880}
881
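// Exercises the `scan_exclude_files` and `scan_include_files` project settings
// against a tree containing gitignored and excluded paths.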
882#[gpui::test]
883async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) {
884 let dir = temp_tree(json!({
885 ".git": {},
886 ".gitignore": "**/target\n/node_modules\n",
887 "target": {},
888 "node_modules": {
889 ".DS_Store": "",
890 "prettier": {
891 "package.json": "{}",
892 },
893 },
894 "src": {
895 ".DS_Store": "",
896 "foo": {
897 "foo.rs": "mod another;\n",
898 "another.rs": "// another",
899 },
900 "bar": {
901 "bar.rs": "// bar",
902 },
903 "lib.rs": "mod foo;\nmod bar;\n",
904 },
905 ".DS_Store": "",
906 }));
907 cx.update(|cx| {
908 cx.set_global(SettingsStore::test(cx));
909 Project::init_settings(cx);
910 cx.update_global::<SettingsStore, _, _>(|store, cx| {
911 store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
912 project_settings.scan_exclude_files =
913 vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()];
914 project_settings.scan_include_files = vec!["**/node_modules".to_string()];
915 });
916 });
917 });
918
919 let tree = Worktree::local(
920 build_client(cx),
921 dir.path(),
922 true,
923 Arc::new(RealFs),
924 Default::default(),
925 &mut cx.to_async(),
926 )
927 .await
928 .unwrap();
929 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
930 .await;
931 tree.flush_fs_events(cx).await;
932
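    // A minimal sketch of the expected assertions, assuming that paths matching
    // `scan_exclude_files` are omitted from the worktree entirely, while paths
    // matching `scan_include_files` are scanned even though they are gitignored.
    // The exact semantics of these settings are assumed here, not confirmed.
    tree.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path("src/foo").is_none());
        assert!(tree.entry_for_path("src/foo/foo.rs").is_none());
        assert!(tree.entry_for_path(".DS_Store").is_none());
        assert!(tree.entry_for_path("node_modules").is_some());
        assert!(tree
            .entry_for_path("node_modules/prettier/package.json")
            .is_some());
    });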
961}
962
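// Creating a directory while the initial scan may still be in progress must be
// reflected both in the local snapshot and in snapshots kept in sync via
// `observe_updates`.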
963#[gpui::test(iterations = 30)]
964async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
965 let fs = FakeFs::new(cx.background());
966 fs.insert_tree(
967 "/root",
968 json!({
969 "b": {},
970 "c": {},
971 "d": {},
972 }),
973 )
974 .await;
975
976 let tree = Worktree::local(
977 build_client(cx),
978 "/root".as_ref(),
979 true,
980 fs,
981 Default::default(),
982 &mut cx.to_async(),
983 )
984 .await
985 .unwrap();
986
987 let snapshot1 = tree.update(cx, |tree, cx| {
988 let tree = tree.as_local_mut().unwrap();
989 let snapshot = Arc::new(Mutex::new(tree.snapshot()));
990 let _ = tree.observe_updates(0, cx, {
991 let snapshot = snapshot.clone();
992 move |update| {
993 snapshot.lock().apply_remote_update(update).unwrap();
994 async { true }
995 }
996 });
997 snapshot
998 });
999
1000 let entry = tree
1001 .update(cx, |tree, cx| {
1002 tree.as_local_mut()
1003 .unwrap()
1004 .create_entry("a/e".as_ref(), true, cx)
1005 })
1006 .await
1007 .unwrap();
1008 assert!(entry.is_dir());
1009
1010 cx.foreground().run_until_parked();
1011 tree.read_with(cx, |tree, _| {
1012 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
1013 });
1014
1015 let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
1016 assert_eq!(
1017 snapshot1.lock().entries(true).collect::<Vec<_>>(),
1018 snapshot2.entries(true).collect::<Vec<_>>()
1019 );
1020}
1021
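// `create_entry` should create any missing parent directories, on both the
// fake and the real filesystem.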
1022#[gpui::test]
1023async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
1024 let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
1025
1026 let fs_fake = FakeFs::new(cx.background());
1027 fs_fake
1028 .insert_tree(
1029 "/root",
1030 json!({
1031 "a": {},
1032 }),
1033 )
1034 .await;
1035
1036 let tree_fake = Worktree::local(
1037 client_fake,
1038 "/root".as_ref(),
1039 true,
1040 fs_fake,
1041 Default::default(),
1042 &mut cx.to_async(),
1043 )
1044 .await
1045 .unwrap();
1046
1047 let entry = tree_fake
1048 .update(cx, |tree, cx| {
1049 tree.as_local_mut()
1050 .unwrap()
1051 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1052 })
1053 .await
1054 .unwrap();
1055 assert!(entry.is_file());
1056
1057 cx.foreground().run_until_parked();
1058 tree_fake.read_with(cx, |tree, _| {
1059 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1060 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1061 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1062 });
1063
1064 let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
1065
1066 let fs_real = Arc::new(RealFs);
1067 let temp_root = temp_tree(json!({
1068 "a": {}
1069 }));
1070
1071 let tree_real = Worktree::local(
1072 client_real,
1073 temp_root.path(),
1074 true,
1075 fs_real,
1076 Default::default(),
1077 &mut cx.to_async(),
1078 )
1079 .await
1080 .unwrap();
1081
1082 let entry = tree_real
1083 .update(cx, |tree, cx| {
1084 tree.as_local_mut()
1085 .unwrap()
1086 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1087 })
1088 .await
1089 .unwrap();
1090 assert!(entry.is_file());
1091
1092 cx.foreground().run_until_parked();
1093 tree_real.read_with(cx, |tree, _| {
1094 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1095 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1096 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1097 });
1098
    // Test the smallest change: create a file whose parent directories already exist.
1100 let entry = tree_real
1101 .update(cx, |tree, cx| {
1102 tree.as_local_mut()
1103 .unwrap()
1104 .create_entry("a/b/c/e.txt".as_ref(), false, cx)
1105 })
1106 .await
1107 .unwrap();
1108 assert!(entry.is_file());
1109
1110 cx.foreground().run_until_parked();
1111 tree_real.read_with(cx, |tree, _| {
1112 assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
1113 });
1114
    // Test the largest change: create a file along with all of its missing parent directories.
1116 let entry = tree_real
1117 .update(cx, |tree, cx| {
1118 tree.as_local_mut()
1119 .unwrap()
1120 .create_entry("d/e/f/g.txt".as_ref(), false, cx)
1121 })
1122 .await
1123 .unwrap();
1124 assert!(entry.is_file());
1125
1126 cx.foreground().run_until_parked();
1127 tree_real.read_with(cx, |tree, _| {
1128 assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
1129 assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
1130 assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
1131 assert!(tree.entry_for_path("d/").unwrap().is_dir());
1132 });
1133}
1134
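// Randomized test: mutates the worktree while the initial scan is in progress
// and checks that snapshots updated via `observe_updates` converge to the
// final scanned state.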
1135#[gpui::test(iterations = 100)]
1136async fn test_random_worktree_operations_during_initial_scan(
1137 cx: &mut TestAppContext,
1138 mut rng: StdRng,
1139) {
1140 let operations = env::var("OPERATIONS")
1141 .map(|o| o.parse().unwrap())
1142 .unwrap_or(5);
1143 let initial_entries = env::var("INITIAL_ENTRIES")
1144 .map(|o| o.parse().unwrap())
1145 .unwrap_or(20);
1146
1147 let root_dir = Path::new("/test");
1148 let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
1149 fs.as_fake().insert_tree(root_dir, json!({})).await;
1150 for _ in 0..initial_entries {
1151 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1152 }
1153 log::info!("generated initial tree");
1154
1155 let worktree = Worktree::local(
1156 build_client(cx),
1157 root_dir,
1158 true,
1159 fs.clone(),
1160 Default::default(),
1161 &mut cx.to_async(),
1162 )
1163 .await
1164 .unwrap();
1165
1166 let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
1167 let updates = Arc::new(Mutex::new(Vec::new()));
1168 worktree.update(cx, |tree, cx| {
1169 check_worktree_change_events(tree, cx);
1170
1171 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1172 let updates = updates.clone();
1173 move |update| {
1174 updates.lock().push(update);
1175 async { true }
1176 }
1177 });
1178 });
1179
1180 for _ in 0..operations {
1181 worktree
1182 .update(cx, |worktree, cx| {
1183 randomly_mutate_worktree(worktree, &mut rng, cx)
1184 })
1185 .await
1186 .log_err();
1187 worktree.read_with(cx, |tree, _| {
1188 tree.as_local().unwrap().snapshot().check_invariants(true)
1189 });
1190
1191 if rng.gen_bool(0.6) {
1192 snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
1193 }
1194 }
1195
1196 worktree
1197 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1198 .await;
1199
1200 cx.foreground().run_until_parked();
1201
1202 let final_snapshot = worktree.read_with(cx, |tree, _| {
1203 let tree = tree.as_local().unwrap();
1204 let snapshot = tree.snapshot();
1205 snapshot.check_invariants(true);
1206 snapshot
1207 });
1208
1209 for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
1210 let mut updated_snapshot = snapshot.clone();
1211 for update in updates.lock().iter() {
1212 if update.scan_id >= updated_snapshot.scan_id() as u64 {
1213 updated_snapshot
1214 .apply_remote_update(update.clone())
1215 .unwrap();
1216 }
1217 }
1218
1219 assert_eq!(
1220 updated_snapshot.entries(true).collect::<Vec<_>>(),
1221 final_snapshot.entries(true).collect::<Vec<_>>(),
1222 "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
1223 );
1224 }
1225}
1226
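// Randomized test: interleaves worktree and filesystem mutations with batched
// fs events, then checks snapshot invariants, agreement with a freshly scanned
// worktree, and convergence of remotely-updated snapshots.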
1227#[gpui::test(iterations = 100)]
1228async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
1229 let operations = env::var("OPERATIONS")
1230 .map(|o| o.parse().unwrap())
1231 .unwrap_or(40);
1232 let initial_entries = env::var("INITIAL_ENTRIES")
1233 .map(|o| o.parse().unwrap())
1234 .unwrap_or(20);
1235
1236 let root_dir = Path::new("/test");
1237 let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
1238 fs.as_fake().insert_tree(root_dir, json!({})).await;
1239 for _ in 0..initial_entries {
1240 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1241 }
1242 log::info!("generated initial tree");
1243
1244 let worktree = Worktree::local(
1245 build_client(cx),
1246 root_dir,
1247 true,
1248 fs.clone(),
1249 Default::default(),
1250 &mut cx.to_async(),
1251 )
1252 .await
1253 .unwrap();
1254
1255 let updates = Arc::new(Mutex::new(Vec::new()));
1256 worktree.update(cx, |tree, cx| {
1257 check_worktree_change_events(tree, cx);
1258
1259 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1260 let updates = updates.clone();
1261 move |update| {
1262 updates.lock().push(update);
1263 async { true }
1264 }
1265 });
1266 });
1267
1268 worktree
1269 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1270 .await;
1271
1272 fs.as_fake().pause_events();
1273 let mut snapshots = Vec::new();
1274 let mut mutations_len = operations;
1275 while mutations_len > 1 {
1276 if rng.gen_bool(0.2) {
1277 worktree
1278 .update(cx, |worktree, cx| {
1279 randomly_mutate_worktree(worktree, &mut rng, cx)
1280 })
1281 .await
1282 .log_err();
1283 } else {
1284 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1285 }
1286
1287 let buffered_event_count = fs.as_fake().buffered_event_count();
1288 if buffered_event_count > 0 && rng.gen_bool(0.3) {
1289 let len = rng.gen_range(0..=buffered_event_count);
1290 log::info!("flushing {} events", len);
1291 fs.as_fake().flush_events(len);
1292 } else {
1293 randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
1294 mutations_len -= 1;
1295 }
1296
1297 cx.foreground().run_until_parked();
1298 if rng.gen_bool(0.2) {
1299 log::info!("storing snapshot {}", snapshots.len());
1300 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1301 snapshots.push(snapshot);
1302 }
1303 }
1304
1305 log::info!("quiescing");
1306 fs.as_fake().flush_events(usize::MAX);
1307 cx.foreground().run_until_parked();
1308
1309 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1310 snapshot.check_invariants(true);
1311 let expanded_paths = snapshot
1312 .expanded_entries()
1313 .map(|e| e.path.clone())
1314 .collect::<Vec<_>>();
1315
1316 {
1317 let new_worktree = Worktree::local(
1318 build_client(cx),
1319 root_dir,
1320 true,
1321 fs.clone(),
1322 Default::default(),
1323 &mut cx.to_async(),
1324 )
1325 .await
1326 .unwrap();
1327 new_worktree
1328 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1329 .await;
1330 new_worktree
1331 .update(cx, |tree, _| {
1332 tree.as_local_mut()
1333 .unwrap()
1334 .refresh_entries_for_paths(expanded_paths)
1335 })
1336 .recv()
1337 .await;
1338 let new_snapshot =
1339 new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1340 assert_eq!(
1341 snapshot.entries_without_ids(true),
1342 new_snapshot.entries_without_ids(true)
1343 );
1344 }
1345
1346 for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
1347 for update in updates.lock().iter() {
1348 if update.scan_id >= prev_snapshot.scan_id() as u64 {
1349 prev_snapshot.apply_remote_update(update.clone()).unwrap();
1350 }
1351 }
1352
1353 assert_eq!(
1354 prev_snapshot
1355 .entries(true)
1356 .map(ignore_pending_dir)
1357 .collect::<Vec<_>>(),
1358 snapshot
1359 .entries(true)
1360 .map(ignore_pending_dir)
1361 .collect::<Vec<_>>(),
1362 "wrong updates after snapshot {i}: {updates:#?}",
1363 );
1364 }
1365
1366 fn ignore_pending_dir(entry: &Entry) -> Entry {
1367 let mut entry = entry.clone();
1368 if entry.kind.is_dir() {
1369 entry.kind = EntryKind::Dir
1370 }
1371 entry
1372 }
1373}
1374
1375// The worktree's `UpdatedEntries` event can be used to follow along with
1376// all changes to the worktree's snapshot.
1377fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
1378 let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
1379 cx.subscribe(&cx.handle(), move |tree, _, event, _| {
1380 if let Event::UpdatedEntries(changes) = event {
1381 for (path, _, change_type) in changes.iter() {
1382 let entry = tree.entry_for_path(&path).cloned();
1383 let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
1384 Ok(ix) | Err(ix) => ix,
1385 };
1386 match change_type {
1387 PathChange::Added => entries.insert(ix, entry.unwrap()),
1388 PathChange::Removed => drop(entries.remove(ix)),
1389 PathChange::Updated => {
1390 let entry = entry.unwrap();
1391 let existing_entry = entries.get_mut(ix).unwrap();
1392 assert_eq!(existing_entry.path, entry.path);
1393 *existing_entry = entry;
1394 }
1395 PathChange::AddedOrUpdated | PathChange::Loaded => {
1396 let entry = entry.unwrap();
1397 if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
1398 *entries.get_mut(ix).unwrap() = entry;
1399 } else {
1400 entries.insert(ix, entry);
1401 }
1402 }
1403 }
1404 }
1405
1406 let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
1407 assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
1408 }
1409 })
1410 .detach();
1411}
1412
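// Applies one random worktree operation: deleting, renaming, creating, or
// overwriting an entry.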
1413fn randomly_mutate_worktree(
1414 worktree: &mut Worktree,
1415 rng: &mut impl Rng,
1416 cx: &mut ModelContext<Worktree>,
1417) -> Task<Result<()>> {
1418 log::info!("mutating worktree");
1419 let worktree = worktree.as_local_mut().unwrap();
1420 let snapshot = worktree.snapshot();
1421 let entry = snapshot.entries(false).choose(rng).unwrap();
1422
1423 match rng.gen_range(0_u32..100) {
1424 0..=33 if entry.path.as_ref() != Path::new("") => {
1425 log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
1426 worktree.delete_entry(entry.id, cx).unwrap()
1427 }
1428 ..=66 if entry.path.as_ref() != Path::new("") => {
1429 let other_entry = snapshot.entries(false).choose(rng).unwrap();
1430 let new_parent_path = if other_entry.is_dir() {
1431 other_entry.path.clone()
1432 } else {
1433 other_entry.path.parent().unwrap().into()
1434 };
1435 let mut new_path = new_parent_path.join(random_filename(rng));
1436 if new_path.starts_with(&entry.path) {
1437 new_path = random_filename(rng).into();
1438 }
1439
1440 log::info!(
1441 "renaming entry {:?} ({}) to {:?}",
1442 entry.path,
1443 entry.id.0,
1444 new_path
1445 );
1446 let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
1447 cx.foreground().spawn(async move {
1448 task.await?;
1449 Ok(())
1450 })
1451 }
1452 _ => {
1453 let task = if entry.is_dir() {
1454 let child_path = entry.path.join(random_filename(rng));
1455 let is_dir = rng.gen_bool(0.3);
1456 log::info!(
1457 "creating {} at {:?}",
1458 if is_dir { "dir" } else { "file" },
1459 child_path,
1460 );
1461 worktree.create_entry(child_path, is_dir, cx)
1462 } else {
1463 log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
1464 worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
1465 };
1466 cx.foreground().spawn(async move {
1467 task.await?;
1468 Ok(())
1469 })
1470 }
1471 }
1472}
1473
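// Applies one random filesystem mutation under `root_path`: creating a file or
// directory, writing a .gitignore, renaming, or deleting a path.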
1474async fn randomly_mutate_fs(
1475 fs: &Arc<dyn Fs>,
1476 root_path: &Path,
1477 insertion_probability: f64,
1478 rng: &mut impl Rng,
1479) {
1480 log::info!("mutating fs");
1481 let mut files = Vec::new();
1482 let mut dirs = Vec::new();
1483 for path in fs.as_fake().paths(false) {
1484 if path.starts_with(root_path) {
1485 if fs.is_file(&path).await {
1486 files.push(path);
1487 } else {
1488 dirs.push(path);
1489 }
1490 }
1491 }
1492
1493 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
1494 let path = dirs.choose(rng).unwrap();
1495 let new_path = path.join(random_filename(rng));
1496
1497 if rng.gen() {
1498 log::info!(
1499 "creating dir {:?}",
1500 new_path.strip_prefix(root_path).unwrap()
1501 );
1502 fs.create_dir(&new_path).await.unwrap();
1503 } else {
1504 log::info!(
1505 "creating file {:?}",
1506 new_path.strip_prefix(root_path).unwrap()
1507 );
1508 fs.create_file(&new_path, Default::default()).await.unwrap();
1509 }
1510 } else if rng.gen_bool(0.05) {
1511 let ignore_dir_path = dirs.choose(rng).unwrap();
1512 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
1513
1514 let subdirs = dirs
1515 .iter()
1516 .filter(|d| d.starts_with(&ignore_dir_path))
1517 .cloned()
1518 .collect::<Vec<_>>();
1519 let subfiles = files
1520 .iter()
1521 .filter(|d| d.starts_with(&ignore_dir_path))
1522 .cloned()
1523 .collect::<Vec<_>>();
1524 let files_to_ignore = {
1525 let len = rng.gen_range(0..=subfiles.len());
1526 subfiles.choose_multiple(rng, len)
1527 };
1528 let dirs_to_ignore = {
1529 let len = rng.gen_range(0..subdirs.len());
1530 subdirs.choose_multiple(rng, len)
1531 };
1532
1533 let mut ignore_contents = String::new();
1534 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
1535 writeln!(
1536 ignore_contents,
1537 "{}",
1538 path_to_ignore
1539 .strip_prefix(&ignore_dir_path)
1540 .unwrap()
1541 .to_str()
1542 .unwrap()
1543 )
1544 .unwrap();
1545 }
1546 log::info!(
1547 "creating gitignore {:?} with contents:\n{}",
1548 ignore_path.strip_prefix(&root_path).unwrap(),
1549 ignore_contents
1550 );
1551 fs.save(
1552 &ignore_path,
1553 &ignore_contents.as_str().into(),
1554 Default::default(),
1555 )
1556 .await
1557 .unwrap();
1558 } else {
1559 let old_path = {
1560 let file_path = files.choose(rng);
1561 let dir_path = dirs[1..].choose(rng);
1562 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
1563 };
1564
1565 let is_rename = rng.gen();
1566 if is_rename {
1567 let new_path_parent = dirs
1568 .iter()
1569 .filter(|d| !d.starts_with(old_path))
1570 .choose(rng)
1571 .unwrap();
1572
1573 let overwrite_existing_dir =
1574 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
1575 let new_path = if overwrite_existing_dir {
1576 fs.remove_dir(
1577 &new_path_parent,
1578 RemoveOptions {
1579 recursive: true,
1580 ignore_if_not_exists: true,
1581 },
1582 )
1583 .await
1584 .unwrap();
1585 new_path_parent.to_path_buf()
1586 } else {
1587 new_path_parent.join(random_filename(rng))
1588 };
1589
1590 log::info!(
1591 "renaming {:?} to {}{:?}",
1592 old_path.strip_prefix(&root_path).unwrap(),
1593 if overwrite_existing_dir {
1594 "overwrite "
1595 } else {
1596 ""
1597 },
1598 new_path.strip_prefix(&root_path).unwrap()
1599 );
1600 fs.rename(
1601 &old_path,
1602 &new_path,
1603 fs::RenameOptions {
1604 overwrite: true,
1605 ignore_if_exists: true,
1606 },
1607 )
1608 .await
1609 .unwrap();
1610 } else if fs.is_file(&old_path).await {
1611 log::info!(
1612 "deleting file {:?}",
1613 old_path.strip_prefix(&root_path).unwrap()
1614 );
1615 fs.remove_file(old_path, Default::default()).await.unwrap();
1616 } else {
1617 log::info!(
1618 "deleting dir {:?}",
1619 old_path.strip_prefix(&root_path).unwrap()
1620 );
1621 fs.remove_dir(
1622 &old_path,
1623 RemoveOptions {
1624 recursive: true,
1625 ignore_if_not_exists: true,
1626 },
1627 )
1628 .await
1629 .unwrap();
1630 }
1631 }
1632}
1633
1634fn random_filename(rng: &mut impl Rng) -> String {
1635 (0..6)
1636 .map(|_| rng.sample(rand::distributions::Alphanumeric))
1637 .map(char::from)
1638 .collect()
1639}
1640
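// Renaming a repository's work directory should carry the repository and its
// file statuses over to the new path.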
1641#[gpui::test]
1642async fn test_rename_work_directory(cx: &mut TestAppContext) {
1643 let root = temp_tree(json!({
1644 "projects": {
1645 "project1": {
1646 "a": "",
1647 "b": "",
1648 }
1649 },
1650
1651 }));
1652 let root_path = root.path();
1653
1654 let tree = Worktree::local(
1655 build_client(cx),
1656 root_path,
1657 true,
1658 Arc::new(RealFs),
1659 Default::default(),
1660 &mut cx.to_async(),
1661 )
1662 .await
1663 .unwrap();
1664
1665 let repo = git_init(&root_path.join("projects/project1"));
1666 git_add("a", &repo);
1667 git_commit("init", &repo);
1668 std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
1669
1670 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1671 .await;
1672
1673 tree.flush_fs_events(cx).await;
1674
1675 cx.read(|cx| {
1676 let tree = tree.read(cx);
1677 let (work_dir, _) = tree.repositories().next().unwrap();
1678 assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
1679 assert_eq!(
1680 tree.status_for_file(Path::new("projects/project1/a")),
1681 Some(GitFileStatus::Modified)
1682 );
1683 assert_eq!(
1684 tree.status_for_file(Path::new("projects/project1/b")),
1685 Some(GitFileStatus::Added)
1686 );
1687 });
1688
1689 std::fs::rename(
1690 root_path.join("projects/project1"),
1691 root_path.join("projects/project2"),
1692 )
1693 .ok();
1694 tree.flush_fs_events(cx).await;
1695
1696 cx.read(|cx| {
1697 let tree = tree.read(cx);
1698 let (work_dir, _) = tree.repositories().next().unwrap();
1699 assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
1700 assert_eq!(
1701 tree.status_for_file(Path::new("projects/project2/a")),
1702 Some(GitFileStatus::Modified)
1703 );
1704 assert_eq!(
1705 tree.status_for_file(Path::new("projects/project2/b")),
1706 Some(GitFileStatus::Added)
1707 );
1708 });
1709}
1710
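// `repository_for_path` should resolve each path to its innermost containing
// repository, and repository changes should be reported via
// `UpdatedGitRepositories` events.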
1711#[gpui::test]
1712async fn test_git_repository_for_path(cx: &mut TestAppContext) {
1713 let root = temp_tree(json!({
1714 "c.txt": "",
1715 "dir1": {
1716 ".git": {},
1717 "deps": {
1718 "dep1": {
1719 ".git": {},
1720 "src": {
1721 "a.txt": ""
1722 }
1723 }
1724 },
1725 "src": {
1726 "b.txt": ""
1727 }
1728 },
1729 }));
1730
1731 let tree = Worktree::local(
1732 build_client(cx),
1733 root.path(),
1734 true,
1735 Arc::new(RealFs),
1736 Default::default(),
1737 &mut cx.to_async(),
1738 )
1739 .await
1740 .unwrap();
1741
1742 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1743 .await;
1744 tree.flush_fs_events(cx).await;
1745
1746 tree.read_with(cx, |tree, _cx| {
1747 let tree = tree.as_local().unwrap();
1748
1749 assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
1750
1751 let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
1752 assert_eq!(
1753 entry
1754 .work_directory(tree)
1755 .map(|directory| directory.as_ref().to_owned()),
1756 Some(Path::new("dir1").to_owned())
1757 );
1758
1759 let entry = tree
1760 .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
1761 .unwrap();
1762 assert_eq!(
1763 entry
1764 .work_directory(tree)
1765 .map(|directory| directory.as_ref().to_owned()),
1766 Some(Path::new("dir1/deps/dep1").to_owned())
1767 );
1768
1769 let entries = tree.files(false, 0);
1770
1771 let paths_with_repos = tree
1772 .entries_with_repositories(entries)
1773 .map(|(entry, repo)| {
1774 (
1775 entry.path.as_ref(),
1776 repo.and_then(|repo| {
1777 repo.work_directory(&tree)
1778 .map(|work_directory| work_directory.0.to_path_buf())
1779 }),
1780 )
1781 })
1782 .collect::<Vec<_>>();
1783
1784 assert_eq!(
1785 paths_with_repos,
1786 &[
1787 (Path::new("c.txt"), None),
1788 (
1789 Path::new("dir1/deps/dep1/src/a.txt"),
1790 Some(Path::new("dir1/deps/dep1").into())
1791 ),
1792 (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
1793 ]
1794 );
1795 });
1796
1797 let repo_update_events = Arc::new(Mutex::new(vec![]));
1798 tree.update(cx, |_, cx| {
1799 let repo_update_events = repo_update_events.clone();
1800 cx.subscribe(&tree, move |_, _, event, _| {
1801 if let Event::UpdatedGitRepositories(update) = event {
1802 repo_update_events.lock().push(update.clone());
1803 }
1804 })
1805 .detach();
1806 });
1807
1808 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
1809 tree.flush_fs_events(cx).await;
1810
1811 assert_eq!(
1812 repo_update_events.lock()[0]
1813 .iter()
1814 .map(|e| e.0.clone())
1815 .collect::<Vec<Arc<Path>>>(),
1816 vec![Path::new("dir1").into()]
1817 );
1818
1819 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
1820 tree.flush_fs_events(cx).await;
1821
1822 tree.read_with(cx, |tree, _cx| {
1823 let tree = tree.as_local().unwrap();
1824
1825 assert!(tree
1826 .repository_for_path("dir1/src/b.txt".as_ref())
1827 .is_none());
1828 });
1829}
1830
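// End-to-end git status test against a real repository: statuses must track
// edits, commits, resets, stashes, ignore-rule changes, and directory renames.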
1831#[gpui::test]
1832async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
1833 const IGNORE_RULE: &'static str = "**/target";
1834
1835 let root = temp_tree(json!({
1836 "project": {
1837 "a.txt": "a",
1838 "b.txt": "bb",
1839 "c": {
1840 "d": {
1841 "e.txt": "eee"
1842 }
1843 },
1844 "f.txt": "ffff",
1845 "target": {
1846 "build_file": "???"
1847 },
1848 ".gitignore": IGNORE_RULE
1849 },
1850
1851 }));
1852
1853 const A_TXT: &'static str = "a.txt";
1854 const B_TXT: &'static str = "b.txt";
1855 const E_TXT: &'static str = "c/d/e.txt";
1856 const F_TXT: &'static str = "f.txt";
1857 const DOTGITIGNORE: &'static str = ".gitignore";
1858 const BUILD_FILE: &'static str = "target/build_file";
1859 let project_path = Path::new("project");
1860
1861 // Set up git repository before creating the worktree.
1862 let work_dir = root.path().join("project");
1863 let mut repo = git_init(work_dir.as_path());
1864 repo.add_ignore_rule(IGNORE_RULE).unwrap();
1865 git_add(A_TXT, &repo);
1866 git_add(E_TXT, &repo);
1867 git_add(DOTGITIGNORE, &repo);
1868 git_commit("Initial commit", &repo);
1869
1870 let tree = Worktree::local(
1871 build_client(cx),
1872 root.path(),
1873 true,
1874 Arc::new(RealFs),
1875 Default::default(),
1876 &mut cx.to_async(),
1877 )
1878 .await
1879 .unwrap();
1880
1881 tree.flush_fs_events(cx).await;
1882 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1883 .await;
1884 deterministic.run_until_parked();
1885
1886 // Check that the right git state is observed on startup
1887 tree.read_with(cx, |tree, _cx| {
1888 let snapshot = tree.snapshot();
1889 assert_eq!(snapshot.repositories().count(), 1);
1890 let (dir, _) = snapshot.repositories().next().unwrap();
1891 assert_eq!(dir.as_ref(), Path::new("project"));
1892
1893 assert_eq!(
1894 snapshot.status_for_file(project_path.join(B_TXT)),
1895 Some(GitFileStatus::Added)
1896 );
1897 assert_eq!(
1898 snapshot.status_for_file(project_path.join(F_TXT)),
1899 Some(GitFileStatus::Added)
1900 );
1901 });
1902
1903 // Modify a file in the working copy.
1904 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
1905 tree.flush_fs_events(cx).await;
1906 deterministic.run_until_parked();
1907
1908 // The worktree detects that the file's git status has changed.
1909 tree.read_with(cx, |tree, _cx| {
1910 let snapshot = tree.snapshot();
1911 assert_eq!(
1912 snapshot.status_for_file(project_path.join(A_TXT)),
1913 Some(GitFileStatus::Modified)
1914 );
1915 });
1916
1917 // Create a commit in the git repository.
1918 git_add(A_TXT, &repo);
1919 git_add(B_TXT, &repo);
1920 git_commit("Committing modified and added", &repo);
1921 tree.flush_fs_events(cx).await;
1922 deterministic.run_until_parked();
1923
1924 // The worktree detects that the files' git status have changed.
1925 tree.read_with(cx, |tree, _cx| {
1926 let snapshot = tree.snapshot();
1927 assert_eq!(
1928 snapshot.status_for_file(project_path.join(F_TXT)),
1929 Some(GitFileStatus::Added)
1930 );
1931 assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
1932 assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
1933 });
1934
1935 // Modify files in the working copy and perform git operations on other files.
1936 git_reset(0, &repo);
1937 git_remove_index(Path::new(B_TXT), &repo);
1938 git_stash(&mut repo);
1939 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
1940 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
1941 tree.flush_fs_events(cx).await;
1942 deterministic.run_until_parked();
1943
1944 // Check that more complex repo changes are tracked
1945 tree.read_with(cx, |tree, _cx| {
1946 let snapshot = tree.snapshot();
1947
1948 assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
1949 assert_eq!(
1950 snapshot.status_for_file(project_path.join(B_TXT)),
1951 Some(GitFileStatus::Added)
1952 );
1953 assert_eq!(
1954 snapshot.status_for_file(project_path.join(E_TXT)),
1955 Some(GitFileStatus::Modified)
1956 );
1957 });
1958
1959 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
1960 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
1961 std::fs::write(
1962 work_dir.join(DOTGITIGNORE),
1963 [IGNORE_RULE, "f.txt"].join("\n"),
1964 )
1965 .unwrap();
1966
1967 git_add(Path::new(DOTGITIGNORE), &repo);
1968 git_commit("Committing modified git ignore", &repo);
1969
1970 tree.flush_fs_events(cx).await;
1971 deterministic.run_until_parked();
1972
1973 let mut renamed_dir_name = "first_directory/second_directory";
1974 const RENAMED_FILE: &'static str = "rf.txt";
1975
1976 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
1977 std::fs::write(
1978 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
1979 "new-contents",
1980 )
1981 .unwrap();
1982
1983 tree.flush_fs_events(cx).await;
1984 deterministic.run_until_parked();
1985
1986 tree.read_with(cx, |tree, _cx| {
1987 let snapshot = tree.snapshot();
1988 assert_eq!(
1989 snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
1990 Some(GitFileStatus::Added)
1991 );
1992 });
1993
1994 renamed_dir_name = "new_first_directory/second_directory";
1995
1996 std::fs::rename(
1997 work_dir.join("first_directory"),
1998 work_dir.join("new_first_directory"),
1999 )
2000 .unwrap();
2001
2002 tree.flush_fs_events(cx).await;
2003 deterministic.run_until_parked();
2004
2005 tree.read_with(cx, |tree, _cx| {
2006 let snapshot = tree.snapshot();
2007
2008 assert_eq!(
2009 snapshot.status_for_file(
2010 project_path
2011 .join(Path::new(renamed_dir_name))
2012 .join(RENAMED_FILE)
2013 ),
2014 Some(GitFileStatus::Added)
2015 );
2016 });
2017}
2018
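// Git statuses should propagate from files to their ancestor directories, with
// conflicts taking precedence over modifications, and modifications over
// additions.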
2019#[gpui::test]
2020async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
2021 let fs = FakeFs::new(cx.background());
2022 fs.insert_tree(
2023 "/root",
2024 json!({
2025 ".git": {},
2026 "a": {
2027 "b": {
2028 "c1.txt": "",
2029 "c2.txt": "",
2030 },
2031 "d": {
2032 "e1.txt": "",
2033 "e2.txt": "",
2034 "e3.txt": "",
2035 }
2036 },
2037 "f": {
2038 "no-status.txt": ""
2039 },
2040 "g": {
2041 "h1.txt": "",
2042 "h2.txt": ""
2043 },
2044
2045 }),
2046 )
2047 .await;
2048
2049 fs.set_status_for_repo_via_git_operation(
2050 &Path::new("/root/.git"),
2051 &[
2052 (Path::new("a/b/c1.txt"), GitFileStatus::Added),
2053 (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
2054 (Path::new("g/h2.txt"), GitFileStatus::Conflict),
2055 ],
2056 );
2057
2058 let tree = Worktree::local(
2059 build_client(cx),
2060 Path::new("/root"),
2061 true,
2062 fs.clone(),
2063 Default::default(),
2064 &mut cx.to_async(),
2065 )
2066 .await
2067 .unwrap();
2068
2069 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2070 .await;
2071
2072 cx.foreground().run_until_parked();
2073 let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
2074
2075 check_propagated_statuses(
2076 &snapshot,
2077 &[
2078 (Path::new(""), Some(GitFileStatus::Conflict)),
2079 (Path::new("a"), Some(GitFileStatus::Modified)),
2080 (Path::new("a/b"), Some(GitFileStatus::Added)),
2081 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
2082 (Path::new("a/b/c2.txt"), None),
2083 (Path::new("a/d"), Some(GitFileStatus::Modified)),
2084 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2085 (Path::new("f"), None),
2086 (Path::new("f/no-status.txt"), None),
2087 (Path::new("g"), Some(GitFileStatus::Conflict)),
2088 (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
2089 ],
2090 );
2091
2092 check_propagated_statuses(
2093 &snapshot,
2094 &[
2095 (Path::new("a/b"), Some(GitFileStatus::Added)),
2096 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
2097 (Path::new("a/b/c2.txt"), None),
2098 (Path::new("a/d"), Some(GitFileStatus::Modified)),
2099 (Path::new("a/d/e1.txt"), None),
2100 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2101 (Path::new("f"), None),
2102 (Path::new("f/no-status.txt"), None),
2103 (Path::new("g"), Some(GitFileStatus::Conflict)),
2104 ],
2105 );
2106
2107 check_propagated_statuses(
2108 &snapshot,
2109 &[
2110 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
2111 (Path::new("a/b/c2.txt"), None),
2112 (Path::new("a/d/e1.txt"), None),
2113 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2114 (Path::new("f/no-status.txt"), None),
2115 ],
2116 );
2117
2118 #[track_caller]
2119 fn check_propagated_statuses(
2120 snapshot: &Snapshot,
2121 expected_statuses: &[(&Path, Option<GitFileStatus>)],
2122 ) {
2123 let mut entries = expected_statuses
2124 .iter()
2125 .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
2126 .collect::<Vec<_>>();
2127 snapshot.propagate_git_statuses(&mut entries);
2128 assert_eq!(
2129 entries
2130 .iter()
2131 .map(|e| (e.path.as_ref(), e.git_status))
2132 .collect::<Vec<_>>(),
2133 expected_statuses
2134 );
2135 }
2136}
2137
2138fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
2139 let http_client = FakeHttpClient::with_404_response();
2140 cx.read(|cx| Client::new(http_client, cx))
2141}
2142
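// Git helpers used by the tests above, implemented with the git2 crate.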
2143#[track_caller]
2144fn git_init(path: &Path) -> git2::Repository {
2145 git2::Repository::init(path).expect("Failed to initialize git repository")
2146}
2147
2148#[track_caller]
2149fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
2150 let path = path.as_ref();
2151 let mut index = repo.index().expect("Failed to get index");
    index.add_path(path).expect("Failed to add path to index");
2153 index.write().expect("Failed to write index");
2154}
2155
2156#[track_caller]
2157fn git_remove_index(path: &Path, repo: &git2::Repository) {
2158 let mut index = repo.index().expect("Failed to get index");
    index.remove_path(path).expect("Failed to remove path from index");
2160 index.write().expect("Failed to write index");
2161}
2162
2163#[track_caller]
2164fn git_commit(msg: &'static str, repo: &git2::Repository) {
2165 use git2::Signature;
2166
2167 let signature = Signature::now("test", "test@zed.dev").unwrap();
2168 let oid = repo.index().unwrap().write_tree().unwrap();
2169 let tree = repo.find_tree(oid).unwrap();
    if let Ok(head) = repo.head() {
2171 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
2172
2173 let parent_commit = parent_obj.as_commit().unwrap();
2174
2175 repo.commit(
2176 Some("HEAD"),
2177 &signature,
2178 &signature,
2179 msg,
2180 &tree,
2181 &[parent_commit],
2182 )
2183 .expect("Failed to commit with parent");
2184 } else {
2185 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
2186 .expect("Failed to commit");
2187 }
2188}
2189
2190#[track_caller]
2191fn git_stash(repo: &mut git2::Repository) {
2192 use git2::Signature;
2193
2194 let signature = Signature::now("test", "test@zed.dev").unwrap();
2195 repo.stash_save(&signature, "N/A", None)
2196 .expect("Failed to stash");
2197}
2198
2199#[track_caller]
2200fn git_reset(offset: usize, repo: &git2::Repository) {
2201 let head = repo.head().expect("Couldn't get repo head");
2202 let object = head.peel(git2::ObjectType::Commit).unwrap();
2203 let commit = object.as_commit().unwrap();
    let new_head = commit
        .parents()
        .nth(offset)
        .expect("Not enough history");
2212 repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
2213 .expect("Could not reset");
2214}
2215
2216#[allow(dead_code)]
2217#[track_caller]
2218fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
2219 repo.statuses(None)
2220 .unwrap()
2221 .iter()
2222 .map(|status| (status.path().unwrap().to_string(), status.status()))
2223 .collect()
2224}