1use crate::{
2 project_settings::ProjectSettings,
3 worktree::{Event, Snapshot, WorktreeModelHandle},
4 Entry, EntryKind, PathChange, Project, Worktree,
5};
6use anyhow::Result;
7use client::Client;
8use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
9use git::GITIGNORE;
10use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
11use parking_lot::Mutex;
12use postage::stream::Stream;
13use pretty_assertions::assert_eq;
14use rand::prelude::*;
15use serde_json::json;
16use settings::SettingsStore;
17use std::{
18 env,
19 fmt::Write,
20 mem,
21 path::{Path, PathBuf},
22 sync::Arc,
23};
24use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
25
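// `entries(include_ignored)` walks the worktree in path order; with `false` the
// gitignored `a/b` entry is skipped, with `true` it is included.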
26#[gpui::test]
27async fn test_traversal(cx: &mut TestAppContext) {
28 let fs = FakeFs::new(cx.background());
29 fs.insert_tree(
30 "/root",
31 json!({
32 ".gitignore": "a/b\n",
33 "a": {
34 "b": "",
35 "c": "",
36 }
37 }),
38 )
39 .await;
40
41 let tree = Worktree::local(
42 build_client(cx),
43 Path::new("/root"),
44 true,
45 fs,
46 Default::default(),
47 &mut cx.to_async(),
48 )
49 .await
50 .unwrap();
51 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
52 .await;
53
54 tree.read_with(cx, |tree, _| {
55 assert_eq!(
56 tree.entries(false)
57 .map(|entry| entry.path.as_ref())
58 .collect::<Vec<_>>(),
59 vec![
60 Path::new(""),
61 Path::new(".gitignore"),
62 Path::new("a"),
63 Path::new("a/c"),
64 ]
65 );
66 assert_eq!(
67 tree.entries(true)
68 .map(|entry| entry.path.as_ref())
69 .collect::<Vec<_>>(),
70 vec![
71 Path::new(""),
72 Path::new(".gitignore"),
73 Path::new("a"),
74 Path::new("a/b"),
75 Path::new("a/c"),
76 ]
77 );
78 })
79}
80
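// `descendent_entries(include_dirs, include_ignored, path)` yields the entries beneath
// `path`; both flags are exercised here, including against a gitignored subtree.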
81#[gpui::test]
82async fn test_descendent_entries(cx: &mut TestAppContext) {
83 let fs = FakeFs::new(cx.background());
84 fs.insert_tree(
85 "/root",
86 json!({
87 "a": "",
88 "b": {
89 "c": {
90 "d": ""
91 },
92 "e": {}
93 },
94 "f": "",
95 "g": {
96 "h": {}
97 },
98 "i": {
99 "j": {
100 "k": ""
101 },
102 "l": {
103
104 }
105 },
106 ".gitignore": "i/j\n",
107 }),
108 )
109 .await;
110
111 let tree = Worktree::local(
112 build_client(cx),
113 Path::new("/root"),
114 true,
115 fs,
116 Default::default(),
117 &mut cx.to_async(),
118 )
119 .await
120 .unwrap();
121 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
122 .await;
123
124 tree.read_with(cx, |tree, _| {
125 assert_eq!(
126 tree.descendent_entries(false, false, Path::new("b"))
127 .map(|entry| entry.path.as_ref())
128 .collect::<Vec<_>>(),
129 vec![Path::new("b/c/d"),]
130 );
131 assert_eq!(
132 tree.descendent_entries(true, false, Path::new("b"))
133 .map(|entry| entry.path.as_ref())
134 .collect::<Vec<_>>(),
135 vec![
136 Path::new("b"),
137 Path::new("b/c"),
138 Path::new("b/c/d"),
139 Path::new("b/e"),
140 ]
141 );
142
143 assert_eq!(
144 tree.descendent_entries(false, false, Path::new("g"))
145 .map(|entry| entry.path.as_ref())
146 .collect::<Vec<_>>(),
147 Vec::<PathBuf>::new()
148 );
149 assert_eq!(
150 tree.descendent_entries(true, false, Path::new("g"))
151 .map(|entry| entry.path.as_ref())
152 .collect::<Vec<_>>(),
153 vec![Path::new("g"), Path::new("g/h"),]
154 );
155 });
156
157 // Expand gitignored directory.
158 tree.read_with(cx, |tree, _| {
159 tree.as_local()
160 .unwrap()
161 .refresh_entries_for_paths(vec![Path::new("i/j").into()])
162 })
163 .recv()
164 .await;
165
166 tree.read_with(cx, |tree, _| {
167 assert_eq!(
168 tree.descendent_entries(false, false, Path::new("i"))
169 .map(|entry| entry.path.as_ref())
170 .collect::<Vec<_>>(),
171 Vec::<PathBuf>::new()
172 );
173 assert_eq!(
174 tree.descendent_entries(false, true, Path::new("i"))
175 .map(|entry| entry.path.as_ref())
176 .collect::<Vec<_>>(),
177 vec![Path::new("i/j/k")]
178 );
179 assert_eq!(
180 tree.descendent_entries(true, false, Path::new("i"))
181 .map(|entry| entry.path.as_ref())
182 .collect::<Vec<_>>(),
183 vec![Path::new("i"), Path::new("i/l"),]
184 );
185 })
186}
187
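// Symlinks that form a cycle (`lib/a/lib` and `lib/b/lib` both point back at `lib`)
// must still produce a finite scan, and renaming one of them is picked up on rescan.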
188#[gpui::test(iterations = 10)]
189async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
190 let fs = FakeFs::new(cx.background());
191 fs.insert_tree(
192 "/root",
193 json!({
194 "lib": {
195 "a": {
196 "a.txt": ""
197 },
198 "b": {
199 "b.txt": ""
200 }
201 }
202 }),
203 )
204 .await;
205 fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
206 fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
207
208 let tree = Worktree::local(
209 build_client(cx),
210 Path::new("/root"),
211 true,
212 fs.clone(),
213 Default::default(),
214 &mut cx.to_async(),
215 )
216 .await
217 .unwrap();
218
219 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
220 .await;
221
222 tree.read_with(cx, |tree, _| {
223 assert_eq!(
224 tree.entries(false)
225 .map(|entry| entry.path.as_ref())
226 .collect::<Vec<_>>(),
227 vec![
228 Path::new(""),
229 Path::new("lib"),
230 Path::new("lib/a"),
231 Path::new("lib/a/a.txt"),
232 Path::new("lib/a/lib"),
233 Path::new("lib/b"),
234 Path::new("lib/b/b.txt"),
235 Path::new("lib/b/lib"),
236 ]
237 );
238 });
239
240 fs.rename(
241 Path::new("/root/lib/a/lib"),
242 Path::new("/root/lib/a/lib-2"),
243 Default::default(),
244 )
245 .await
246 .unwrap();
247 executor.run_until_parked();
248 tree.read_with(cx, |tree, _| {
249 assert_eq!(
250 tree.entries(false)
251 .map(|entry| entry.path.as_ref())
252 .collect::<Vec<_>>(),
253 vec![
254 Path::new(""),
255 Path::new("lib"),
256 Path::new("lib/a"),
257 Path::new("lib/a/a.txt"),
258 Path::new("lib/a/lib-2"),
259 Path::new("lib/b"),
260 Path::new("lib/b/b.txt"),
261 Path::new("lib/b/lib"),
262 ]
263 );
264 });
265}
266
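// Symlinks resolving outside the worktree root are marked `is_external` and left as
// `UnloadedDir` until explicitly expanded; each expansion emits `PathChange::Loaded`
// events for the newly discovered entries.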
267#[gpui::test]
268async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
269 let fs = FakeFs::new(cx.background());
270 fs.insert_tree(
271 "/root",
272 json!({
273 "dir1": {
274 "deps": {
275 // symlinks here
276 },
277 "src": {
278 "a.rs": "",
279 "b.rs": "",
280 },
281 },
282 "dir2": {
283 "src": {
284 "c.rs": "",
285 "d.rs": "",
286 }
287 },
288 "dir3": {
289 "deps": {},
290 "src": {
291 "e.rs": "",
292 "f.rs": "",
293 },
294 }
295 }),
296 )
297 .await;
298
299 // These symlinks point to directories outside of the worktree's root, dir1.
300 fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
301 .await;
302 fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
303 .await;
304
305 let tree = Worktree::local(
306 build_client(cx),
307 Path::new("/root/dir1"),
308 true,
309 fs.clone(),
310 Default::default(),
311 &mut cx.to_async(),
312 )
313 .await
314 .unwrap();
315
316 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
317 .await;
318
319 let tree_updates = Arc::new(Mutex::new(Vec::new()));
320 tree.update(cx, |_, cx| {
321 let tree_updates = tree_updates.clone();
322 cx.subscribe(&tree, move |_, _, event, _| {
323 if let Event::UpdatedEntries(update) = event {
324 tree_updates.lock().extend(
325 update
326 .iter()
327 .map(|(path, _, change)| (path.clone(), *change)),
328 );
329 }
330 })
331 .detach();
332 });
333
334 // The symlinked directories are not scanned by default.
335 tree.read_with(cx, |tree, _| {
336 assert_eq!(
337 tree.entries(true)
338 .map(|entry| (entry.path.as_ref(), entry.is_external))
339 .collect::<Vec<_>>(),
340 vec![
341 (Path::new(""), false),
342 (Path::new("deps"), false),
343 (Path::new("deps/dep-dir2"), true),
344 (Path::new("deps/dep-dir3"), true),
345 (Path::new("src"), false),
346 (Path::new("src/a.rs"), false),
347 (Path::new("src/b.rs"), false),
348 ]
349 );
350
351 assert_eq!(
352 tree.entry_for_path("deps/dep-dir2").unwrap().kind,
353 EntryKind::UnloadedDir
354 );
355 });
356
357 // Expand one of the symlinked directories.
358 tree.read_with(cx, |tree, _| {
359 tree.as_local()
360 .unwrap()
361 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
362 })
363 .recv()
364 .await;
365
366 // The expanded directory's contents are loaded. Subdirectories are
367 // not scanned yet.
368 tree.read_with(cx, |tree, _| {
369 assert_eq!(
370 tree.entries(true)
371 .map(|entry| (entry.path.as_ref(), entry.is_external))
372 .collect::<Vec<_>>(),
373 vec![
374 (Path::new(""), false),
375 (Path::new("deps"), false),
376 (Path::new("deps/dep-dir2"), true),
377 (Path::new("deps/dep-dir3"), true),
378 (Path::new("deps/dep-dir3/deps"), true),
379 (Path::new("deps/dep-dir3/src"), true),
380 (Path::new("src"), false),
381 (Path::new("src/a.rs"), false),
382 (Path::new("src/b.rs"), false),
383 ]
384 );
385 });
386 assert_eq!(
387 mem::take(&mut *tree_updates.lock()),
388 &[
389 (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
390 (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
391 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
392 ]
393 );
394
395 // Expand a subdirectory of one of the symlinked directories.
396 tree.read_with(cx, |tree, _| {
397 tree.as_local()
398 .unwrap()
399 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
400 })
401 .recv()
402 .await;
403
404 // The expanded subdirectory's contents are loaded.
405 tree.read_with(cx, |tree, _| {
406 assert_eq!(
407 tree.entries(true)
408 .map(|entry| (entry.path.as_ref(), entry.is_external))
409 .collect::<Vec<_>>(),
410 vec![
411 (Path::new(""), false),
412 (Path::new("deps"), false),
413 (Path::new("deps/dep-dir2"), true),
414 (Path::new("deps/dep-dir3"), true),
415 (Path::new("deps/dep-dir3/deps"), true),
416 (Path::new("deps/dep-dir3/src"), true),
417 (Path::new("deps/dep-dir3/src/e.rs"), true),
418 (Path::new("deps/dep-dir3/src/f.rs"), true),
419 (Path::new("src"), false),
420 (Path::new("src/a.rs"), false),
421 (Path::new("src/b.rs"), false),
422 ]
423 );
424 });
425
426 assert_eq!(
427 mem::take(&mut *tree_updates.lock()),
428 &[
429 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
430 (
431 Path::new("deps/dep-dir3/src/e.rs").into(),
432 PathChange::Loaded
433 ),
434 (
435 Path::new("deps/dep-dir3/src/f.rs").into(),
436 PathChange::Loaded
437 )
438 ]
439 );
440}
441
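// Opening a buffer deep inside an unloaded, gitignored directory loads only the
// directories along that path (verified via `read_dir_call_count`), and changes inside
// still-unloaded directories trigger no filesystem calls at all.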
442#[gpui::test]
443async fn test_open_gitignored_files(cx: &mut TestAppContext) {
444 let fs = FakeFs::new(cx.background());
445 fs.insert_tree(
446 "/root",
447 json!({
448 ".gitignore": "node_modules\n",
449 "one": {
450 "node_modules": {
451 "a": {
452 "a1.js": "a1",
453 "a2.js": "a2",
454 },
455 "b": {
456 "b1.js": "b1",
457 "b2.js": "b2",
458 },
459 "c": {
460 "c1.js": "c1",
461 "c2.js": "c2",
462 }
463 },
464 },
465 "two": {
466 "x.js": "",
467 "y.js": "",
468 },
469 }),
470 )
471 .await;
472
473 let tree = Worktree::local(
474 build_client(cx),
475 Path::new("/root"),
476 true,
477 fs.clone(),
478 Default::default(),
479 &mut cx.to_async(),
480 )
481 .await
482 .unwrap();
483
484 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
485 .await;
486
487 tree.read_with(cx, |tree, _| {
488 assert_eq!(
489 tree.entries(true)
490 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
491 .collect::<Vec<_>>(),
492 vec![
493 (Path::new(""), false),
494 (Path::new(".gitignore"), false),
495 (Path::new("one"), false),
496 (Path::new("one/node_modules"), true),
497 (Path::new("two"), false),
498 (Path::new("two/x.js"), false),
499 (Path::new("two/y.js"), false),
500 ]
501 );
502 });
503
    // Open a file that is nested inside a gitignored directory that
    // has not yet been expanded.
506 let prev_read_dir_count = fs.read_dir_call_count();
507 let buffer = tree
508 .update(cx, |tree, cx| {
509 tree.as_local_mut()
510 .unwrap()
511 .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
512 })
513 .await
514 .unwrap();
515
516 tree.read_with(cx, |tree, cx| {
517 assert_eq!(
518 tree.entries(true)
519 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
520 .collect::<Vec<_>>(),
521 vec![
522 (Path::new(""), false),
523 (Path::new(".gitignore"), false),
524 (Path::new("one"), false),
525 (Path::new("one/node_modules"), true),
526 (Path::new("one/node_modules/a"), true),
527 (Path::new("one/node_modules/b"), true),
528 (Path::new("one/node_modules/b/b1.js"), true),
529 (Path::new("one/node_modules/b/b2.js"), true),
530 (Path::new("one/node_modules/c"), true),
531 (Path::new("two"), false),
532 (Path::new("two/x.js"), false),
533 (Path::new("two/y.js"), false),
534 ]
535 );
536
537 assert_eq!(
538 buffer.read(cx).file().unwrap().path().as_ref(),
539 Path::new("one/node_modules/b/b1.js")
540 );
541
542 // Only the newly-expanded directories are scanned.
543 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
544 });
545
546 // Open another file in a different subdirectory of the same
547 // gitignored directory.
548 let prev_read_dir_count = fs.read_dir_call_count();
549 let buffer = tree
550 .update(cx, |tree, cx| {
551 tree.as_local_mut()
552 .unwrap()
553 .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
554 })
555 .await
556 .unwrap();
557
558 tree.read_with(cx, |tree, cx| {
559 assert_eq!(
560 tree.entries(true)
561 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
562 .collect::<Vec<_>>(),
563 vec![
564 (Path::new(""), false),
565 (Path::new(".gitignore"), false),
566 (Path::new("one"), false),
567 (Path::new("one/node_modules"), true),
568 (Path::new("one/node_modules/a"), true),
569 (Path::new("one/node_modules/a/a1.js"), true),
570 (Path::new("one/node_modules/a/a2.js"), true),
571 (Path::new("one/node_modules/b"), true),
572 (Path::new("one/node_modules/b/b1.js"), true),
573 (Path::new("one/node_modules/b/b2.js"), true),
574 (Path::new("one/node_modules/c"), true),
575 (Path::new("two"), false),
576 (Path::new("two/x.js"), false),
577 (Path::new("two/y.js"), false),
578 ]
579 );
580
581 assert_eq!(
582 buffer.read(cx).file().unwrap().path().as_ref(),
583 Path::new("one/node_modules/a/a2.js")
584 );
585
586 // Only the newly-expanded directory is scanned.
587 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
588 });
589
590 // No work happens when files and directories change within an unloaded directory.
591 let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
592 fs.create_dir("/root/one/node_modules/c/lib".as_ref())
593 .await
594 .unwrap();
595 cx.foreground().run_until_parked();
596 assert_eq!(
597 fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
598 0
599 );
600}
601
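// When a `.gitignore` edit un-ignores a directory, its previously unloaded contents are
// scanned (each newly-loaded directory exactly once), while a newly-ignored
// subdirectory is marked as ignored.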
602#[gpui::test]
603async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
604 let fs = FakeFs::new(cx.background());
605 fs.insert_tree(
606 "/root",
607 json!({
608 ".gitignore": "node_modules\n",
609 "a": {
610 "a.js": "",
611 },
612 "b": {
613 "b.js": "",
614 },
615 "node_modules": {
616 "c": {
617 "c.js": "",
618 },
619 "d": {
620 "d.js": "",
621 "e": {
622 "e1.js": "",
623 "e2.js": "",
624 },
625 "f": {
626 "f1.js": "",
627 "f2.js": "",
628 }
629 },
630 },
631 }),
632 )
633 .await;
634
635 let tree = Worktree::local(
636 build_client(cx),
637 Path::new("/root"),
638 true,
639 fs.clone(),
640 Default::default(),
641 &mut cx.to_async(),
642 )
643 .await
644 .unwrap();
645
646 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
647 .await;
648
    // Load a single file's entry within the gitignored directory, forcing some of its
    // subdirectories to be read, but not all.
651 let read_dir_count_1 = fs.read_dir_call_count();
652 tree.read_with(cx, |tree, _| {
653 tree.as_local()
654 .unwrap()
655 .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
656 })
657 .recv()
658 .await;
659
660 // Those subdirectories are now loaded.
661 tree.read_with(cx, |tree, _| {
662 assert_eq!(
663 tree.entries(true)
664 .map(|e| (e.path.as_ref(), e.is_ignored))
665 .collect::<Vec<_>>(),
666 &[
667 (Path::new(""), false),
668 (Path::new(".gitignore"), false),
669 (Path::new("a"), false),
670 (Path::new("a/a.js"), false),
671 (Path::new("b"), false),
672 (Path::new("b/b.js"), false),
673 (Path::new("node_modules"), true),
674 (Path::new("node_modules/c"), true),
675 (Path::new("node_modules/d"), true),
676 (Path::new("node_modules/d/d.js"), true),
677 (Path::new("node_modules/d/e"), true),
678 (Path::new("node_modules/d/f"), true),
679 ]
680 );
681 });
682 let read_dir_count_2 = fs.read_dir_call_count();
683 assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
684
    // Update the gitignore so that node_modules is no longer ignored,
    // but one of its subdirectories now is.
687 fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
688 .await
689 .unwrap();
690 cx.foreground().run_until_parked();
691
692 // All of the directories that are no longer ignored are now loaded.
693 tree.read_with(cx, |tree, _| {
694 assert_eq!(
695 tree.entries(true)
696 .map(|e| (e.path.as_ref(), e.is_ignored))
697 .collect::<Vec<_>>(),
698 &[
699 (Path::new(""), false),
700 (Path::new(".gitignore"), false),
701 (Path::new("a"), false),
702 (Path::new("a/a.js"), false),
703 (Path::new("b"), false),
704 (Path::new("b/b.js"), false),
705 // This directory is no longer ignored
706 (Path::new("node_modules"), false),
707 (Path::new("node_modules/c"), false),
708 (Path::new("node_modules/c/c.js"), false),
709 (Path::new("node_modules/d"), false),
710 (Path::new("node_modules/d/d.js"), false),
711 // This subdirectory is now ignored
712 (Path::new("node_modules/d/e"), true),
713 (Path::new("node_modules/d/f"), false),
714 (Path::new("node_modules/d/f/f1.js"), false),
715 (Path::new("node_modules/d/f/f2.js"), false),
716 ]
717 );
718 });
719
720 // Each of the newly-loaded directories is scanned only once.
721 let read_dir_count_3 = fs.read_dir_call_count();
722 assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
723}
724
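// Ignore state must stay correct for files created after the initial scan, both for the
// worktree's own `.gitignore` and for a `.gitignore` in an ancestor of the worktree
// root.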
725#[gpui::test(iterations = 10)]
726async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
727 let fs = FakeFs::new(cx.background());
728 fs.insert_tree(
729 "/root",
730 json!({
731 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
732 "tree": {
733 ".git": {},
734 ".gitignore": "ignored-dir\n",
735 "tracked-dir": {
736 "tracked-file1": "",
737 "ancestor-ignored-file1": "",
738 },
739 "ignored-dir": {
740 "ignored-file1": ""
741 }
742 }
743 }),
744 )
745 .await;
746
747 let tree = Worktree::local(
748 build_client(cx),
749 "/root/tree".as_ref(),
750 true,
751 fs.clone(),
752 Default::default(),
753 &mut cx.to_async(),
754 )
755 .await
756 .unwrap();
757 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
758 .await;
759
760 tree.read_with(cx, |tree, _| {
761 tree.as_local()
762 .unwrap()
763 .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
764 })
765 .recv()
766 .await;
767
768 cx.read(|cx| {
769 let tree = tree.read(cx);
770 assert!(
771 !tree
772 .entry_for_path("tracked-dir/tracked-file1")
773 .unwrap()
774 .is_ignored
775 );
776 assert!(
777 tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
778 .unwrap()
779 .is_ignored
780 );
781 assert!(
782 tree.entry_for_path("ignored-dir/ignored-file1")
783 .unwrap()
784 .is_ignored
785 );
786 });
787
788 fs.create_file(
789 "/root/tree/tracked-dir/tracked-file2".as_ref(),
790 Default::default(),
791 )
792 .await
793 .unwrap();
794 fs.create_file(
795 "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
796 Default::default(),
797 )
798 .await
799 .unwrap();
800 fs.create_file(
801 "/root/tree/ignored-dir/ignored-file2".as_ref(),
802 Default::default(),
803 )
804 .await
805 .unwrap();
806
807 cx.foreground().run_until_parked();
808 cx.read(|cx| {
809 let tree = tree.read(cx);
810 assert!(
811 !tree
812 .entry_for_path("tracked-dir/tracked-file2")
813 .unwrap()
814 .is_ignored
815 );
816 assert!(
817 tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
818 .unwrap()
819 .is_ignored
820 );
821 assert!(
822 tree.entry_for_path("ignored-dir/ignored-file2")
823 .unwrap()
824 .is_ignored
825 );
826 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
827 });
828}
829
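// `write_file` creates entries for both tracked and gitignored destinations, with
// `is_ignored` set accordingly (uses the real filesystem).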
830#[gpui::test]
831async fn test_write_file(cx: &mut TestAppContext) {
832 let dir = temp_tree(json!({
833 ".git": {},
834 ".gitignore": "ignored-dir\n",
835 "tracked-dir": {},
836 "ignored-dir": {}
837 }));
838
839 let tree = Worktree::local(
840 build_client(cx),
841 dir.path(),
842 true,
843 Arc::new(RealFs),
844 Default::default(),
845 &mut cx.to_async(),
846 )
847 .await
848 .unwrap();
849 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
850 .await;
851 tree.flush_fs_events(cx).await;
852
853 tree.update(cx, |tree, cx| {
854 tree.as_local().unwrap().write_file(
855 Path::new("tracked-dir/file.txt"),
856 "hello".into(),
857 Default::default(),
858 cx,
859 )
860 })
861 .await
862 .unwrap();
863 tree.update(cx, |tree, cx| {
864 tree.as_local().unwrap().write_file(
865 Path::new("ignored-dir/file.txt"),
866 "world".into(),
867 Default::default(),
868 cx,
869 )
870 })
871 .await
872 .unwrap();
873
874 tree.read_with(cx, |tree, _| {
875 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
876 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
877 assert!(!tracked.is_ignored);
878 assert!(ignored.is_ignored);
879 });
880}
881
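// Paths matching the `scan_exclude_files` setting get no worktree entries at all, while
// gitignored paths still do (marked `is_ignored`); changing the setting at runtime
// re-applies the exclusions.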
882#[gpui::test]
883async fn test_ignore_exclusions(cx: &mut TestAppContext) {
884 let dir = temp_tree(json!({
885 ".gitignore": "**/target\n/node_modules\n",
886 "target": {
887 "index": "blah2"
888 },
889 "node_modules": {
890 ".DS_Store": "",
891 "prettier": {
892 "package.json": "{}",
893 },
894 },
895 "src": {
896 ".DS_Store": "",
897 "foo": {
898 "foo.rs": "mod another;\n",
899 "another.rs": "// another",
900 },
901 "bar": {
902 "bar.rs": "// bar",
903 },
904 "lib.rs": "mod foo;\nmod bar;\n",
905 },
906 ".DS_Store": "",
907 }));
908 cx.update(|cx| {
909 cx.set_global(SettingsStore::test(cx));
910 Project::init_settings(cx);
911 cx.update_global::<SettingsStore, _, _>(|store, cx| {
912 store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
913 project_settings.scan_exclude_files =
914 Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
915 });
916 });
917 });
918
919 let tree = Worktree::local(
920 build_client(cx),
921 dir.path(),
922 true,
923 Arc::new(RealFs),
924 Default::default(),
925 &mut cx.to_async(),
926 )
927 .await
928 .unwrap();
929 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
930 .await;
931 tree.flush_fs_events(cx).await;
932 tree.read_with(cx, |tree, _| {
933 check_worktree_entries(
934 tree,
935 &[
936 "src/foo/foo.rs",
937 "src/foo/another.rs",
938 "node_modules/.DS_Store",
939 "src/.DS_Store",
940 ".DS_Store",
941 ],
942 &["target/index", "node_modules/prettier/package.json"],
943 &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
944 )
945 });
946
947 cx.update(|cx| {
948 cx.update_global::<SettingsStore, _, _>(|store, cx| {
949 store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
950 project_settings.scan_exclude_files = Some(vec!["**/node_modules/**".to_string()]);
951 });
952 });
953 });
954 tree.flush_fs_events(cx).await;
955 cx.foreground().run_until_parked();
956 tree.read_with(cx, |tree, _| {
957 check_worktree_entries(
958 tree,
959 &[
960 "node_modules/prettier/package.json",
961 "node_modules/.DS_Store",
962 ],
963 &["target/index"],
964 &[
965 ".gitignore",
966 "src/lib.rs",
967 "src/bar/bar.rs",
968 "src/foo/foo.rs",
969 "src/foo/another.rs",
970 "src/.DS_Store",
971 ".DS_Store",
972 ],
973 )
974 });
975}
976
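// Creating an entry while the initial scan is still in progress must be reflected both
// in the live snapshot and in the remote updates observed via `observe_updates`.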
977#[gpui::test(iterations = 30)]
978async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
979 let fs = FakeFs::new(cx.background());
980 fs.insert_tree(
981 "/root",
982 json!({
983 "b": {},
984 "c": {},
985 "d": {},
986 }),
987 )
988 .await;
989
990 let tree = Worktree::local(
991 build_client(cx),
992 "/root".as_ref(),
993 true,
994 fs,
995 Default::default(),
996 &mut cx.to_async(),
997 )
998 .await
999 .unwrap();
1000
1001 let snapshot1 = tree.update(cx, |tree, cx| {
1002 let tree = tree.as_local_mut().unwrap();
1003 let snapshot = Arc::new(Mutex::new(tree.snapshot()));
1004 let _ = tree.observe_updates(0, cx, {
1005 let snapshot = snapshot.clone();
1006 move |update| {
1007 snapshot.lock().apply_remote_update(update).unwrap();
1008 async { true }
1009 }
1010 });
1011 snapshot
1012 });
1013
1014 let entry = tree
1015 .update(cx, |tree, cx| {
1016 tree.as_local_mut()
1017 .unwrap()
1018 .create_entry("a/e".as_ref(), true, cx)
1019 })
1020 .await
1021 .unwrap();
1022 assert!(entry.is_dir());
1023
1024 cx.foreground().run_until_parked();
1025 tree.read_with(cx, |tree, _| {
1026 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
1027 });
1028
1029 let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
1030 assert_eq!(
1031 snapshot1.lock().entries(true).collect::<Vec<_>>(),
1032 snapshot2.entries(true).collect::<Vec<_>>()
1033 );
1034}
1035
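// `create_entry` creates any missing ancestor directories, on both the fake and the
// real filesystem.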
1036#[gpui::test]
1037async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
1038 let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
1039
1040 let fs_fake = FakeFs::new(cx.background());
1041 fs_fake
1042 .insert_tree(
1043 "/root",
1044 json!({
1045 "a": {},
1046 }),
1047 )
1048 .await;
1049
1050 let tree_fake = Worktree::local(
1051 client_fake,
1052 "/root".as_ref(),
1053 true,
1054 fs_fake,
1055 Default::default(),
1056 &mut cx.to_async(),
1057 )
1058 .await
1059 .unwrap();
1060
1061 let entry = tree_fake
1062 .update(cx, |tree, cx| {
1063 tree.as_local_mut()
1064 .unwrap()
1065 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1066 })
1067 .await
1068 .unwrap();
1069 assert!(entry.is_file());
1070
1071 cx.foreground().run_until_parked();
1072 tree_fake.read_with(cx, |tree, _| {
1073 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1074 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1075 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1076 });
1077
1078 let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
1079
1080 let fs_real = Arc::new(RealFs);
1081 let temp_root = temp_tree(json!({
1082 "a": {}
1083 }));
1084
1085 let tree_real = Worktree::local(
1086 client_real,
1087 temp_root.path(),
1088 true,
1089 fs_real,
1090 Default::default(),
1091 &mut cx.to_async(),
1092 )
1093 .await
1094 .unwrap();
1095
1096 let entry = tree_real
1097 .update(cx, |tree, cx| {
1098 tree.as_local_mut()
1099 .unwrap()
1100 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1101 })
1102 .await
1103 .unwrap();
1104 assert!(entry.is_file());
1105
1106 cx.foreground().run_until_parked();
1107 tree_real.read_with(cx, |tree, _| {
1108 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1109 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1110 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1111 });
1112
    // Test the smallest change: the parent directories already exist, so only the file
    // itself is created.
1114 let entry = tree_real
1115 .update(cx, |tree, cx| {
1116 tree.as_local_mut()
1117 .unwrap()
1118 .create_entry("a/b/c/e.txt".as_ref(), false, cx)
1119 })
1120 .await
1121 .unwrap();
1122 assert!(entry.is_file());
1123
1124 cx.foreground().run_until_parked();
1125 tree_real.read_with(cx, |tree, _| {
1126 assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
1127 });
1128
    // Test the largest change: every ancestor directory must be created as well.
1130 let entry = tree_real
1131 .update(cx, |tree, cx| {
1132 tree.as_local_mut()
1133 .unwrap()
1134 .create_entry("d/e/f/g.txt".as_ref(), false, cx)
1135 })
1136 .await
1137 .unwrap();
1138 assert!(entry.is_file());
1139
1140 cx.foreground().run_until_parked();
1141 tree_real.read_with(cx, |tree, _| {
1142 assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
1143 assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
1144 assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
1145 assert!(tree.entry_for_path("d/").unwrap().is_dir());
1146 });
1147}
1148
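// Applies random worktree mutations while the initial scan is still in progress, then
// checks that replaying the observed updates onto earlier snapshots converges on the
// final snapshot.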
1149#[gpui::test(iterations = 100)]
1150async fn test_random_worktree_operations_during_initial_scan(
1151 cx: &mut TestAppContext,
1152 mut rng: StdRng,
1153) {
1154 let operations = env::var("OPERATIONS")
1155 .map(|o| o.parse().unwrap())
1156 .unwrap_or(5);
1157 let initial_entries = env::var("INITIAL_ENTRIES")
1158 .map(|o| o.parse().unwrap())
1159 .unwrap_or(20);
1160
1161 let root_dir = Path::new("/test");
1162 let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
1163 fs.as_fake().insert_tree(root_dir, json!({})).await;
1164 for _ in 0..initial_entries {
1165 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1166 }
1167 log::info!("generated initial tree");
1168
1169 let worktree = Worktree::local(
1170 build_client(cx),
1171 root_dir,
1172 true,
1173 fs.clone(),
1174 Default::default(),
1175 &mut cx.to_async(),
1176 )
1177 .await
1178 .unwrap();
1179
1180 let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
1181 let updates = Arc::new(Mutex::new(Vec::new()));
1182 worktree.update(cx, |tree, cx| {
1183 check_worktree_change_events(tree, cx);
1184
1185 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1186 let updates = updates.clone();
1187 move |update| {
1188 updates.lock().push(update);
1189 async { true }
1190 }
1191 });
1192 });
1193
1194 for _ in 0..operations {
1195 worktree
1196 .update(cx, |worktree, cx| {
1197 randomly_mutate_worktree(worktree, &mut rng, cx)
1198 })
1199 .await
1200 .log_err();
1201 worktree.read_with(cx, |tree, _| {
1202 tree.as_local().unwrap().snapshot().check_invariants(true)
1203 });
1204
1205 if rng.gen_bool(0.6) {
1206 snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
1207 }
1208 }
1209
1210 worktree
1211 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1212 .await;
1213
1214 cx.foreground().run_until_parked();
1215
1216 let final_snapshot = worktree.read_with(cx, |tree, _| {
1217 let tree = tree.as_local().unwrap();
1218 let snapshot = tree.snapshot();
1219 snapshot.check_invariants(true);
1220 snapshot
1221 });
1222
1223 for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
1224 let mut updated_snapshot = snapshot.clone();
1225 for update in updates.lock().iter() {
1226 if update.scan_id >= updated_snapshot.scan_id() as u64 {
1227 updated_snapshot
1228 .apply_remote_update(update.clone())
1229 .unwrap();
1230 }
1231 }
1232
1233 assert_eq!(
1234 updated_snapshot.entries(true).collect::<Vec<_>>(),
1235 final_snapshot.entries(true).collect::<Vec<_>>(),
1236 "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
1237 );
1238 }
1239}
1240
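// Randomly mutates the filesystem and the worktree with fs events paused and flushed in
// arbitrary batches, then checks snapshot invariants, compares against a fresh scan, and
// replays the observed updates onto stored snapshots.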
1241#[gpui::test(iterations = 100)]
1242async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
1243 let operations = env::var("OPERATIONS")
1244 .map(|o| o.parse().unwrap())
1245 .unwrap_or(40);
1246 let initial_entries = env::var("INITIAL_ENTRIES")
1247 .map(|o| o.parse().unwrap())
1248 .unwrap_or(20);
1249
1250 let root_dir = Path::new("/test");
1251 let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
1252 fs.as_fake().insert_tree(root_dir, json!({})).await;
1253 for _ in 0..initial_entries {
1254 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1255 }
1256 log::info!("generated initial tree");
1257
1258 let worktree = Worktree::local(
1259 build_client(cx),
1260 root_dir,
1261 true,
1262 fs.clone(),
1263 Default::default(),
1264 &mut cx.to_async(),
1265 )
1266 .await
1267 .unwrap();
1268
1269 let updates = Arc::new(Mutex::new(Vec::new()));
1270 worktree.update(cx, |tree, cx| {
1271 check_worktree_change_events(tree, cx);
1272
1273 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1274 let updates = updates.clone();
1275 move |update| {
1276 updates.lock().push(update);
1277 async { true }
1278 }
1279 });
1280 });
1281
1282 worktree
1283 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1284 .await;
1285
1286 fs.as_fake().pause_events();
1287 let mut snapshots = Vec::new();
1288 let mut mutations_len = operations;
1289 while mutations_len > 1 {
1290 if rng.gen_bool(0.2) {
1291 worktree
1292 .update(cx, |worktree, cx| {
1293 randomly_mutate_worktree(worktree, &mut rng, cx)
1294 })
1295 .await
1296 .log_err();
1297 } else {
1298 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1299 }
1300
1301 let buffered_event_count = fs.as_fake().buffered_event_count();
1302 if buffered_event_count > 0 && rng.gen_bool(0.3) {
1303 let len = rng.gen_range(0..=buffered_event_count);
1304 log::info!("flushing {} events", len);
1305 fs.as_fake().flush_events(len);
1306 } else {
1307 randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
1308 mutations_len -= 1;
1309 }
1310
1311 cx.foreground().run_until_parked();
1312 if rng.gen_bool(0.2) {
1313 log::info!("storing snapshot {}", snapshots.len());
1314 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1315 snapshots.push(snapshot);
1316 }
1317 }
1318
1319 log::info!("quiescing");
1320 fs.as_fake().flush_events(usize::MAX);
1321 cx.foreground().run_until_parked();
1322
1323 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1324 snapshot.check_invariants(true);
1325 let expanded_paths = snapshot
1326 .expanded_entries()
1327 .map(|e| e.path.clone())
1328 .collect::<Vec<_>>();
1329
1330 {
1331 let new_worktree = Worktree::local(
1332 build_client(cx),
1333 root_dir,
1334 true,
1335 fs.clone(),
1336 Default::default(),
1337 &mut cx.to_async(),
1338 )
1339 .await
1340 .unwrap();
1341 new_worktree
1342 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1343 .await;
1344 new_worktree
1345 .update(cx, |tree, _| {
1346 tree.as_local_mut()
1347 .unwrap()
1348 .refresh_entries_for_paths(expanded_paths)
1349 })
1350 .recv()
1351 .await;
1352 let new_snapshot =
1353 new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1354 assert_eq!(
1355 snapshot.entries_without_ids(true),
1356 new_snapshot.entries_without_ids(true)
1357 );
1358 }
1359
1360 for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
1361 for update in updates.lock().iter() {
1362 if update.scan_id >= prev_snapshot.scan_id() as u64 {
1363 prev_snapshot.apply_remote_update(update.clone()).unwrap();
1364 }
1365 }
1366
1367 assert_eq!(
1368 prev_snapshot
1369 .entries(true)
1370 .map(ignore_pending_dir)
1371 .collect::<Vec<_>>(),
1372 snapshot
1373 .entries(true)
1374 .map(ignore_pending_dir)
1375 .collect::<Vec<_>>(),
1376 "wrong updates after snapshot {i}: {updates:#?}",
1377 );
1378 }
1379
1380 fn ignore_pending_dir(entry: &Entry) -> Entry {
1381 let mut entry = entry.clone();
1382 if entry.kind.is_dir() {
1383 entry.kind = EntryKind::Dir
1384 }
1385 entry
1386 }
1387}
1388
1389// The worktree's `UpdatedEntries` event can be used to follow along with
1390// all changes to the worktree's snapshot.
1391fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
1392 let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
1393 cx.subscribe(&cx.handle(), move |tree, _, event, _| {
1394 if let Event::UpdatedEntries(changes) = event {
1395 for (path, _, change_type) in changes.iter() {
1396 let entry = tree.entry_for_path(&path).cloned();
1397 let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
1398 Ok(ix) | Err(ix) => ix,
1399 };
1400 match change_type {
1401 PathChange::Added => entries.insert(ix, entry.unwrap()),
1402 PathChange::Removed => drop(entries.remove(ix)),
1403 PathChange::Updated => {
1404 let entry = entry.unwrap();
1405 let existing_entry = entries.get_mut(ix).unwrap();
1406 assert_eq!(existing_entry.path, entry.path);
1407 *existing_entry = entry;
1408 }
1409 PathChange::AddedOrUpdated | PathChange::Loaded => {
1410 let entry = entry.unwrap();
1411 if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
1412 *entries.get_mut(ix).unwrap() = entry;
1413 } else {
1414 entries.insert(ix, entry);
1415 }
1416 }
1417 }
1418 }
1419
1420 let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
1421 assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
1422 }
1423 })
1424 .detach();
1425}
1426
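// Applies one random mutation to the worktree: delete an entry, rename it to a random
// location, create a new file or directory, or overwrite an existing file.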
1427fn randomly_mutate_worktree(
1428 worktree: &mut Worktree,
1429 rng: &mut impl Rng,
1430 cx: &mut ModelContext<Worktree>,
1431) -> Task<Result<()>> {
1432 log::info!("mutating worktree");
1433 let worktree = worktree.as_local_mut().unwrap();
1434 let snapshot = worktree.snapshot();
1435 let entry = snapshot.entries(false).choose(rng).unwrap();
1436
1437 match rng.gen_range(0_u32..100) {
1438 0..=33 if entry.path.as_ref() != Path::new("") => {
1439 log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
1440 worktree.delete_entry(entry.id, cx).unwrap()
1441 }
1442 ..=66 if entry.path.as_ref() != Path::new("") => {
1443 let other_entry = snapshot.entries(false).choose(rng).unwrap();
1444 let new_parent_path = if other_entry.is_dir() {
1445 other_entry.path.clone()
1446 } else {
1447 other_entry.path.parent().unwrap().into()
1448 };
1449 let mut new_path = new_parent_path.join(random_filename(rng));
1450 if new_path.starts_with(&entry.path) {
1451 new_path = random_filename(rng).into();
1452 }
1453
1454 log::info!(
1455 "renaming entry {:?} ({}) to {:?}",
1456 entry.path,
1457 entry.id.0,
1458 new_path
1459 );
1460 let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
1461 cx.foreground().spawn(async move {
1462 task.await?;
1463 Ok(())
1464 })
1465 }
1466 _ => {
1467 let task = if entry.is_dir() {
1468 let child_path = entry.path.join(random_filename(rng));
1469 let is_dir = rng.gen_bool(0.3);
1470 log::info!(
1471 "creating {} at {:?}",
1472 if is_dir { "dir" } else { "file" },
1473 child_path,
1474 );
1475 worktree.create_entry(child_path, is_dir, cx)
1476 } else {
1477 log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
1478 worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
1479 };
1480 cx.foreground().spawn(async move {
1481 task.await?;
1482 Ok(())
1483 })
1484 }
1485 }
1486}
1487
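// Applies one random mutation to the fake filesystem under `root_path`: insert a file or
// directory (biased by `insertion_probability`), occasionally write a `.gitignore`, or
// rename/delete an existing path.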
1488async fn randomly_mutate_fs(
1489 fs: &Arc<dyn Fs>,
1490 root_path: &Path,
1491 insertion_probability: f64,
1492 rng: &mut impl Rng,
1493) {
1494 log::info!("mutating fs");
1495 let mut files = Vec::new();
1496 let mut dirs = Vec::new();
1497 for path in fs.as_fake().paths(false) {
1498 if path.starts_with(root_path) {
1499 if fs.is_file(&path).await {
1500 files.push(path);
1501 } else {
1502 dirs.push(path);
1503 }
1504 }
1505 }
1506
1507 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
1508 let path = dirs.choose(rng).unwrap();
1509 let new_path = path.join(random_filename(rng));
1510
1511 if rng.gen() {
1512 log::info!(
1513 "creating dir {:?}",
1514 new_path.strip_prefix(root_path).unwrap()
1515 );
1516 fs.create_dir(&new_path).await.unwrap();
1517 } else {
1518 log::info!(
1519 "creating file {:?}",
1520 new_path.strip_prefix(root_path).unwrap()
1521 );
1522 fs.create_file(&new_path, Default::default()).await.unwrap();
1523 }
1524 } else if rng.gen_bool(0.05) {
1525 let ignore_dir_path = dirs.choose(rng).unwrap();
1526 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
1527
1528 let subdirs = dirs
1529 .iter()
1530 .filter(|d| d.starts_with(&ignore_dir_path))
1531 .cloned()
1532 .collect::<Vec<_>>();
1533 let subfiles = files
1534 .iter()
1535 .filter(|d| d.starts_with(&ignore_dir_path))
1536 .cloned()
1537 .collect::<Vec<_>>();
1538 let files_to_ignore = {
1539 let len = rng.gen_range(0..=subfiles.len());
1540 subfiles.choose_multiple(rng, len)
1541 };
1542 let dirs_to_ignore = {
1543 let len = rng.gen_range(0..subdirs.len());
1544 subdirs.choose_multiple(rng, len)
1545 };
1546
1547 let mut ignore_contents = String::new();
1548 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
1549 writeln!(
1550 ignore_contents,
1551 "{}",
1552 path_to_ignore
1553 .strip_prefix(&ignore_dir_path)
1554 .unwrap()
1555 .to_str()
1556 .unwrap()
1557 )
1558 .unwrap();
1559 }
1560 log::info!(
1561 "creating gitignore {:?} with contents:\n{}",
1562 ignore_path.strip_prefix(&root_path).unwrap(),
1563 ignore_contents
1564 );
1565 fs.save(
1566 &ignore_path,
1567 &ignore_contents.as_str().into(),
1568 Default::default(),
1569 )
1570 .await
1571 .unwrap();
1572 } else {
1573 let old_path = {
1574 let file_path = files.choose(rng);
1575 let dir_path = dirs[1..].choose(rng);
1576 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
1577 };
1578
1579 let is_rename = rng.gen();
1580 if is_rename {
1581 let new_path_parent = dirs
1582 .iter()
1583 .filter(|d| !d.starts_with(old_path))
1584 .choose(rng)
1585 .unwrap();
1586
1587 let overwrite_existing_dir =
1588 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
1589 let new_path = if overwrite_existing_dir {
1590 fs.remove_dir(
1591 &new_path_parent,
1592 RemoveOptions {
1593 recursive: true,
1594 ignore_if_not_exists: true,
1595 },
1596 )
1597 .await
1598 .unwrap();
1599 new_path_parent.to_path_buf()
1600 } else {
1601 new_path_parent.join(random_filename(rng))
1602 };
1603
1604 log::info!(
1605 "renaming {:?} to {}{:?}",
1606 old_path.strip_prefix(&root_path).unwrap(),
1607 if overwrite_existing_dir {
1608 "overwrite "
1609 } else {
1610 ""
1611 },
1612 new_path.strip_prefix(&root_path).unwrap()
1613 );
1614 fs.rename(
1615 &old_path,
1616 &new_path,
1617 fs::RenameOptions {
1618 overwrite: true,
1619 ignore_if_exists: true,
1620 },
1621 )
1622 .await
1623 .unwrap();
1624 } else if fs.is_file(&old_path).await {
1625 log::info!(
1626 "deleting file {:?}",
1627 old_path.strip_prefix(&root_path).unwrap()
1628 );
1629 fs.remove_file(old_path, Default::default()).await.unwrap();
1630 } else {
1631 log::info!(
1632 "deleting dir {:?}",
1633 old_path.strip_prefix(&root_path).unwrap()
1634 );
1635 fs.remove_dir(
1636 &old_path,
1637 RemoveOptions {
1638 recursive: true,
1639 ignore_if_not_exists: true,
1640 },
1641 )
1642 .await
1643 .unwrap();
1644 }
1645 }
1646}
1647
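// Generates a random 6-character alphanumeric file name.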
1648fn random_filename(rng: &mut impl Rng) -> String {
1649 (0..6)
1650 .map(|_| rng.sample(rand::distributions::Alphanumeric))
1651 .map(char::from)
1652 .collect()
1653}
1654
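// Renaming a repository's work directory must carry the repository and its file
// statuses over to the new paths.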
1655#[gpui::test]
1656async fn test_rename_work_directory(cx: &mut TestAppContext) {
1657 let root = temp_tree(json!({
1658 "projects": {
1659 "project1": {
1660 "a": "",
1661 "b": "",
1662 }
1663 },
1664
1665 }));
1666 let root_path = root.path();
1667
1668 let tree = Worktree::local(
1669 build_client(cx),
1670 root_path,
1671 true,
1672 Arc::new(RealFs),
1673 Default::default(),
1674 &mut cx.to_async(),
1675 )
1676 .await
1677 .unwrap();
1678
1679 let repo = git_init(&root_path.join("projects/project1"));
1680 git_add("a", &repo);
1681 git_commit("init", &repo);
1682 std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
1683
1684 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1685 .await;
1686
1687 tree.flush_fs_events(cx).await;
1688
1689 cx.read(|cx| {
1690 let tree = tree.read(cx);
1691 let (work_dir, _) = tree.repositories().next().unwrap();
1692 assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
1693 assert_eq!(
1694 tree.status_for_file(Path::new("projects/project1/a")),
1695 Some(GitFileStatus::Modified)
1696 );
1697 assert_eq!(
1698 tree.status_for_file(Path::new("projects/project1/b")),
1699 Some(GitFileStatus::Added)
1700 );
1701 });
1702
1703 std::fs::rename(
1704 root_path.join("projects/project1"),
1705 root_path.join("projects/project2"),
1706 )
1707 .ok();
1708 tree.flush_fs_events(cx).await;
1709
1710 cx.read(|cx| {
1711 let tree = tree.read(cx);
1712 let (work_dir, _) = tree.repositories().next().unwrap();
1713 assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
1714 assert_eq!(
1715 tree.status_for_file(Path::new("projects/project2/a")),
1716 Some(GitFileStatus::Modified)
1717 );
1718 assert_eq!(
1719 tree.status_for_file(Path::new("projects/project2/b")),
1720 Some(GitFileStatus::Added)
1721 );
1722 });
1723}
1724
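// `repository_for_path` resolves each path to its innermost containing repository, and
// changes inside a `.git` directory emit `UpdatedGitRepositories` events.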
1725#[gpui::test]
1726async fn test_git_repository_for_path(cx: &mut TestAppContext) {
1727 let root = temp_tree(json!({
1728 "c.txt": "",
1729 "dir1": {
1730 ".git": {},
1731 "deps": {
1732 "dep1": {
1733 ".git": {},
1734 "src": {
1735 "a.txt": ""
1736 }
1737 }
1738 },
1739 "src": {
1740 "b.txt": ""
1741 }
1742 },
1743 }));
1744
1745 let tree = Worktree::local(
1746 build_client(cx),
1747 root.path(),
1748 true,
1749 Arc::new(RealFs),
1750 Default::default(),
1751 &mut cx.to_async(),
1752 )
1753 .await
1754 .unwrap();
1755
1756 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1757 .await;
1758 tree.flush_fs_events(cx).await;
1759
1760 tree.read_with(cx, |tree, _cx| {
1761 let tree = tree.as_local().unwrap();
1762
1763 assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
1764
1765 let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
1766 assert_eq!(
1767 entry
1768 .work_directory(tree)
1769 .map(|directory| directory.as_ref().to_owned()),
1770 Some(Path::new("dir1").to_owned())
1771 );
1772
1773 let entry = tree
1774 .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
1775 .unwrap();
1776 assert_eq!(
1777 entry
1778 .work_directory(tree)
1779 .map(|directory| directory.as_ref().to_owned()),
1780 Some(Path::new("dir1/deps/dep1").to_owned())
1781 );
1782
1783 let entries = tree.files(false, 0);
1784
1785 let paths_with_repos = tree
1786 .entries_with_repositories(entries)
1787 .map(|(entry, repo)| {
1788 (
1789 entry.path.as_ref(),
1790 repo.and_then(|repo| {
1791 repo.work_directory(&tree)
1792 .map(|work_directory| work_directory.0.to_path_buf())
1793 }),
1794 )
1795 })
1796 .collect::<Vec<_>>();
1797
1798 assert_eq!(
1799 paths_with_repos,
1800 &[
1801 (Path::new("c.txt"), None),
1802 (
1803 Path::new("dir1/deps/dep1/src/a.txt"),
1804 Some(Path::new("dir1/deps/dep1").into())
1805 ),
1806 (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
1807 ]
1808 );
1809 });
1810
1811 let repo_update_events = Arc::new(Mutex::new(vec![]));
1812 tree.update(cx, |_, cx| {
1813 let repo_update_events = repo_update_events.clone();
1814 cx.subscribe(&tree, move |_, _, event, _| {
1815 if let Event::UpdatedGitRepositories(update) = event {
1816 repo_update_events.lock().push(update.clone());
1817 }
1818 })
1819 .detach();
1820 });
1821
1822 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
1823 tree.flush_fs_events(cx).await;
1824
1825 assert_eq!(
1826 repo_update_events.lock()[0]
1827 .iter()
1828 .map(|e| e.0.clone())
1829 .collect::<Vec<Arc<Path>>>(),
1830 vec![Path::new("dir1").into()]
1831 );
1832
1833 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
1834 tree.flush_fs_events(cx).await;
1835
1836 tree.read_with(cx, |tree, _cx| {
1837 let tree = tree.as_local().unwrap();
1838
1839 assert!(tree
1840 .repository_for_path("dir1/src/b.txt".as_ref())
1841 .is_none());
1842 });
1843}
1844
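// End-to-end git status tracking against a real repository: statuses are correct at
// startup and stay up to date across edits, commits, resets, stashes, `.gitignore`
// changes, and directory renames.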
1845#[gpui::test]
1846async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
1847 const IGNORE_RULE: &'static str = "**/target";
1848
1849 let root = temp_tree(json!({
1850 "project": {
1851 "a.txt": "a",
1852 "b.txt": "bb",
1853 "c": {
1854 "d": {
1855 "e.txt": "eee"
1856 }
1857 },
1858 "f.txt": "ffff",
1859 "target": {
1860 "build_file": "???"
1861 },
1862 ".gitignore": IGNORE_RULE
1863 },
1864
1865 }));
1866
1867 const A_TXT: &'static str = "a.txt";
1868 const B_TXT: &'static str = "b.txt";
1869 const E_TXT: &'static str = "c/d/e.txt";
1870 const F_TXT: &'static str = "f.txt";
1871 const DOTGITIGNORE: &'static str = ".gitignore";
1872 const BUILD_FILE: &'static str = "target/build_file";
1873 let project_path = Path::new("project");
1874
    // Set up a git repository before creating the worktree.
1876 let work_dir = root.path().join("project");
1877 let mut repo = git_init(work_dir.as_path());
1878 repo.add_ignore_rule(IGNORE_RULE).unwrap();
1879 git_add(A_TXT, &repo);
1880 git_add(E_TXT, &repo);
1881 git_add(DOTGITIGNORE, &repo);
1882 git_commit("Initial commit", &repo);
1883
1884 let tree = Worktree::local(
1885 build_client(cx),
1886 root.path(),
1887 true,
1888 Arc::new(RealFs),
1889 Default::default(),
1890 &mut cx.to_async(),
1891 )
1892 .await
1893 .unwrap();
1894
1895 tree.flush_fs_events(cx).await;
1896 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1897 .await;
1898 deterministic.run_until_parked();
1899
1900 // Check that the right git state is observed on startup
1901 tree.read_with(cx, |tree, _cx| {
1902 let snapshot = tree.snapshot();
1903 assert_eq!(snapshot.repositories().count(), 1);
1904 let (dir, _) = snapshot.repositories().next().unwrap();
1905 assert_eq!(dir.as_ref(), Path::new("project"));
1906
1907 assert_eq!(
1908 snapshot.status_for_file(project_path.join(B_TXT)),
1909 Some(GitFileStatus::Added)
1910 );
1911 assert_eq!(
1912 snapshot.status_for_file(project_path.join(F_TXT)),
1913 Some(GitFileStatus::Added)
1914 );
1915 });
1916
1917 // Modify a file in the working copy.
1918 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
1919 tree.flush_fs_events(cx).await;
1920 deterministic.run_until_parked();
1921
1922 // The worktree detects that the file's git status has changed.
1923 tree.read_with(cx, |tree, _cx| {
1924 let snapshot = tree.snapshot();
1925 assert_eq!(
1926 snapshot.status_for_file(project_path.join(A_TXT)),
1927 Some(GitFileStatus::Modified)
1928 );
1929 });
1930
1931 // Create a commit in the git repository.
1932 git_add(A_TXT, &repo);
1933 git_add(B_TXT, &repo);
1934 git_commit("Committing modified and added", &repo);
1935 tree.flush_fs_events(cx).await;
1936 deterministic.run_until_parked();
1937
    // The worktree detects that the files' git statuses have changed.
1939 tree.read_with(cx, |tree, _cx| {
1940 let snapshot = tree.snapshot();
1941 assert_eq!(
1942 snapshot.status_for_file(project_path.join(F_TXT)),
1943 Some(GitFileStatus::Added)
1944 );
1945 assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
1946 assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
1947 });
1948
1949 // Modify files in the working copy and perform git operations on other files.
1950 git_reset(0, &repo);
1951 git_remove_index(Path::new(B_TXT), &repo);
1952 git_stash(&mut repo);
1953 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
1954 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
1955 tree.flush_fs_events(cx).await;
1956 deterministic.run_until_parked();
1957
1958 // Check that more complex repo changes are tracked
1959 tree.read_with(cx, |tree, _cx| {
1960 let snapshot = tree.snapshot();
1961
1962 assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
1963 assert_eq!(
1964 snapshot.status_for_file(project_path.join(B_TXT)),
1965 Some(GitFileStatus::Added)
1966 );
1967 assert_eq!(
1968 snapshot.status_for_file(project_path.join(E_TXT)),
1969 Some(GitFileStatus::Modified)
1970 );
1971 });
1972
1973 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
1974 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
1975 std::fs::write(
1976 work_dir.join(DOTGITIGNORE),
1977 [IGNORE_RULE, "f.txt"].join("\n"),
1978 )
1979 .unwrap();
1980
1981 git_add(Path::new(DOTGITIGNORE), &repo);
1982 git_commit("Committing modified git ignore", &repo);
1983
1984 tree.flush_fs_events(cx).await;
1985 deterministic.run_until_parked();
1986
1987 let mut renamed_dir_name = "first_directory/second_directory";
1988 const RENAMED_FILE: &'static str = "rf.txt";
1989
1990 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
1991 std::fs::write(
1992 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
1993 "new-contents",
1994 )
1995 .unwrap();
1996
1997 tree.flush_fs_events(cx).await;
1998 deterministic.run_until_parked();
1999
2000 tree.read_with(cx, |tree, _cx| {
2001 let snapshot = tree.snapshot();
2002 assert_eq!(
2003 snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
2004 Some(GitFileStatus::Added)
2005 );
2006 });
2007
2008 renamed_dir_name = "new_first_directory/second_directory";
2009
2010 std::fs::rename(
2011 work_dir.join("first_directory"),
2012 work_dir.join("new_first_directory"),
2013 )
2014 .unwrap();
2015
2016 tree.flush_fs_events(cx).await;
2017 deterministic.run_until_parked();
2018
2019 tree.read_with(cx, |tree, _cx| {
2020 let snapshot = tree.snapshot();
2021
2022 assert_eq!(
2023 snapshot.status_for_file(
2024 project_path
2025 .join(Path::new(renamed_dir_name))
2026 .join(RENAMED_FILE)
2027 ),
2028 Some(GitFileStatus::Added)
2029 );
2030 });
2031}
2032
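// Directory entries inherit a status from their descendants via
// `propagate_git_statuses`: Conflict takes precedence over Modified, which takes
// precedence over Added.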
2033#[gpui::test]
2034async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
2035 let fs = FakeFs::new(cx.background());
2036 fs.insert_tree(
2037 "/root",
2038 json!({
2039 ".git": {},
2040 "a": {
2041 "b": {
2042 "c1.txt": "",
2043 "c2.txt": "",
2044 },
2045 "d": {
2046 "e1.txt": "",
2047 "e2.txt": "",
2048 "e3.txt": "",
2049 }
2050 },
2051 "f": {
2052 "no-status.txt": ""
2053 },
2054 "g": {
2055 "h1.txt": "",
2056 "h2.txt": ""
2057 },
2058
2059 }),
2060 )
2061 .await;
2062
2063 fs.set_status_for_repo_via_git_operation(
2064 &Path::new("/root/.git"),
2065 &[
2066 (Path::new("a/b/c1.txt"), GitFileStatus::Added),
2067 (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
2068 (Path::new("g/h2.txt"), GitFileStatus::Conflict),
2069 ],
2070 );
2071
2072 let tree = Worktree::local(
2073 build_client(cx),
2074 Path::new("/root"),
2075 true,
2076 fs.clone(),
2077 Default::default(),
2078 &mut cx.to_async(),
2079 )
2080 .await
2081 .unwrap();
2082
2083 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2084 .await;
2085
2086 cx.foreground().run_until_parked();
2087 let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
2088
2089 check_propagated_statuses(
2090 &snapshot,
2091 &[
2092 (Path::new(""), Some(GitFileStatus::Conflict)),
2093 (Path::new("a"), Some(GitFileStatus::Modified)),
2094 (Path::new("a/b"), Some(GitFileStatus::Added)),
2095 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
2096 (Path::new("a/b/c2.txt"), None),
2097 (Path::new("a/d"), Some(GitFileStatus::Modified)),
2098 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2099 (Path::new("f"), None),
2100 (Path::new("f/no-status.txt"), None),
2101 (Path::new("g"), Some(GitFileStatus::Conflict)),
2102 (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
2103 ],
2104 );
2105
2106 check_propagated_statuses(
2107 &snapshot,
2108 &[
2109 (Path::new("a/b"), Some(GitFileStatus::Added)),
2110 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
2111 (Path::new("a/b/c2.txt"), None),
2112 (Path::new("a/d"), Some(GitFileStatus::Modified)),
2113 (Path::new("a/d/e1.txt"), None),
2114 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2115 (Path::new("f"), None),
2116 (Path::new("f/no-status.txt"), None),
2117 (Path::new("g"), Some(GitFileStatus::Conflict)),
2118 ],
2119 );
2120
2121 check_propagated_statuses(
2122 &snapshot,
2123 &[
2124 (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
2125 (Path::new("a/b/c2.txt"), None),
2126 (Path::new("a/d/e1.txt"), None),
2127 (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
2128 (Path::new("f/no-status.txt"), None),
2129 ],
2130 );
2131
2132 #[track_caller]
2133 fn check_propagated_statuses(
2134 snapshot: &Snapshot,
2135 expected_statuses: &[(&Path, Option<GitFileStatus>)],
2136 ) {
2137 let mut entries = expected_statuses
2138 .iter()
2139 .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
2140 .collect::<Vec<_>>();
2141 snapshot.propagate_git_statuses(&mut entries);
2142 assert_eq!(
2143 entries
2144 .iter()
2145 .map(|e| (e.path.as_ref(), e.git_status))
2146 .collect::<Vec<_>>(),
2147 expected_statuses
2148 );
2149 }
2150}
2151
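// Builds a `Client` backed by a fake HTTP client that responds 404 to every request.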
2152fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
2153 let http_client = FakeHttpClient::with_404_response();
2154 cx.read(|cx| Client::new(http_client, cx))
2155}
2156
2157#[track_caller]
2158fn git_init(path: &Path) -> git2::Repository {
2159 git2::Repository::init(path).expect("Failed to initialize git repository")
2160}
2161
2162#[track_caller]
2163fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
2164 let path = path.as_ref();
2165 let mut index = repo.index().expect("Failed to get index");
2166 index.add_path(path).expect("Failed to add a.txt");
2167 index.write().expect("Failed to write index");
2168}
2169
2170#[track_caller]
2171fn git_remove_index(path: &Path, repo: &git2::Repository) {
2172 let mut index = repo.index().expect("Failed to get index");
2173 index.remove_path(path).expect("Failed to add a.txt");
2174 index.write().expect("Failed to write index");
2175}
2176
2177#[track_caller]
2178fn git_commit(msg: &'static str, repo: &git2::Repository) {
2179 use git2::Signature;
2180
2181 let signature = Signature::now("test", "test@zed.dev").unwrap();
2182 let oid = repo.index().unwrap().write_tree().unwrap();
2183 let tree = repo.find_tree(oid).unwrap();
    if let Ok(head) = repo.head() {
2185 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
2186
2187 let parent_commit = parent_obj.as_commit().unwrap();
2188
2189 repo.commit(
2190 Some("HEAD"),
2191 &signature,
2192 &signature,
2193 msg,
2194 &tree,
2195 &[parent_commit],
2196 )
2197 .expect("Failed to commit with parent");
2198 } else {
2199 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
2200 .expect("Failed to commit");
2201 }
2202}
2203
2204#[track_caller]
2205fn git_stash(repo: &mut git2::Repository) {
2206 use git2::Signature;
2207
2208 let signature = Signature::now("test", "test@zed.dev").unwrap();
2209 repo.stash_save(&signature, "N/A", None)
2210 .expect("Failed to stash");
2211}
2212
2213#[track_caller]
2214fn git_reset(offset: usize, repo: &git2::Repository) {
2215 let head = repo.head().expect("Couldn't get repo head");
2216 let object = head.peel(git2::ObjectType::Commit).unwrap();
2217 let commit = object.as_commit().unwrap();
2218 let new_head = commit
2219 .parents()
        .inspect(|parent| {
            parent.message();
2222 })
2223 .skip(offset)
2224 .next()
2225 .expect("Not enough history");
2226 repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
2227 .expect("Could not reset");
2228}
2229
2230#[allow(dead_code)]
2231#[track_caller]
2232fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
2233 repo.statuses(None)
2234 .unwrap()
2235 .iter()
2236 .map(|status| (status.path().unwrap().to_string(), status.status()))
2237 .collect()
2238}
2239
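// Asserts that excluded paths have no worktree entries at all, that ignored paths have
// entries with `is_ignored` set, and that tracked paths have entries without it.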
2240#[track_caller]
2241fn check_worktree_entries(
2242 tree: &Worktree,
2243 expected_excluded_paths: &[&str],
2244 expected_ignored_paths: &[&str],
2245 expected_tracked_paths: &[&str],
2246) {
2247 for path in expected_excluded_paths {
2248 let entry = tree.entry_for_path(path);
2249 assert!(
2250 entry.is_none(),
2251 "expected path '{path}' to be excluded, but got entry: {entry:?}",
2252 );
2253 }
2254 for path in expected_ignored_paths {
2255 let entry = tree.entry_for_path(path).unwrap();
2256 assert!(
2257 entry.is_ignored,
2258 "expected path '{path}' to be ignored, but got entry: {entry:?}",
2259 );
2260 }
2261 for path in expected_tracked_paths {
2262 let entry = tree.entry_for_path(path).unwrap();
2263 assert!(
2264 !entry.is_ignored,
2265 "expected path '{path}' to be tracked, but got entry: {entry:?}",
2266 );
2267 }
2268}