1use crate::{
2 project_settings::ProjectSettings,
3 worktree::{Event, Snapshot, WorktreeModelHandle},
4 Entry, EntryKind, PathChange, Project, Worktree,
5};
6use anyhow::Result;
7use client::Client;
8use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
9use git::GITIGNORE;
10use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
11use parking_lot::Mutex;
12use postage::stream::Stream;
13use pretty_assertions::assert_eq;
14use rand::prelude::*;
15use serde_json::json;
16use settings::SettingsStore;
17use std::{
18 env,
19 fmt::Write,
20 mem,
21 path::{Path, PathBuf},
22 sync::Arc,
23};
24use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
25
26#[gpui::test]
27async fn test_traversal(cx: &mut TestAppContext) {
28 let fs = FakeFs::new(cx.background());
29 fs.insert_tree(
30 "/root",
31 json!({
32 ".gitignore": "a/b\n",
33 "a": {
34 "b": "",
35 "c": "",
36 }
37 }),
38 )
39 .await;
40
41 let tree = Worktree::local(
42 build_client(cx),
43 Path::new("/root"),
44 true,
45 fs,
46 Default::default(),
47 &mut cx.to_async(),
48 )
49 .await
50 .unwrap();
51 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
52 .await;
53
54 tree.read_with(cx, |tree, _| {
55 assert_eq!(
56 tree.entries(false)
57 .map(|entry| entry.path.as_ref())
58 .collect::<Vec<_>>(),
59 vec![
60 Path::new(""),
61 Path::new(".gitignore"),
62 Path::new("a"),
63 Path::new("a/c"),
64 ]
65 );
66 assert_eq!(
67 tree.entries(true)
68 .map(|entry| entry.path.as_ref())
69 .collect::<Vec<_>>(),
70 vec![
71 Path::new(""),
72 Path::new(".gitignore"),
73 Path::new("a"),
74 Path::new("a/b"),
75 Path::new("a/c"),
76 ]
77 );
78 })
79}
80
#[gpui::test]
async fn test_descendent_entries(cx: &mut TestAppContext) {
    // Exercises `Worktree::descendent_entries(include_dirs, include_ignored, path)`
    // over a tree that mixes files, nested dirs, empty dirs, and a gitignored
    // subtree ("i/j").
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "a": "",
            "b": {
                "c": {
                    "d": ""
                },
                "e": {}
            },
            "f": "",
            "g": {
                "h": {}
            },
            "i": {
                "j": {
                    "k": ""
                },
                "l": {

                }
            },
            ".gitignore": "i/j\n",
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    // Wait for the initial scan to finish before asserting on entries.
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        // Files only: directories under "b" are skipped, leaves remain.
        assert_eq!(
            tree.descendent_entries(false, false, Path::new("b"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("b/c/d"),]
        );
        // Including dirs yields "b" itself plus all nested entries.
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("b"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new("b"),
                Path::new("b/c"),
                Path::new("b/c/d"),
                Path::new("b/e"),
            ]
        );

        // "g" contains only directories, so the files-only view is empty.
        assert_eq!(
            tree.descendent_entries(false, false, Path::new("g"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            Vec::<PathBuf>::new()
        );
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("g"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("g"), Path::new("g/h"),]
        );
    });

    // Expand gitignored directory.
    // Ignored dirs aren't scanned eagerly; force "i/j" to be loaded so its
    // contents exist in the snapshot for the assertions below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("i/j").into()])
    })
    .recv()
    .await;

    tree.read_with(cx, |tree, _| {
        // Without include_ignored, nothing under ignored "i/j" shows up,
        // and "i" has no non-ignored files.
        assert_eq!(
            tree.descendent_entries(false, false, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            Vec::<PathBuf>::new()
        );
        // With include_ignored, the loaded file inside "i/j" is visible.
        assert_eq!(
            tree.descendent_entries(false, true, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("i/j/k")]
        );
        // Dirs-without-ignored: "i" and its non-ignored subdir "i/l" only.
        assert_eq!(
            tree.descendent_entries(true, false, Path::new("i"))
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new("i"), Path::new("i/l"),]
        );
    })
}
187
#[gpui::test(iterations = 10)]
async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
    // Ensures the scanner terminates on mutually-circular symlinks
    // (each lib dir links back to its parent) and records the symlink
    // entries without following them into infinite recursion.
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "lib": {
                "a": {
                    "a.txt": ""
                },
                "b": {
                    "b.txt": ""
                }
            }
        }),
    )
    .await;
    // Both symlinks point at "..", creating cycles a/lib -> lib and b/lib -> lib.
    fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
    fs.insert_symlink("/root/lib/b/lib", "..".into()).await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        // The symlinks appear as single entries; their targets aren't re-scanned.
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new("lib"),
                Path::new("lib/a"),
                Path::new("lib/a/a.txt"),
                Path::new("lib/a/lib"),
                Path::new("lib/b"),
                Path::new("lib/b/b.txt"),
                Path::new("lib/b/lib"),
            ]
        );
    });

    // Renaming one circular symlink should be observed as a normal entry rename.
    fs.rename(
        Path::new("/root/lib/a/lib"),
        Path::new("/root/lib/a/lib-2"),
        Default::default(),
    )
    .await
    .unwrap();
    // Let the deterministic executor deliver the fs event and rescan.
    executor.run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new("lib"),
                Path::new("lib/a"),
                Path::new("lib/a/a.txt"),
                Path::new("lib/a/lib-2"),
                Path::new("lib/b"),
                Path::new("lib/b/b.txt"),
                Path::new("lib/b/lib"),
            ]
        );
    });
}
266
#[gpui::test]
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
    // Symlinks whose targets live outside the worktree root must be marked
    // `is_external`, left unloaded by default, and loaded lazily (one level
    // at a time) when explicitly refreshed — with matching UpdatedEntries
    // events for each newly-loaded path.
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir1": {
                "deps": {
                    // symlinks here
                },
                "src": {
                    "a.rs": "",
                    "b.rs": "",
                },
            },
            "dir2": {
                "src": {
                    "c.rs": "",
                    "d.rs": "",
                }
            },
            "dir3": {
                "deps": {},
                "src": {
                    "e.rs": "",
                    "f.rs": "",
                },
            }
        }),
    )
    .await;

    // These symlinks point to directories outside of the worktree's root, dir1.
    fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
        .await;
    fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
        .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root/dir1"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Record all (path, change) pairs from UpdatedEntries events so we can
    // assert exactly which paths each refresh reports as loaded.
    let tree_updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |_, cx| {
        let tree_updates = tree_updates.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedEntries(update) = event {
                tree_updates.lock().extend(
                    update
                        .iter()
                        .map(|(path, _, change)| (path.clone(), *change)),
                );
            }
        })
        .detach();
    });

    // The symlinked directories are not scanned by default.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );

        // An unexpanded external symlink is represented as an UnloadedDir.
        assert_eq!(
            tree.entry_for_path("deps/dep-dir2").unwrap().kind,
            EntryKind::UnloadedDir
        );
    });

    // Expand one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
    })
    .recv()
    .await;

    // The expanded directory's contents are loaded. Subdirectories are
    // not scanned yet.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("deps/dep-dir3/deps"), true),
                (Path::new("deps/dep-dir3/src"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );
    });
    // Exactly the expanded dir and its immediate children were reported.
    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
            (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
            (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
        ]
    );

    // Expand a subdirectory of one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
    })
    .recv()
    .await;

    // The expanded subdirectory's contents are loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("deps/dep-dir3/deps"), true),
                (Path::new("deps/dep-dir3/src"), true),
                (Path::new("deps/dep-dir3/src/e.rs"), true),
                (Path::new("deps/dep-dir3/src/f.rs"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );
    });

    // Again, only the newly-loaded paths are reported.
    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
            (
                Path::new("deps/dep-dir3/src/e.rs").into(),
                PathChange::Loaded
            ),
            (
                Path::new("deps/dep-dir3/src/f.rs").into(),
                PathChange::Loaded
            )
        ]
    );
}
441
#[gpui::test]
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
    // Opening a buffer deep inside an unloaded gitignored directory must
    // lazily scan only the directories on the path to that file, and later
    // fs changes inside still-unloaded ignored dirs must cause no fs calls.
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "one": {
                "node_modules": {
                    "a": {
                        "a1.js": "a1",
                        "a2.js": "a2",
                    },
                    "b": {
                        "b1.js": "b1",
                        "b2.js": "b2",
                    },
                    "c": {
                        "c1.js": "c1",
                        "c2.js": "c2",
                    }
                },
            },
            "two": {
                "x.js": "",
                "y.js": "",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // After the initial scan, the ignored node_modules dir is present but
    // its contents have not been read.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );
    });

    // Open a file that is nested inside of a gitignored directory that
    // has not yet been expanded.
    let prev_read_dir_count = fs.read_dir_call_count();
    let buffer = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, cx| {
        // node_modules and its subdir "b" are now loaded; siblings "a" and
        // "c" are listed but their contents remain unloaded.
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("one/node_modules/a"), true),
                (Path::new("one/node_modules/b"), true),
                (Path::new("one/node_modules/b/b1.js"), true),
                (Path::new("one/node_modules/b/b2.js"), true),
                (Path::new("one/node_modules/c"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );

        assert_eq!(
            buffer.read(cx).file().unwrap().path().as_ref(),
            Path::new("one/node_modules/b/b1.js")
        );

        // Only the newly-expanded directories are scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
    });

    // Open another file in a different subdirectory of the same
    // gitignored directory.
    let prev_read_dir_count = fs.read_dir_call_count();
    let buffer = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, cx| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("one/node_modules/a"), true),
                (Path::new("one/node_modules/a/a1.js"), true),
                (Path::new("one/node_modules/a/a2.js"), true),
                (Path::new("one/node_modules/b"), true),
                (Path::new("one/node_modules/b/b1.js"), true),
                (Path::new("one/node_modules/b/b2.js"), true),
                (Path::new("one/node_modules/c"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );

        assert_eq!(
            buffer.read(cx).file().unwrap().path().as_ref(),
            Path::new("one/node_modules/a/a2.js")
        );

        // Only the newly-expanded directory is scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
    });

    // No work happens when files and directories change within an unloaded directory.
    let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
    fs.create_dir("/root/one/node_modules/c/lib".as_ref())
        .await
        .unwrap();
    cx.foreground().run_until_parked();
    assert_eq!(
        fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
        0
    );
}
601
#[gpui::test]
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
    // When a .gitignore change un-ignores a directory, its previously
    // unloaded subtree must be scanned — each newly-visible directory
    // exactly once — while newly-ignored subdirectories stay unloaded.
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "a": {
                "a.js": "",
            },
            "b": {
                "b.js": "",
            },
            "node_modules": {
                "c": {
                    "c.js": "",
                },
                "d": {
                    "d.js": "",
                    "e": {
                        "e1.js": "",
                        "e2.js": "",
                    },
                    "f": {
                        "f1.js": "",
                        "f2.js": "",
                    }
                },
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Open a file within the gitignored directory, forcing some of its
    // subdirectories to be read, but not all.
    let read_dir_count_1 = fs.read_dir_call_count();
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
    })
    .recv()
    .await;

    // Those subdirectories are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("a"), false),
                (Path::new("a/a.js"), false),
                (Path::new("b"), false),
                (Path::new("b/b.js"), false),
                (Path::new("node_modules"), true),
                (Path::new("node_modules/c"), true),
                (Path::new("node_modules/d"), true),
                (Path::new("node_modules/d/d.js"), true),
                (Path::new("node_modules/d/e"), true),
                (Path::new("node_modules/d/f"), true),
            ]
        );
    });
    // Only node_modules and node_modules/d were read for the refresh.
    let read_dir_count_2 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);

    // Update the gitignore so that node_modules is no longer ignored,
    // but a subdirectory is ignored
    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
        .await
        .unwrap();
    cx.foreground().run_until_parked();

    // All of the directories that are no longer ignored are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("a"), false),
                (Path::new("a/a.js"), false),
                (Path::new("b"), false),
                (Path::new("b/b.js"), false),
                // This directory is no longer ignored
                (Path::new("node_modules"), false),
                (Path::new("node_modules/c"), false),
                (Path::new("node_modules/c/c.js"), false),
                (Path::new("node_modules/d"), false),
                (Path::new("node_modules/d/d.js"), false),
                // This subdirectory is now ignored
                (Path::new("node_modules/d/e"), true),
                (Path::new("node_modules/d/f"), false),
                (Path::new("node_modules/d/f/f1.js"), false),
                (Path::new("node_modules/d/f/f2.js"), false),
            ]
        );
    });

    // Each of the newly-loaded directories is scanned only once.
    let read_dir_count_3 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
}
724
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
    // Verifies ignore status after the initial scan and after creating new
    // files: ancestor .gitignore files (outside the worktree root) and the
    // worktree's own .gitignore must both be honored on rescan.
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            // Ancestor gitignore, above the worktree root "/root/tree".
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        "/root/tree".as_ref(),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Force the ignored directory to be loaded so its entries can be checked.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial-scan ignore statuses.
    cx.read(|cx| {
        let tree = tree.read(cx);
        assert!(
            !tree
                .entry_for_path("tracked-dir/tracked-file1")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("ignored-dir/ignored-file1")
                .unwrap()
                .is_ignored
        );
    });

    // Create new files matching each category; the rescan must classify
    // them the same way.
    fs.create_file(
        "/root/tree/tracked-dir/tracked-file2".as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        "/root/tree/ignored-dir/ignored-file2".as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.foreground().run_until_parked();
    cx.read(|cx| {
        let tree = tree.read(cx);
        assert!(
            !tree
                .entry_for_path("tracked-dir/tracked-file2")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
                .unwrap()
                .is_ignored
        );
        assert!(
            tree.entry_for_path("ignored-dir/ignored-file2")
                .unwrap()
                .is_ignored
        );
        // The .git directory itself is always treated as ignored.
        assert!(tree.entry_for_path(".git").unwrap().is_ignored);
    });
}
829
830#[gpui::test]
831async fn test_write_file(cx: &mut TestAppContext) {
832 let dir = temp_tree(json!({
833 ".git": {},
834 ".gitignore": "ignored-dir\n",
835 "tracked-dir": {},
836 "ignored-dir": {}
837 }));
838
839 let tree = Worktree::local(
840 build_client(cx),
841 dir.path(),
842 true,
843 Arc::new(RealFs),
844 Default::default(),
845 &mut cx.to_async(),
846 )
847 .await
848 .unwrap();
849 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
850 .await;
851 tree.flush_fs_events(cx).await;
852
853 tree.update(cx, |tree, cx| {
854 tree.as_local().unwrap().write_file(
855 Path::new("tracked-dir/file.txt"),
856 "hello".into(),
857 Default::default(),
858 cx,
859 )
860 })
861 .await
862 .unwrap();
863 tree.update(cx, |tree, cx| {
864 tree.as_local().unwrap().write_file(
865 Path::new("ignored-dir/file.txt"),
866 "world".into(),
867 Default::default(),
868 cx,
869 )
870 })
871 .await
872 .unwrap();
873
874 tree.read_with(cx, |tree, _| {
875 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
876 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
877 assert!(!tracked.is_ignored);
878 assert!(ignored.is_ignored);
879 });
880}
881
#[gpui::test]
async fn test_ignore_exclusions(cx: &mut TestAppContext) {
    // Verifies the `scan_exclude_files` project setting: excluded globs are
    // kept out of the worktree entirely, independently of gitignore, and
    // changing the setting re-includes previously excluded paths.
    let dir = temp_tree(json!({
        ".gitignore": "**/target\n/node_modules\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        ".DS_Store": "",
    }));
    // Install a test settings store and exclude "foo" dirs and .DS_Store files.
    cx.update(|cx| {
        cx.set_global(SettingsStore::test(cx));
        Project::init_settings(cx);
        cx.update_global::<SettingsStore, _, _>(|store, cx| {
            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
                project_settings.scan_exclude_files =
                    Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
            });
        });
    });

    let tree = Worktree::local(
        build_client(cx),
        dir.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    // Arguments appear to be (tree, excluded paths, ignored paths, tracked
    // paths) — NOTE(review): confirm against check_worktree_entries' definition.
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                "src/foo/foo.rs",
                "src/foo/another.rs",
                // TODO kb: the .DS_Store entries below should also be
                // excluded by the "**/.DS_Store" glob; re-enable once fixed.
                // "node_modules/.DS_Store",
                // "src/.DS_Store",
                // ".DS_Store",
            ],
            &["target/index", "node_modules/prettier/package.json"],
            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
        )
    });

    // Change the exclusions to node_modules; the "foo" paths must reappear.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _, _>(|store, cx| {
            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
                project_settings.scan_exclude_files = Some(vec!["**/node_modules/**".to_string()]);
            });
        });
    });
    tree.flush_fs_events(cx).await;
    cx.foreground().run_until_parked();
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                "node_modules/prettier/package.json",
                "node_modules/.DS_Store",
            ],
            &["target/index"],
            &[
                ".gitignore",
                "src/lib.rs",
                "src/bar/bar.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/.DS_Store",
                ".DS_Store",
            ],
        )
    });
}
977
#[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
    // Creates a directory while the initial scan is still running (note:
    // scan_complete is deliberately NOT awaited) and checks that a remote
    // observer applying streamed updates converges to the same snapshot.
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "b": {},
            "c": {},
            "d": {},
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        "/root".as_ref(),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Mirror the worktree into a second snapshot by applying every
    // streamed update, as a remote collaborator would.
    let snapshot1 = tree.update(cx, |tree, cx| {
        let tree = tree.as_local_mut().unwrap();
        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
        let _ = tree.observe_updates(0, cx, {
            let snapshot = snapshot.clone();
            move |update| {
                snapshot.lock().apply_remote_update(update).unwrap();
                // Returning true keeps the observer subscribed.
                async { true }
            }
        });
        snapshot
    });

    // Create "a/e" while the initial scan may still be in progress.
    let entry = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("a/e".as_ref(), true, cx)
        })
        .await
        .unwrap();
    assert!(entry.is_dir());

    cx.foreground().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
    });

    // The update-driven mirror must match the final local snapshot exactly.
    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
    assert_eq!(
        snapshot1.lock().entries(true).collect::<Vec<_>>(),
        snapshot2.entries(true).collect::<Vec<_>>()
    );
}
1036
#[gpui::test]
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
    // create_entry must behave like `mkdir -p`: creating a deeply nested
    // file creates all missing parent directories. Checked first against
    // the fake fs, then against the real fs.
    let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));

    let fs_fake = FakeFs::new(cx.background());
    fs_fake
        .insert_tree(
            "/root",
            json!({
                "a": {},
            }),
        )
        .await;

    let tree_fake = Worktree::local(
        client_fake,
        "/root".as_ref(),
        true,
        fs_fake,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // "a" exists but "a/b" and "a/b/c" do not — they must be created.
    let entry = tree_fake
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("a/b/c/d.txt".as_ref(), false, cx)
        })
        .await
        .unwrap();
    assert!(entry.is_file());

    cx.foreground().run_until_parked();
    tree_fake.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
        assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
        assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
    });

    // Repeat the scenario on the real file system.
    let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));

    let fs_real = Arc::new(RealFs);
    let temp_root = temp_tree(json!({
        "a": {}
    }));

    let tree_real = Worktree::local(
        client_real,
        temp_root.path(),
        true,
        fs_real,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("a/b/c/d.txt".as_ref(), false, cx)
        })
        .await
        .unwrap();
    assert!(entry.is_file());

    cx.foreground().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
        assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
        assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
    });

    // Test smallest change: all parents already exist, only the file is new.
    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("a/b/c/e.txt".as_ref(), false, cx)
        })
        .await
        .unwrap();
    assert!(entry.is_file());

    cx.foreground().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
    });

    // Test largest change: every path component is new, starting at the root.
    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("d/e/f/g.txt".as_ref(), false, cx)
        })
        .await
        .unwrap();
    assert!(entry.is_file());

    cx.foreground().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
        assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
        assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
        assert!(tree.entry_for_path("d/").unwrap().is_dir());
    });
}
1149
#[gpui::test(iterations = 100)]
async fn test_random_worktree_operations_during_initial_scan(
    cx: &mut TestAppContext,
    mut rng: StdRng,
) {
    // Randomized test: mutate the worktree while the initial scan is still
    // running, then verify (a) snapshot invariants hold throughout and
    // (b) every intermediate snapshot can be caught up to the final state
    // by replaying the streamed updates.
    //
    // OPERATIONS / INITIAL_ENTRIES env vars override the default workload
    // for longer local runs.
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(5);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new("/test");
    let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        build_client(cx),
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Capture a baseline snapshot and record every streamed update.
    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                // Keep the observer alive for the whole test.
                async { true }
            }
        });
    });

    // Interleave random worktree mutations with the still-running scan;
    // failures from invalid random operations are logged, not fatal.
    for _ in 0..operations {
        worktree
            .update(cx, |worktree, cx| {
                randomly_mutate_worktree(worktree, &mut rng, cx)
            })
            .await
            .log_err();
        worktree.read_with(cx, |tree, _| {
            tree.as_local().unwrap().snapshot().check_invariants(true)
        });

        // Occasionally keep an intermediate snapshot for replay below.
        if rng.gen_bool(0.6) {
            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
        }
    }

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    cx.foreground().run_until_parked();

    let final_snapshot = worktree.read_with(cx, |tree, _| {
        let tree = tree.as_local().unwrap();
        let snapshot = tree.snapshot();
        snapshot.check_invariants(true);
        snapshot
    });

    // Replaying all updates at or after each snapshot's scan_id must
    // reproduce the final snapshot exactly.
    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
        let mut updated_snapshot = snapshot.clone();
        for update in updates.lock().iter() {
            if update.scan_id >= updated_snapshot.scan_id() as u64 {
                updated_snapshot
                    .apply_remote_update(update.clone())
                    .unwrap();
            }
        }

        assert_eq!(
            updated_snapshot.entries(true).collect::<Vec<_>>(),
            final_snapshot.entries(true).collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
        );
    }
}
1241
/// Randomized stress test: interleaves worktree-API mutations, direct FS
/// mutations, and partial FS-event flushing, then checks that (a) a fresh scan
/// of the final FS state matches the worktree's snapshot and (b) replaying the
/// recorded remote updates onto any stored intermediate snapshot reproduces
/// the final snapshot.
#[gpui::test(iterations = 100)]
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
    // Both knobs are overridable from the environment for longer local runs.
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(40);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new("/test");
    let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    // Seed the tree with random entries (probability 1.0 forces insertions).
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        build_client(cx),
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Record every update sent to "remote" observers so the stored snapshots
    // can be replayed below; also validate UpdatedEntries events as they arrive.
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    // Pause event delivery so events can be flushed in random-sized batches,
    // simulating the scanner observing the filesystem mid-mutation.
    fs.as_fake().pause_events();
    let mut snapshots = Vec::new();
    let mut mutations_len = operations;
    while mutations_len > 1 {
        if rng.gen_bool(0.2) {
            // Mutate through the worktree's own API (may legitimately fail).
            worktree
                .update(cx, |worktree, cx| {
                    randomly_mutate_worktree(worktree, &mut rng, cx)
                })
                .await
                .log_err();
        } else {
            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
        }

        let buffered_event_count = fs.as_fake().buffered_event_count();
        if buffered_event_count > 0 && rng.gen_bool(0.3) {
            // Deliver only a random prefix of the pending FS events.
            let len = rng.gen_range(0..=buffered_event_count);
            log::info!("flushing {} events", len);
            fs.as_fake().flush_events(len);
        } else {
            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
            mutations_len -= 1;
        }

        cx.foreground().run_until_parked();
        // Occasionally store an intermediate snapshot to replay updates onto later.
        if rng.gen_bool(0.2) {
            log::info!("storing snapshot {}", snapshots.len());
            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
            snapshots.push(snapshot);
        }
    }

    // Deliver all remaining events and let the scanner settle.
    log::info!("quiescing");
    fs.as_fake().flush_events(usize::MAX);
    cx.foreground().run_until_parked();

    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    snapshot.check_invariants(true);
    let expanded_paths = snapshot
        .expanded_entries()
        .map(|e| e.path.clone())
        .collect::<Vec<_>>();

    // A fresh worktree scanning the same FS (with the same paths expanded)
    // must produce the same entries, modulo entry ids.
    {
        let new_worktree = Worktree::local(
            build_client(cx),
            root_dir,
            true,
            fs.clone(),
            Default::default(),
            &mut cx.to_async(),
        )
        .await
        .unwrap();
        new_worktree
            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
            .await;
        new_worktree
            .update(cx, |tree, _| {
                tree.as_local_mut()
                    .unwrap()
                    .refresh_entries_for_paths(expanded_paths)
            })
            .recv()
            .await;
        let new_snapshot =
            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        assert_eq!(
            snapshot.entries_without_ids(true),
            new_snapshot.entries_without_ids(true)
        );
    }

    // Replaying the recorded updates (from each snapshot's scan id onward)
    // must reproduce the final snapshot, ignoring pending-directory state.
    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
        for update in updates.lock().iter() {
            if update.scan_id >= prev_snapshot.scan_id() as u64 {
                prev_snapshot.apply_remote_update(update.clone()).unwrap();
            }
        }

        assert_eq!(
            prev_snapshot
                .entries(true)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            snapshot
                .entries(true)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }

    // Normalize directory kinds so pending vs. fully-scanned dirs compare equal.
    fn ignore_pending_dir(entry: &Entry) -> Entry {
        let mut entry = entry.clone();
        if entry.kind.is_dir() {
            entry.kind = EntryKind::Dir
        }
        entry
    }
}
1389
// The worktree's `UpdatedEntries` event can be used to follow along with
// all changes to the worktree's snapshot.
//
// This subscribes the worktree to its own events and incrementally applies
// each reported change to a locally-maintained, path-sorted `Vec<Entry>`,
// asserting after every event batch that the result matches the worktree's
// actual entries. Any divergence means the reported changes were wrong.
fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
    // Start from the current entries (including ignored ones), sorted by path.
    let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
    cx.subscribe(&cx.handle(), move |tree, _, event, _| {
        if let Event::UpdatedEntries(changes) = event {
            for (path, _, change_type) in changes.iter() {
                let entry = tree.entry_for_path(&path).cloned();
                // Either the entry's current index (Ok) or its insertion point (Err).
                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
                    Ok(ix) | Err(ix) => ix,
                };
                match change_type {
                    PathChange::Added => entries.insert(ix, entry.unwrap()),
                    PathChange::Removed => drop(entries.remove(ix)),
                    PathChange::Updated => {
                        // An update must refer to an entry that already exists
                        // at exactly this path.
                        let entry = entry.unwrap();
                        let existing_entry = entries.get_mut(ix).unwrap();
                        assert_eq!(existing_entry.path, entry.path);
                        *existing_entry = entry;
                    }
                    PathChange::AddedOrUpdated | PathChange::Loaded => {
                        // Replace in place when the path already exists,
                        // otherwise insert at the sorted position.
                        let entry = entry.unwrap();
                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
                            *entries.get_mut(ix).unwrap() = entry;
                        } else {
                            entries.insert(ix, entry);
                        }
                    }
                }
            }

            // The incrementally-maintained list must match the real snapshot.
            let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
        }
    })
    .detach();
}
1427
/// Performs one random mutation through the worktree's own API (as opposed to
/// mutating the underlying filesystem directly): roughly a third deletes an
/// entry, a third renames one, and the rest creates a child entry or
/// overwrites a file. Returns a task resolving when the mutation completes.
fn randomly_mutate_worktree(
    worktree: &mut Worktree,
    rng: &mut impl Rng,
    cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> {
    log::info!("mutating worktree");
    let worktree = worktree.as_local_mut().unwrap();
    let snapshot = worktree.snapshot();
    let entry = snapshot.entries(false).choose(rng).unwrap();

    match rng.gen_range(0_u32..100) {
        // Delete the chosen entry — unless it is the root (guard falls
        // through to the create/overwrite arm in that case).
        0..=33 if entry.path.as_ref() != Path::new("") => {
            log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
            worktree.delete_entry(entry.id, cx).unwrap()
        }
        // Rename the chosen entry into some other entry's directory.
        ..=66 if entry.path.as_ref() != Path::new("") => {
            let other_entry = snapshot.entries(false).choose(rng).unwrap();
            let new_parent_path = if other_entry.is_dir() {
                other_entry.path.clone()
            } else {
                other_entry.path.parent().unwrap().into()
            };
            let mut new_path = new_parent_path.join(random_filename(rng));
            // Avoid moving an entry inside itself; fall back to a root-level name.
            if new_path.starts_with(&entry.path) {
                new_path = random_filename(rng).into();
            }

            log::info!(
                "renaming entry {:?} ({}) to {:?}",
                entry.path,
                entry.id.0,
                new_path
            );
            let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
            cx.foreground().spawn(async move {
                task.await?;
                Ok(())
            })
        }
        // Create a child entry (30% chance of a directory) under a directory,
        // or truncate the chosen file to empty contents.
        _ => {
            let task = if entry.is_dir() {
                let child_path = entry.path.join(random_filename(rng));
                let is_dir = rng.gen_bool(0.3);
                log::info!(
                    "creating {} at {:?}",
                    if is_dir { "dir" } else { "file" },
                    child_path,
                );
                worktree.create_entry(child_path, is_dir, cx)
            } else {
                log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
                worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
            };
            cx.foreground().spawn(async move {
                task.await?;
                Ok(())
            })
        }
    }
}
1488
/// Performs one random mutation directly on the fake filesystem:
/// with `insertion_probability` creates a new file or directory; otherwise
/// occasionally (5%) writes a `.gitignore`, or else renames or deletes an
/// existing entry. Guarantees an insertion when the tree is empty.
async fn randomly_mutate_fs(
    fs: &Arc<dyn Fs>,
    root_path: &Path,
    insertion_probability: f64,
    rng: &mut impl Rng,
) {
    log::info!("mutating fs");
    // Partition all existing paths under the root into files and directories.
    // The root itself ends up in `dirs`, so `dirs` is never empty.
    let mut files = Vec::new();
    let mut dirs = Vec::new();
    for path in fs.as_fake().paths(false) {
        if path.starts_with(root_path) {
            if fs.is_file(&path).await {
                files.push(path);
            } else {
                dirs.push(path);
            }
        }
    }

    // Force an insertion when only the root directory exists.
    if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
        let path = dirs.choose(rng).unwrap();
        let new_path = path.join(random_filename(rng));

        if rng.gen() {
            log::info!(
                "creating dir {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_dir(&new_path).await.unwrap();
        } else {
            log::info!(
                "creating file {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_file(&new_path, Default::default()).await.unwrap();
        }
    } else if rng.gen_bool(0.05) {
        // Write a `.gitignore` in a random directory, ignoring a random subset
        // of the files and directories beneath it (paths are written relative
        // to the ignore file's directory).
        let ignore_dir_path = dirs.choose(rng).unwrap();
        let ignore_path = ignore_dir_path.join(&*GITIGNORE);

        let subdirs = dirs
            .iter()
            .filter(|d| d.starts_with(&ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let subfiles = files
            .iter()
            .filter(|d| d.starts_with(&ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let files_to_ignore = {
            let len = rng.gen_range(0..=subfiles.len());
            subfiles.choose_multiple(rng, len)
        };
        // NOTE(review): the exclusive range here (vs. `..=` for files above)
        // means not all subdirs are ever ignored at once — presumably so the
        // ignore-file's own directory stays visible; confirm if intentional.
        let dirs_to_ignore = {
            let len = rng.gen_range(0..subdirs.len());
            subdirs.choose_multiple(rng, len)
        };

        let mut ignore_contents = String::new();
        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
            writeln!(
                ignore_contents,
                "{}",
                path_to_ignore
                    .strip_prefix(&ignore_dir_path)
                    .unwrap()
                    .to_str()
                    .unwrap()
            )
            .unwrap();
        }
        log::info!(
            "creating gitignore {:?} with contents:\n{}",
            ignore_path.strip_prefix(&root_path).unwrap(),
            ignore_contents
        );
        fs.save(
            &ignore_path,
            &ignore_contents.as_str().into(),
            Default::default(),
        )
        .await
        .unwrap();
    } else {
        // Pick an existing file or non-root directory to rename or delete.
        let old_path = {
            let file_path = files.choose(rng);
            let dir_path = dirs[1..].choose(rng);
            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
        };

        let is_rename = rng.gen();
        if is_rename {
            // Move under a directory that is not inside the moved entry
            // itself; 30% of the time replace an existing directory wholesale.
            let new_path_parent = dirs
                .iter()
                .filter(|d| !d.starts_with(old_path))
                .choose(rng)
                .unwrap();

            let overwrite_existing_dir =
                !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
            let new_path = if overwrite_existing_dir {
                // Clear the destination first so the rename lands on its path.
                fs.remove_dir(
                    &new_path_parent,
                    RemoveOptions {
                        recursive: true,
                        ignore_if_not_exists: true,
                    },
                )
                .await
                .unwrap();
                new_path_parent.to_path_buf()
            } else {
                new_path_parent.join(random_filename(rng))
            };

            log::info!(
                "renaming {:?} to {}{:?}",
                old_path.strip_prefix(&root_path).unwrap(),
                if overwrite_existing_dir {
                    "overwrite "
                } else {
                    ""
                },
                new_path.strip_prefix(&root_path).unwrap()
            );
            fs.rename(
                &old_path,
                &new_path,
                fs::RenameOptions {
                    overwrite: true,
                    ignore_if_exists: true,
                },
            )
            .await
            .unwrap();
        } else if fs.is_file(&old_path).await {
            log::info!(
                "deleting file {:?}",
                old_path.strip_prefix(&root_path).unwrap()
            );
            fs.remove_file(old_path, Default::default()).await.unwrap();
        } else {
            log::info!(
                "deleting dir {:?}",
                old_path.strip_prefix(&root_path).unwrap()
            );
            fs.remove_dir(
                &old_path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await
            .unwrap();
        }
    }
}
1648
1649fn random_filename(rng: &mut impl Rng) -> String {
1650 (0..6)
1651 .map(|_| rng.sample(rand::distributions::Alphanumeric))
1652 .map(char::from)
1653 .collect()
1654}
1655
/// Verifies that when a git repository's work directory is renamed on disk,
/// the worktree re-associates the repository and its file statuses with the
/// new path (uses the real filesystem and a real git repo).
#[gpui::test]
async fn test_rename_work_directory(cx: &mut TestAppContext) {
    let root = temp_tree(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    let tree = Worktree::local(
        build_client(cx),
        root_path,
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    // Commit "a", then modify it so it shows up as Modified; "b" stays
    // uncommitted and should show up as Added.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    // NOTE(review): `.ok()` deliberately ignores a write failure here — if the
    // write silently fails the status assertions below would catch it anyway.
    std::fs::write(root_path.join("projects/project1/a"), "aa").ok();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.flush_fs_events(cx).await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        let (work_dir, _) = tree.repositories().next().unwrap();
        assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
        assert_eq!(
            tree.status_for_file(Path::new("projects/project1/a")),
            Some(GitFileStatus::Modified)
        );
        assert_eq!(
            tree.status_for_file(Path::new("projects/project1/b")),
            Some(GitFileStatus::Added)
        );
    });

    // Rename the whole work directory on disk and let the worktree observe it.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .ok();
    tree.flush_fs_events(cx).await;

    // The repository and both file statuses should follow the rename.
    cx.read(|cx| {
        let tree = tree.read(cx);
        let (work_dir, _) = tree.repositories().next().unwrap();
        assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
        assert_eq!(
            tree.status_for_file(Path::new("projects/project2/a")),
            Some(GitFileStatus::Modified)
        );
        assert_eq!(
            tree.status_for_file(Path::new("projects/project2/b")),
            Some(GitFileStatus::Added)
        );
    });
}
1725
/// Verifies repository-to-path association for nested repositories:
/// each file maps to the innermost enclosing `.git` work directory, repo
/// update events fire when `.git` contents change, and associations are
/// dropped when a `.git` directory is removed.
#[gpui::test]
async fn test_git_repository_for_path(cx: &mut TestAppContext) {
    // `dir1` is a repo containing a nested repo at `dir1/deps/dep1`;
    // `c.txt` lives outside any repository.
    let root = temp_tree(json!({
        "c.txt": "",
        "dir1": {
            ".git": {},
            "deps": {
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": ""
                    }
                }
            },
            "src": {
                "b.txt": ""
            }
        },
    }));

    let tree = Worktree::local(
        build_client(cx),
        root.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _cx| {
        let tree = tree.as_local().unwrap();

        // Outside any repo: no association.
        assert!(tree.repository_for_path("c.txt".as_ref()).is_none());

        // File in the outer repo maps to `dir1`.
        let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
        assert_eq!(
            entry
                .work_directory(tree)
                .map(|directory| directory.as_ref().to_owned()),
            Some(Path::new("dir1").to_owned())
        );

        // File in the nested repo maps to the inner work dir, not `dir1`.
        let entry = tree
            .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
            .unwrap();
        assert_eq!(
            entry
                .work_directory(tree)
                .map(|directory| directory.as_ref().to_owned()),
            Some(Path::new("dir1/deps/dep1").to_owned())
        );

        // Bulk association: every (non-ignored) file paired with its repo's
        // work directory, or None when outside all repos.
        let entries = tree.files(false, 0);

        let paths_with_repos = tree
            .entries_with_repositories(entries)
            .map(|(entry, repo)| {
                (
                    entry.path.as_ref(),
                    repo.and_then(|repo| {
                        repo.work_directory(&tree)
                            .map(|work_directory| work_directory.0.to_path_buf())
                    }),
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            paths_with_repos,
            &[
                (Path::new("c.txt"), None),
                (
                    Path::new("dir1/deps/dep1/src/a.txt"),
                    Some(Path::new("dir1/deps/dep1").into())
                ),
                (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
            ]
        );
    });

    // Collect UpdatedGitRepositories events for the assertions below.
    let repo_update_events = Arc::new(Mutex::new(vec![]));
    tree.update(cx, |_, cx| {
        let repo_update_events = repo_update_events.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedGitRepositories(update) = event {
                repo_update_events.lock().push(update.clone());
            }
        })
        .detach();
    });

    // Touching a file inside `.git` should emit an update for that repo.
    std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
    tree.flush_fs_events(cx).await;

    assert_eq!(
        repo_update_events.lock()[0]
            .iter()
            .map(|e| e.0.clone())
            .collect::<Vec<Arc<Path>>>(),
        vec![Path::new("dir1").into()]
    );

    // Deleting `.git` should drop the repository association entirely.
    std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _cx| {
        let tree = tree.as_local().unwrap();

        assert!(tree
            .repository_for_path("dir1/src/b.txt".as_ref())
            .is_none());
    });
}
1845
/// End-to-end git status tracking against a real repository: verifies initial
/// statuses after scan, then tracks modifications, commits, resets, stashes,
/// ignore-rule changes, and directory renames through FS events.
#[gpui::test]
async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
    const IGNORE_RULE: &'static str = "**/target";

    let root = temp_tree(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));

    // Paths relative to the repo's work directory (`project/`).
    const A_TXT: &'static str = "a.txt";
    const B_TXT: &'static str = "b.txt";
    const E_TXT: &'static str = "c/d/e.txt";
    const F_TXT: &'static str = "f.txt";
    const DOTGITIGNORE: &'static str = ".gitignore";
    const BUILD_FILE: &'static str = "target/build_file";
    let project_path = Path::new("project");

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let tree = Worktree::local(
        build_client(cx),
        root.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    deterministic.run_until_parked();

    // Check that the right git state is observed on startup
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(snapshot.repositories().count(), 1);
        let (dir, _) = snapshot.repositories().next().unwrap();
        assert_eq!(dir.as_ref(), Path::new("project"));

        // b.txt and f.txt were never committed, so both are Added.
        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // The worktree detects that the file's git status has changed.
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(project_path.join(A_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // The worktree detects that the files' git status have changed.
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );
        // Committed files no longer carry a status.
        assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    // This write lands under `target/`, which the ignore rule covers.
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // Check that more complex repo changes are tracked
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(E_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    // Remove files/dirs and extend the ignore rules to cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    // A new file in a freshly-created nested directory should appear as Added.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &'static str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
            Some(GitFileStatus::Added)
        );
    });

    // Renaming the parent directory must carry the status to the new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    deterministic.run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(
                project_path
                    .join(Path::new(renamed_dir_name))
                    .join(RENAMED_FILE)
            ),
            Some(GitFileStatus::Added)
        );
    });
}
2033
/// Verifies that file-level git statuses propagate upward to ancestor
/// directories with the expected precedence (observed here:
/// Conflict > Modified > Added; directories without any statused
/// descendants get None), and that propagation works for arbitrary
/// subsets of entries, not just full traversals.
#[gpui::test]
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            ".git": {},
            "a": {
                "b": {
                    "c1.txt": "",
                    "c2.txt": "",
                },
                "d": {
                    "e1.txt": "",
                    "e2.txt": "",
                    "e3.txt": "",
                }
            },
            "f": {
                "no-status.txt": ""
            },
            "g": {
                "h1.txt": "",
                "h2.txt": ""
            },

        }),
    )
    .await;

    // One status of each kind, in different subtrees.
    fs.set_status_for_repo_via_git_operation(
        &Path::new("/root/.git"),
        &[
            (Path::new("a/b/c1.txt"), GitFileStatus::Added),
            (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
            (Path::new("g/h2.txt"), GitFileStatus::Conflict),
        ],
    );

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    cx.foreground().run_until_parked();
    let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());

    // Full set including the root: each directory reflects the
    // highest-precedence status among its descendants.
    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new(""), Some(GitFileStatus::Conflict)),
            (Path::new("a"), Some(GitFileStatus::Modified)),
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
            (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
        ],
    );

    // Subset without the root entry.
    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
        ],
    );

    // Files only: statuses come straight from the repo, no propagation needed.
    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f/no-status.txt"), None),
        ],
    );

    // Looks up each expected path's entry, runs status propagation over that
    // entry set, and asserts the resulting (path, status) pairs.
    #[track_caller]
    fn check_propagated_statuses(
        snapshot: &Snapshot,
        expected_statuses: &[(&Path, Option<GitFileStatus>)],
    ) {
        let mut entries = expected_statuses
            .iter()
            .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
            .collect::<Vec<_>>();
        snapshot.propagate_git_statuses(&mut entries);
        assert_eq!(
            entries
                .iter()
                .map(|e| (e.path.as_ref(), e.git_status))
                .collect::<Vec<_>>(),
            expected_statuses
        );
    }
}
2152
2153fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
2154 let http_client = FakeHttpClient::with_404_response();
2155 cx.read(|cx| Client::new(http_client, cx))
2156}
2157
2158#[track_caller]
2159fn git_init(path: &Path) -> git2::Repository {
2160 git2::Repository::init(path).expect("Failed to initialize git repository")
2161}
2162
2163#[track_caller]
2164fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
2165 let path = path.as_ref();
2166 let mut index = repo.index().expect("Failed to get index");
2167 index.add_path(path).expect("Failed to add a.txt");
2168 index.write().expect("Failed to write index");
2169}
2170
2171#[track_caller]
2172fn git_remove_index(path: &Path, repo: &git2::Repository) {
2173 let mut index = repo.index().expect("Failed to get index");
2174 index.remove_path(path).expect("Failed to add a.txt");
2175 index.write().expect("Failed to write index");
2176}
2177
2178#[track_caller]
2179fn git_commit(msg: &'static str, repo: &git2::Repository) {
2180 use git2::Signature;
2181
2182 let signature = Signature::now("test", "test@zed.dev").unwrap();
2183 let oid = repo.index().unwrap().write_tree().unwrap();
2184 let tree = repo.find_tree(oid).unwrap();
2185 if let Some(head) = repo.head().ok() {
2186 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
2187
2188 let parent_commit = parent_obj.as_commit().unwrap();
2189
2190 repo.commit(
2191 Some("HEAD"),
2192 &signature,
2193 &signature,
2194 msg,
2195 &tree,
2196 &[parent_commit],
2197 )
2198 .expect("Failed to commit with parent");
2199 } else {
2200 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
2201 .expect("Failed to commit");
2202 }
2203}
2204
2205#[track_caller]
2206fn git_stash(repo: &mut git2::Repository) {
2207 use git2::Signature;
2208
2209 let signature = Signature::now("test", "test@zed.dev").unwrap();
2210 repo.stash_save(&signature, "N/A", None)
2211 .expect("Failed to stash");
2212}
2213
2214#[track_caller]
2215fn git_reset(offset: usize, repo: &git2::Repository) {
2216 let head = repo.head().expect("Couldn't get repo head");
2217 let object = head.peel(git2::ObjectType::Commit).unwrap();
2218 let commit = object.as_commit().unwrap();
2219 let new_head = commit
2220 .parents()
2221 .inspect(|parnet| {
2222 parnet.message();
2223 })
2224 .skip(offset)
2225 .next()
2226 .expect("Not enough history");
2227 repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
2228 .expect("Could not reset");
2229}
2230
2231#[allow(dead_code)]
2232#[track_caller]
2233fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
2234 repo.statuses(None)
2235 .unwrap()
2236 .iter()
2237 .map(|status| (status.path().unwrap().to_string(), status.status()))
2238 .collect()
2239}
2240
2241#[track_caller]
2242fn check_worktree_entries(
2243 tree: &Worktree,
2244 expected_excluded_paths: &[&str],
2245 expected_ignored_paths: &[&str],
2246 expected_tracked_paths: &[&str],
2247) {
2248 for path in expected_excluded_paths {
2249 let entry = tree.entry_for_path(path);
2250 assert!(
2251 entry.is_none(),
2252 "expected path '{path}' to be excluded, but got entry: {entry:?}",
2253 );
2254 }
2255 for path in expected_ignored_paths {
2256 let entry = tree.entry_for_path(path).unwrap();
2257 assert!(
2258 entry.is_ignored,
2259 "expected path '{path}' to be ignored, but got entry: {entry:?}",
2260 );
2261 }
2262 for path in expected_tracked_paths {
2263 let entry = tree.entry_for_path(path).unwrap();
2264 assert!(
2265 !entry.is_ignored,
2266 "expected path '{path}' to be tracked, but got entry: {entry:?}",
2267 );
2268 }
2269}