1use crate::{
2 worktree_settings::WorktreeSettings, Entry, EntryKind, Event, PathChange, Snapshot, Worktree,
3 WorktreeModelHandle,
4};
5use anyhow::Result;
6use fs::{FakeFs, Fs, RealFs, RemoveOptions};
7use git::{repository::GitFileStatus, GITIGNORE};
8use gpui::{BorrowAppContext, ModelContext, Task, TestAppContext};
9use parking_lot::Mutex;
10use postage::stream::Stream;
11use pretty_assertions::assert_eq;
12use rand::prelude::*;
13use serde_json::json;
14use settings::{Settings, SettingsStore};
15use std::{env, fmt::Write, mem, path::Path, sync::Arc};
16use util::{test::temp_tree, ResultExt};
17
18#[gpui::test]
19async fn test_traversal(cx: &mut TestAppContext) {
20 init_test(cx);
21 let fs = FakeFs::new(cx.background_executor.clone());
22 fs.insert_tree(
23 "/root",
24 json!({
25 ".gitignore": "a/b\n",
26 "a": {
27 "b": "",
28 "c": "",
29 }
30 }),
31 )
32 .await;
33
34 let tree = Worktree::local(
35 Path::new("/root"),
36 true,
37 fs,
38 Default::default(),
39 &mut cx.to_async(),
40 )
41 .await
42 .unwrap();
43 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
44 .await;
45
46 tree.read_with(cx, |tree, _| {
47 assert_eq!(
48 tree.entries(false, 0)
49 .map(|entry| entry.path.as_ref())
50 .collect::<Vec<_>>(),
51 vec![
52 Path::new(""),
53 Path::new(".gitignore"),
54 Path::new("a"),
55 Path::new("a/c"),
56 ]
57 );
58 assert_eq!(
59 tree.entries(true, 0)
60 .map(|entry| entry.path.as_ref())
61 .collect::<Vec<_>>(),
62 vec![
63 Path::new(""),
64 Path::new(".gitignore"),
65 Path::new("a"),
66 Path::new("a/b"),
67 Path::new("a/c"),
68 ]
69 );
70 })
71}
72
73#[gpui::test(iterations = 10)]
74async fn test_circular_symlinks(cx: &mut TestAppContext) {
75 init_test(cx);
76 let fs = FakeFs::new(cx.background_executor.clone());
77 fs.insert_tree(
78 "/root",
79 json!({
80 "lib": {
81 "a": {
82 "a.txt": ""
83 },
84 "b": {
85 "b.txt": ""
86 }
87 }
88 }),
89 )
90 .await;
91 fs.create_symlink("/root/lib/a/lib".as_ref(), "..".into())
92 .await
93 .unwrap();
94 fs.create_symlink("/root/lib/b/lib".as_ref(), "..".into())
95 .await
96 .unwrap();
97
98 let tree = Worktree::local(
99 Path::new("/root"),
100 true,
101 fs.clone(),
102 Default::default(),
103 &mut cx.to_async(),
104 )
105 .await
106 .unwrap();
107
108 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
109 .await;
110
111 tree.read_with(cx, |tree, _| {
112 assert_eq!(
113 tree.entries(false, 0)
114 .map(|entry| entry.path.as_ref())
115 .collect::<Vec<_>>(),
116 vec![
117 Path::new(""),
118 Path::new("lib"),
119 Path::new("lib/a"),
120 Path::new("lib/a/a.txt"),
121 Path::new("lib/a/lib"),
122 Path::new("lib/b"),
123 Path::new("lib/b/b.txt"),
124 Path::new("lib/b/lib"),
125 ]
126 );
127 });
128
129 fs.rename(
130 Path::new("/root/lib/a/lib"),
131 Path::new("/root/lib/a/lib-2"),
132 Default::default(),
133 )
134 .await
135 .unwrap();
136 cx.executor().run_until_parked();
137 tree.read_with(cx, |tree, _| {
138 assert_eq!(
139 tree.entries(false, 0)
140 .map(|entry| entry.path.as_ref())
141 .collect::<Vec<_>>(),
142 vec![
143 Path::new(""),
144 Path::new("lib"),
145 Path::new("lib/a"),
146 Path::new("lib/a/a.txt"),
147 Path::new("lib/a/lib-2"),
148 Path::new("lib/b"),
149 Path::new("lib/b/b.txt"),
150 Path::new("lib/b/lib"),
151 ]
152 );
153 });
154}
155
156#[gpui::test]
157async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
158 init_test(cx);
159 let fs = FakeFs::new(cx.background_executor.clone());
160 fs.insert_tree(
161 "/root",
162 json!({
163 "dir1": {
164 "deps": {
165 // symlinks here
166 },
167 "src": {
168 "a.rs": "",
169 "b.rs": "",
170 },
171 },
172 "dir2": {
173 "src": {
174 "c.rs": "",
175 "d.rs": "",
176 }
177 },
178 "dir3": {
179 "deps": {},
180 "src": {
181 "e.rs": "",
182 "f.rs": "",
183 },
184 }
185 }),
186 )
187 .await;
188
189 // These symlinks point to directories outside of the worktree's root, dir1.
190 fs.create_symlink("/root/dir1/deps/dep-dir2".as_ref(), "../../dir2".into())
191 .await
192 .unwrap();
193 fs.create_symlink("/root/dir1/deps/dep-dir3".as_ref(), "../../dir3".into())
194 .await
195 .unwrap();
196
197 let tree = Worktree::local(
198 Path::new("/root/dir1"),
199 true,
200 fs.clone(),
201 Default::default(),
202 &mut cx.to_async(),
203 )
204 .await
205 .unwrap();
206
207 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
208 .await;
209
210 let tree_updates = Arc::new(Mutex::new(Vec::new()));
211 tree.update(cx, |_, cx| {
212 let tree_updates = tree_updates.clone();
213 cx.subscribe(&tree, move |_, _, event, _| {
214 if let Event::UpdatedEntries(update) = event {
215 tree_updates.lock().extend(
216 update
217 .iter()
218 .map(|(path, _, change)| (path.clone(), *change)),
219 );
220 }
221 })
222 .detach();
223 });
224
225 // The symlinked directories are not scanned by default.
226 tree.read_with(cx, |tree, _| {
227 assert_eq!(
228 tree.entries(true, 0)
229 .map(|entry| (entry.path.as_ref(), entry.is_external))
230 .collect::<Vec<_>>(),
231 vec![
232 (Path::new(""), false),
233 (Path::new("deps"), false),
234 (Path::new("deps/dep-dir2"), true),
235 (Path::new("deps/dep-dir3"), true),
236 (Path::new("src"), false),
237 (Path::new("src/a.rs"), false),
238 (Path::new("src/b.rs"), false),
239 ]
240 );
241
242 assert_eq!(
243 tree.entry_for_path("deps/dep-dir2").unwrap().kind,
244 EntryKind::UnloadedDir
245 );
246 });
247
248 // Expand one of the symlinked directories.
249 tree.read_with(cx, |tree, _| {
250 tree.as_local()
251 .unwrap()
252 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
253 })
254 .recv()
255 .await;
256
257 // The expanded directory's contents are loaded. Subdirectories are
258 // not scanned yet.
259 tree.read_with(cx, |tree, _| {
260 assert_eq!(
261 tree.entries(true, 0)
262 .map(|entry| (entry.path.as_ref(), entry.is_external))
263 .collect::<Vec<_>>(),
264 vec![
265 (Path::new(""), false),
266 (Path::new("deps"), false),
267 (Path::new("deps/dep-dir2"), true),
268 (Path::new("deps/dep-dir3"), true),
269 (Path::new("deps/dep-dir3/deps"), true),
270 (Path::new("deps/dep-dir3/src"), true),
271 (Path::new("src"), false),
272 (Path::new("src/a.rs"), false),
273 (Path::new("src/b.rs"), false),
274 ]
275 );
276 });
277 assert_eq!(
278 mem::take(&mut *tree_updates.lock()),
279 &[
280 (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
281 (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
282 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
283 ]
284 );
285
286 // Expand a subdirectory of one of the symlinked directories.
287 tree.read_with(cx, |tree, _| {
288 tree.as_local()
289 .unwrap()
290 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
291 })
292 .recv()
293 .await;
294
295 // The expanded subdirectory's contents are loaded.
296 tree.read_with(cx, |tree, _| {
297 assert_eq!(
298 tree.entries(true, 0)
299 .map(|entry| (entry.path.as_ref(), entry.is_external))
300 .collect::<Vec<_>>(),
301 vec![
302 (Path::new(""), false),
303 (Path::new("deps"), false),
304 (Path::new("deps/dep-dir2"), true),
305 (Path::new("deps/dep-dir3"), true),
306 (Path::new("deps/dep-dir3/deps"), true),
307 (Path::new("deps/dep-dir3/src"), true),
308 (Path::new("deps/dep-dir3/src/e.rs"), true),
309 (Path::new("deps/dep-dir3/src/f.rs"), true),
310 (Path::new("src"), false),
311 (Path::new("src/a.rs"), false),
312 (Path::new("src/b.rs"), false),
313 ]
314 );
315 });
316
317 assert_eq!(
318 mem::take(&mut *tree_updates.lock()),
319 &[
320 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
321 (
322 Path::new("deps/dep-dir3/src/e.rs").into(),
323 PathChange::Loaded
324 ),
325 (
326 Path::new("deps/dep-dir3/src/f.rs").into(),
327 PathChange::Loaded
328 )
329 ]
330 );
331}
332
333#[cfg(target_os = "macos")]
334#[gpui::test]
335async fn test_renaming_case_only(cx: &mut TestAppContext) {
336 cx.executor().allow_parking();
337 init_test(cx);
338
339 const OLD_NAME: &str = "aaa.rs";
340 const NEW_NAME: &str = "AAA.rs";
341
342 let fs = Arc::new(RealFs::default());
343 let temp_root = temp_tree(json!({
344 OLD_NAME: "",
345 }));
346
347 let tree = Worktree::local(
348 temp_root.path(),
349 true,
350 fs.clone(),
351 Default::default(),
352 &mut cx.to_async(),
353 )
354 .await
355 .unwrap();
356
357 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
358 .await;
359 tree.read_with(cx, |tree, _| {
360 assert_eq!(
361 tree.entries(true, 0)
362 .map(|entry| entry.path.as_ref())
363 .collect::<Vec<_>>(),
364 vec![Path::new(""), Path::new(OLD_NAME)]
365 );
366 });
367
368 fs.rename(
369 &temp_root.path().join(OLD_NAME),
370 &temp_root.path().join(NEW_NAME),
371 fs::RenameOptions {
372 overwrite: true,
373 ignore_if_exists: true,
374 },
375 )
376 .await
377 .unwrap();
378
379 tree.flush_fs_events(cx).await;
380
381 tree.read_with(cx, |tree, _| {
382 assert_eq!(
383 tree.entries(true, 0)
384 .map(|entry| entry.path.as_ref())
385 .collect::<Vec<_>>(),
386 vec![Path::new(""), Path::new(NEW_NAME)]
387 );
388 });
389}
390
391#[gpui::test]
392async fn test_open_gitignored_files(cx: &mut TestAppContext) {
393 init_test(cx);
394 let fs = FakeFs::new(cx.background_executor.clone());
395 fs.insert_tree(
396 "/root",
397 json!({
398 ".gitignore": "node_modules\n",
399 "one": {
400 "node_modules": {
401 "a": {
402 "a1.js": "a1",
403 "a2.js": "a2",
404 },
405 "b": {
406 "b1.js": "b1",
407 "b2.js": "b2",
408 },
409 "c": {
410 "c1.js": "c1",
411 "c2.js": "c2",
412 }
413 },
414 },
415 "two": {
416 "x.js": "",
417 "y.js": "",
418 },
419 }),
420 )
421 .await;
422
423 let tree = Worktree::local(
424 Path::new("/root"),
425 true,
426 fs.clone(),
427 Default::default(),
428 &mut cx.to_async(),
429 )
430 .await
431 .unwrap();
432
433 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
434 .await;
435
436 tree.read_with(cx, |tree, _| {
437 assert_eq!(
438 tree.entries(true, 0)
439 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
440 .collect::<Vec<_>>(),
441 vec![
442 (Path::new(""), false),
443 (Path::new(".gitignore"), false),
444 (Path::new("one"), false),
445 (Path::new("one/node_modules"), true),
446 (Path::new("two"), false),
447 (Path::new("two/x.js"), false),
448 (Path::new("two/y.js"), false),
449 ]
450 );
451 });
452
453 // Open a file that is nested inside of a gitignored directory that
454 // has not yet been expanded.
455 let prev_read_dir_count = fs.read_dir_call_count();
456 let loaded = tree
457 .update(cx, |tree, cx| {
458 tree.load_file("one/node_modules/b/b1.js".as_ref(), cx)
459 })
460 .await
461 .unwrap();
462
463 tree.read_with(cx, |tree, _| {
464 assert_eq!(
465 tree.entries(true, 0)
466 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
467 .collect::<Vec<_>>(),
468 vec![
469 (Path::new(""), false),
470 (Path::new(".gitignore"), false),
471 (Path::new("one"), false),
472 (Path::new("one/node_modules"), true),
473 (Path::new("one/node_modules/a"), true),
474 (Path::new("one/node_modules/b"), true),
475 (Path::new("one/node_modules/b/b1.js"), true),
476 (Path::new("one/node_modules/b/b2.js"), true),
477 (Path::new("one/node_modules/c"), true),
478 (Path::new("two"), false),
479 (Path::new("two/x.js"), false),
480 (Path::new("two/y.js"), false),
481 ]
482 );
483
484 assert_eq!(
485 loaded.file.path.as_ref(),
486 Path::new("one/node_modules/b/b1.js")
487 );
488
489 // Only the newly-expanded directories are scanned.
490 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
491 });
492
493 // Open another file in a different subdirectory of the same
494 // gitignored directory.
495 let prev_read_dir_count = fs.read_dir_call_count();
496 let loaded = tree
497 .update(cx, |tree, cx| {
498 tree.load_file("one/node_modules/a/a2.js".as_ref(), cx)
499 })
500 .await
501 .unwrap();
502
503 tree.read_with(cx, |tree, _| {
504 assert_eq!(
505 tree.entries(true, 0)
506 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
507 .collect::<Vec<_>>(),
508 vec![
509 (Path::new(""), false),
510 (Path::new(".gitignore"), false),
511 (Path::new("one"), false),
512 (Path::new("one/node_modules"), true),
513 (Path::new("one/node_modules/a"), true),
514 (Path::new("one/node_modules/a/a1.js"), true),
515 (Path::new("one/node_modules/a/a2.js"), true),
516 (Path::new("one/node_modules/b"), true),
517 (Path::new("one/node_modules/b/b1.js"), true),
518 (Path::new("one/node_modules/b/b2.js"), true),
519 (Path::new("one/node_modules/c"), true),
520 (Path::new("two"), false),
521 (Path::new("two/x.js"), false),
522 (Path::new("two/y.js"), false),
523 ]
524 );
525
526 assert_eq!(
527 loaded.file.path.as_ref(),
528 Path::new("one/node_modules/a/a2.js")
529 );
530
531 // Only the newly-expanded directory is scanned.
532 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
533 });
534
535 // No work happens when files and directories change within an unloaded directory.
536 let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
537 fs.create_dir("/root/one/node_modules/c/lib".as_ref())
538 .await
539 .unwrap();
540 cx.executor().run_until_parked();
541 assert_eq!(
542 fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
543 0
544 );
545}
546
547#[gpui::test]
548async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
549 init_test(cx);
550 let fs = FakeFs::new(cx.background_executor.clone());
551 fs.insert_tree(
552 "/root",
553 json!({
554 ".gitignore": "node_modules\n",
555 "a": {
556 "a.js": "",
557 },
558 "b": {
559 "b.js": "",
560 },
561 "node_modules": {
562 "c": {
563 "c.js": "",
564 },
565 "d": {
566 "d.js": "",
567 "e": {
568 "e1.js": "",
569 "e2.js": "",
570 },
571 "f": {
572 "f1.js": "",
573 "f2.js": "",
574 }
575 },
576 },
577 }),
578 )
579 .await;
580
581 let tree = Worktree::local(
582 Path::new("/root"),
583 true,
584 fs.clone(),
585 Default::default(),
586 &mut cx.to_async(),
587 )
588 .await
589 .unwrap();
590
591 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
592 .await;
593
594 // Open a file within the gitignored directory, forcing some of its
595 // subdirectories to be read, but not all.
596 let read_dir_count_1 = fs.read_dir_call_count();
597 tree.read_with(cx, |tree, _| {
598 tree.as_local()
599 .unwrap()
600 .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
601 })
602 .recv()
603 .await;
604
605 // Those subdirectories are now loaded.
606 tree.read_with(cx, |tree, _| {
607 assert_eq!(
608 tree.entries(true, 0)
609 .map(|e| (e.path.as_ref(), e.is_ignored))
610 .collect::<Vec<_>>(),
611 &[
612 (Path::new(""), false),
613 (Path::new(".gitignore"), false),
614 (Path::new("a"), false),
615 (Path::new("a/a.js"), false),
616 (Path::new("b"), false),
617 (Path::new("b/b.js"), false),
618 (Path::new("node_modules"), true),
619 (Path::new("node_modules/c"), true),
620 (Path::new("node_modules/d"), true),
621 (Path::new("node_modules/d/d.js"), true),
622 (Path::new("node_modules/d/e"), true),
623 (Path::new("node_modules/d/f"), true),
624 ]
625 );
626 });
627 let read_dir_count_2 = fs.read_dir_call_count();
628 assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
629
630 // Update the gitignore so that node_modules is no longer ignored,
631 // but a subdirectory is ignored
632 fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
633 .await
634 .unwrap();
635 cx.executor().run_until_parked();
636
637 // All of the directories that are no longer ignored are now loaded.
638 tree.read_with(cx, |tree, _| {
639 assert_eq!(
640 tree.entries(true, 0)
641 .map(|e| (e.path.as_ref(), e.is_ignored))
642 .collect::<Vec<_>>(),
643 &[
644 (Path::new(""), false),
645 (Path::new(".gitignore"), false),
646 (Path::new("a"), false),
647 (Path::new("a/a.js"), false),
648 (Path::new("b"), false),
649 (Path::new("b/b.js"), false),
650 // This directory is no longer ignored
651 (Path::new("node_modules"), false),
652 (Path::new("node_modules/c"), false),
653 (Path::new("node_modules/c/c.js"), false),
654 (Path::new("node_modules/d"), false),
655 (Path::new("node_modules/d/d.js"), false),
656 // This subdirectory is now ignored
657 (Path::new("node_modules/d/e"), true),
658 (Path::new("node_modules/d/f"), false),
659 (Path::new("node_modules/d/f/f1.js"), false),
660 (Path::new("node_modules/d/f/f2.js"), false),
661 ]
662 );
663 });
664
665 // Each of the newly-loaded directories is scanned only once.
666 let read_dir_count_3 = fs.read_dir_call_count();
667 assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
668}
669
670#[gpui::test(iterations = 10)]
671async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
672 init_test(cx);
673 cx.update(|cx| {
674 cx.update_global::<SettingsStore, _>(|store, cx| {
675 store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
676 project_settings.file_scan_exclusions = Some(Vec::new());
677 });
678 });
679 });
680 let fs = FakeFs::new(cx.background_executor.clone());
681 fs.insert_tree(
682 "/root",
683 json!({
684 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
685 "tree": {
686 ".git": {},
687 ".gitignore": "ignored-dir\n",
688 "tracked-dir": {
689 "tracked-file1": "",
690 "ancestor-ignored-file1": "",
691 },
692 "ignored-dir": {
693 "ignored-file1": ""
694 }
695 }
696 }),
697 )
698 .await;
699
700 let tree = Worktree::local(
701 "/root/tree".as_ref(),
702 true,
703 fs.clone(),
704 Default::default(),
705 &mut cx.to_async(),
706 )
707 .await
708 .unwrap();
709 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
710 .await;
711
712 tree.read_with(cx, |tree, _| {
713 tree.as_local()
714 .unwrap()
715 .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
716 })
717 .recv()
718 .await;
719
720 cx.read(|cx| {
721 let tree = tree.read(cx);
722 assert_entry_git_state(tree, "tracked-dir/tracked-file1", None, false);
723 assert_entry_git_state(tree, "tracked-dir/ancestor-ignored-file1", None, true);
724 assert_entry_git_state(tree, "ignored-dir/ignored-file1", None, true);
725 });
726
727 fs.set_status_for_repo_via_working_copy_change(
728 &Path::new("/root/tree/.git"),
729 &[(Path::new("tracked-dir/tracked-file2"), GitFileStatus::Added)],
730 );
731
732 fs.create_file(
733 "/root/tree/tracked-dir/tracked-file2".as_ref(),
734 Default::default(),
735 )
736 .await
737 .unwrap();
738 fs.create_file(
739 "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
740 Default::default(),
741 )
742 .await
743 .unwrap();
744 fs.create_file(
745 "/root/tree/ignored-dir/ignored-file2".as_ref(),
746 Default::default(),
747 )
748 .await
749 .unwrap();
750
751 cx.executor().run_until_parked();
752 cx.read(|cx| {
753 let tree = tree.read(cx);
754 assert_entry_git_state(
755 tree,
756 "tracked-dir/tracked-file2",
757 Some(GitFileStatus::Added),
758 false,
759 );
760 assert_entry_git_state(tree, "tracked-dir/ancestor-ignored-file2", None, true);
761 assert_entry_git_state(tree, "ignored-dir/ignored-file2", None, true);
762 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
763 });
764}
765
766#[gpui::test]
767async fn test_update_gitignore(cx: &mut TestAppContext) {
768 init_test(cx);
769 let fs = FakeFs::new(cx.background_executor.clone());
770 fs.insert_tree(
771 "/root",
772 json!({
773 ".git": {},
774 ".gitignore": "*.txt\n",
775 "a.xml": "<a></a>",
776 "b.txt": "Some text"
777 }),
778 )
779 .await;
780
781 let tree = Worktree::local(
782 "/root".as_ref(),
783 true,
784 fs.clone(),
785 Default::default(),
786 &mut cx.to_async(),
787 )
788 .await
789 .unwrap();
790 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
791 .await;
792
793 tree.read_with(cx, |tree, _| {
794 tree.as_local()
795 .unwrap()
796 .refresh_entries_for_paths(vec![Path::new("").into()])
797 })
798 .recv()
799 .await;
800
801 cx.read(|cx| {
802 let tree = tree.read(cx);
803 assert_entry_git_state(tree, "a.xml", None, false);
804 assert_entry_git_state(tree, "b.txt", None, true);
805 });
806
807 fs.atomic_write("/root/.gitignore".into(), "*.xml".into())
808 .await
809 .unwrap();
810
811 fs.set_status_for_repo_via_working_copy_change(
812 &Path::new("/root/.git"),
813 &[(Path::new("b.txt"), GitFileStatus::Added)],
814 );
815
816 cx.executor().run_until_parked();
817 cx.read(|cx| {
818 let tree = tree.read(cx);
819 assert_entry_git_state(tree, "a.xml", None, true);
820 assert_entry_git_state(tree, "b.txt", Some(GitFileStatus::Added), false);
821 });
822}
823
824#[gpui::test]
825async fn test_write_file(cx: &mut TestAppContext) {
826 init_test(cx);
827 cx.executor().allow_parking();
828 let dir = temp_tree(json!({
829 ".git": {},
830 ".gitignore": "ignored-dir\n",
831 "tracked-dir": {},
832 "ignored-dir": {}
833 }));
834
835 let tree = Worktree::local(
836 dir.path(),
837 true,
838 Arc::new(RealFs::default()),
839 Default::default(),
840 &mut cx.to_async(),
841 )
842 .await
843 .unwrap();
844 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
845 .await;
846 tree.flush_fs_events(cx).await;
847
848 tree.update(cx, |tree, cx| {
849 tree.write_file(
850 Path::new("tracked-dir/file.txt"),
851 "hello".into(),
852 Default::default(),
853 cx,
854 )
855 })
856 .await
857 .unwrap();
858 tree.update(cx, |tree, cx| {
859 tree.write_file(
860 Path::new("ignored-dir/file.txt"),
861 "world".into(),
862 Default::default(),
863 cx,
864 )
865 })
866 .await
867 .unwrap();
868
869 tree.read_with(cx, |tree, _| {
870 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
871 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
872 assert!(!tracked.is_ignored);
873 assert!(ignored.is_ignored);
874 });
875}
876
877#[gpui::test]
878async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
879 init_test(cx);
880 cx.executor().allow_parking();
881 let dir = temp_tree(json!({
882 ".gitignore": "**/target\n/node_modules\n",
883 "target": {
884 "index": "blah2"
885 },
886 "node_modules": {
887 ".DS_Store": "",
888 "prettier": {
889 "package.json": "{}",
890 },
891 },
892 "src": {
893 ".DS_Store": "",
894 "foo": {
895 "foo.rs": "mod another;\n",
896 "another.rs": "// another",
897 },
898 "bar": {
899 "bar.rs": "// bar",
900 },
901 "lib.rs": "mod foo;\nmod bar;\n",
902 },
903 ".DS_Store": "",
904 }));
905 cx.update(|cx| {
906 cx.update_global::<SettingsStore, _>(|store, cx| {
907 store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
908 project_settings.file_scan_exclusions =
909 Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
910 });
911 });
912 });
913
914 let tree = Worktree::local(
915 dir.path(),
916 true,
917 Arc::new(RealFs::default()),
918 Default::default(),
919 &mut cx.to_async(),
920 )
921 .await
922 .unwrap();
923 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
924 .await;
925 tree.flush_fs_events(cx).await;
926 tree.read_with(cx, |tree, _| {
927 check_worktree_entries(
928 tree,
929 &[
930 "src/foo/foo.rs",
931 "src/foo/another.rs",
932 "node_modules/.DS_Store",
933 "src/.DS_Store",
934 ".DS_Store",
935 ],
936 &["target", "node_modules"],
937 &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
938 )
939 });
940
941 cx.update(|cx| {
942 cx.update_global::<SettingsStore, _>(|store, cx| {
943 store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
944 project_settings.file_scan_exclusions =
945 Some(vec!["**/node_modules/**".to_string()]);
946 });
947 });
948 });
949 tree.flush_fs_events(cx).await;
950 cx.executor().run_until_parked();
951 tree.read_with(cx, |tree, _| {
952 check_worktree_entries(
953 tree,
954 &[
955 "node_modules/prettier/package.json",
956 "node_modules/.DS_Store",
957 "node_modules",
958 ],
959 &["target"],
960 &[
961 ".gitignore",
962 "src/lib.rs",
963 "src/bar/bar.rs",
964 "src/foo/foo.rs",
965 "src/foo/another.rs",
966 "src/.DS_Store",
967 ".DS_Store",
968 ],
969 )
970 });
971}
972
973#[gpui::test]
974async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
975 init_test(cx);
976 cx.executor().allow_parking();
977 let dir = temp_tree(json!({
978 ".git": {
979 "HEAD": "ref: refs/heads/main\n",
980 "foo": "bar",
981 },
982 ".gitignore": "**/target\n/node_modules\ntest_output\n",
983 "target": {
984 "index": "blah2"
985 },
986 "node_modules": {
987 ".DS_Store": "",
988 "prettier": {
989 "package.json": "{}",
990 },
991 },
992 "src": {
993 ".DS_Store": "",
994 "foo": {
995 "foo.rs": "mod another;\n",
996 "another.rs": "// another",
997 },
998 "bar": {
999 "bar.rs": "// bar",
1000 },
1001 "lib.rs": "mod foo;\nmod bar;\n",
1002 },
1003 ".DS_Store": "",
1004 }));
1005 cx.update(|cx| {
1006 cx.update_global::<SettingsStore, _>(|store, cx| {
1007 store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
1008 project_settings.file_scan_exclusions = Some(vec![
1009 "**/.git".to_string(),
1010 "node_modules/".to_string(),
1011 "build_output".to_string(),
1012 ]);
1013 });
1014 });
1015 });
1016
1017 let tree = Worktree::local(
1018 dir.path(),
1019 true,
1020 Arc::new(RealFs::default()),
1021 Default::default(),
1022 &mut cx.to_async(),
1023 )
1024 .await
1025 .unwrap();
1026 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1027 .await;
1028 tree.flush_fs_events(cx).await;
1029 tree.read_with(cx, |tree, _| {
1030 check_worktree_entries(
1031 tree,
1032 &[
1033 ".git/HEAD",
1034 ".git/foo",
1035 "node_modules",
1036 "node_modules/.DS_Store",
1037 "node_modules/prettier",
1038 "node_modules/prettier/package.json",
1039 ],
1040 &["target"],
1041 &[
1042 ".DS_Store",
1043 "src/.DS_Store",
1044 "src/lib.rs",
1045 "src/foo/foo.rs",
1046 "src/foo/another.rs",
1047 "src/bar/bar.rs",
1048 ".gitignore",
1049 ],
1050 )
1051 });
1052
1053 let new_excluded_dir = dir.path().join("build_output");
1054 let new_ignored_dir = dir.path().join("test_output");
1055 std::fs::create_dir_all(&new_excluded_dir)
1056 .unwrap_or_else(|e| panic!("Failed to create a {new_excluded_dir:?} directory: {e}"));
1057 std::fs::create_dir_all(&new_ignored_dir)
1058 .unwrap_or_else(|e| panic!("Failed to create a {new_ignored_dir:?} directory: {e}"));
1059 let node_modules_dir = dir.path().join("node_modules");
1060 let dot_git_dir = dir.path().join(".git");
1061 let src_dir = dir.path().join("src");
1062 for existing_dir in [&node_modules_dir, &dot_git_dir, &src_dir] {
1063 assert!(
1064 existing_dir.is_dir(),
1065 "Expect {existing_dir:?} to be present in the FS already"
1066 );
1067 }
1068
1069 for directory_for_new_file in [
1070 new_excluded_dir,
1071 new_ignored_dir,
1072 node_modules_dir,
1073 dot_git_dir,
1074 src_dir,
1075 ] {
1076 std::fs::write(directory_for_new_file.join("new_file"), "new file contents")
1077 .unwrap_or_else(|e| {
1078 panic!("Failed to create in {directory_for_new_file:?} a new file: {e}")
1079 });
1080 }
1081 tree.flush_fs_events(cx).await;
1082
1083 tree.read_with(cx, |tree, _| {
1084 check_worktree_entries(
1085 tree,
1086 &[
1087 ".git/HEAD",
1088 ".git/foo",
1089 ".git/new_file",
1090 "node_modules",
1091 "node_modules/.DS_Store",
1092 "node_modules/prettier",
1093 "node_modules/prettier/package.json",
1094 "node_modules/new_file",
1095 "build_output",
1096 "build_output/new_file",
1097 "test_output/new_file",
1098 ],
1099 &["target", "test_output"],
1100 &[
1101 ".DS_Store",
1102 "src/.DS_Store",
1103 "src/lib.rs",
1104 "src/foo/foo.rs",
1105 "src/foo/another.rs",
1106 "src/bar/bar.rs",
1107 "src/new_file",
1108 ".gitignore",
1109 ],
1110 )
1111 });
1112}
1113
1114#[gpui::test]
1115async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
1116 init_test(cx);
1117 cx.executor().allow_parking();
1118 let dir = temp_tree(json!({
1119 ".git": {
1120 "HEAD": "ref: refs/heads/main\n",
1121 "foo": "foo contents",
1122 },
1123 }));
1124 let dot_git_worktree_dir = dir.path().join(".git");
1125
1126 let tree = Worktree::local(
1127 dot_git_worktree_dir.clone(),
1128 true,
1129 Arc::new(RealFs::default()),
1130 Default::default(),
1131 &mut cx.to_async(),
1132 )
1133 .await
1134 .unwrap();
1135 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1136 .await;
1137 tree.flush_fs_events(cx).await;
1138 tree.read_with(cx, |tree, _| {
1139 check_worktree_entries(tree, &[], &["HEAD", "foo"], &[])
1140 });
1141
1142 std::fs::write(dot_git_worktree_dir.join("new_file"), "new file contents")
1143 .unwrap_or_else(|e| panic!("Failed to create in {dot_git_worktree_dir:?} a new file: {e}"));
1144 tree.flush_fs_events(cx).await;
1145 tree.read_with(cx, |tree, _| {
1146 check_worktree_entries(tree, &[], &["HEAD", "foo", "new_file"], &[])
1147 });
1148}
1149
1150#[gpui::test(iterations = 30)]
1151async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
1152 init_test(cx);
1153 let fs = FakeFs::new(cx.background_executor.clone());
1154 fs.insert_tree(
1155 "/root",
1156 json!({
1157 "b": {},
1158 "c": {},
1159 "d": {},
1160 }),
1161 )
1162 .await;
1163
1164 let tree = Worktree::local(
1165 "/root".as_ref(),
1166 true,
1167 fs,
1168 Default::default(),
1169 &mut cx.to_async(),
1170 )
1171 .await
1172 .unwrap();
1173
1174 let snapshot1 = tree.update(cx, |tree, cx| {
1175 let tree = tree.as_local_mut().unwrap();
1176 let snapshot = Arc::new(Mutex::new(tree.snapshot()));
1177 tree.observe_updates(0, cx, {
1178 let snapshot = snapshot.clone();
1179 move |update| {
1180 snapshot.lock().apply_remote_update(update).unwrap();
1181 async { true }
1182 }
1183 });
1184 snapshot
1185 });
1186
1187 let entry = tree
1188 .update(cx, |tree, cx| {
1189 tree.as_local_mut()
1190 .unwrap()
1191 .create_entry("a/e".as_ref(), true, cx)
1192 })
1193 .await
1194 .unwrap()
1195 .to_included()
1196 .unwrap();
1197 assert!(entry.is_dir());
1198
1199 cx.executor().run_until_parked();
1200 tree.read_with(cx, |tree, _| {
1201 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
1202 });
1203
1204 let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
1205 assert_eq!(
1206 snapshot1.lock().entries(true, 0).collect::<Vec<_>>(),
1207 snapshot2.entries(true, 0).collect::<Vec<_>>()
1208 );
1209}
1210
1211#[gpui::test]
1212async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
1213 init_test(cx);
1214 cx.executor().allow_parking();
1215
1216 let fs_fake = FakeFs::new(cx.background_executor.clone());
1217 fs_fake
1218 .insert_tree(
1219 "/root",
1220 json!({
1221 "a": {},
1222 }),
1223 )
1224 .await;
1225
1226 let tree_fake = Worktree::local(
1227 "/root".as_ref(),
1228 true,
1229 fs_fake,
1230 Default::default(),
1231 &mut cx.to_async(),
1232 )
1233 .await
1234 .unwrap();
1235
1236 let entry = tree_fake
1237 .update(cx, |tree, cx| {
1238 tree.as_local_mut()
1239 .unwrap()
1240 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1241 })
1242 .await
1243 .unwrap()
1244 .to_included()
1245 .unwrap();
1246 assert!(entry.is_file());
1247
1248 cx.executor().run_until_parked();
1249 tree_fake.read_with(cx, |tree, _| {
1250 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1251 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1252 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1253 });
1254
1255 let fs_real = Arc::new(RealFs::default());
1256 let temp_root = temp_tree(json!({
1257 "a": {}
1258 }));
1259
1260 let tree_real = Worktree::local(
1261 temp_root.path(),
1262 true,
1263 fs_real,
1264 Default::default(),
1265 &mut cx.to_async(),
1266 )
1267 .await
1268 .unwrap();
1269
1270 let entry = tree_real
1271 .update(cx, |tree, cx| {
1272 tree.as_local_mut()
1273 .unwrap()
1274 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1275 })
1276 .await
1277 .unwrap()
1278 .to_included()
1279 .unwrap();
1280 assert!(entry.is_file());
1281
1282 cx.executor().run_until_parked();
1283 tree_real.read_with(cx, |tree, _| {
1284 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1285 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1286 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1287 });
1288
1289 // Test smallest change
1290 let entry = tree_real
1291 .update(cx, |tree, cx| {
1292 tree.as_local_mut()
1293 .unwrap()
1294 .create_entry("a/b/c/e.txt".as_ref(), false, cx)
1295 })
1296 .await
1297 .unwrap()
1298 .to_included()
1299 .unwrap();
1300 assert!(entry.is_file());
1301
1302 cx.executor().run_until_parked();
1303 tree_real.read_with(cx, |tree, _| {
1304 assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
1305 });
1306
1307 // Test largest change
1308 let entry = tree_real
1309 .update(cx, |tree, cx| {
1310 tree.as_local_mut()
1311 .unwrap()
1312 .create_entry("d/e/f/g.txt".as_ref(), false, cx)
1313 })
1314 .await
1315 .unwrap()
1316 .to_included()
1317 .unwrap();
1318 assert!(entry.is_file());
1319
1320 cx.executor().run_until_parked();
1321 tree_real.read_with(cx, |tree, _| {
1322 assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
1323 assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
1324 assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
1325 assert!(tree.entry_for_path("d/").unwrap().is_dir());
1326 });
1327}
1328
1329#[gpui::test(iterations = 100)]
1330async fn test_random_worktree_operations_during_initial_scan(
1331 cx: &mut TestAppContext,
1332 mut rng: StdRng,
1333) {
1334 init_test(cx);
1335 let operations = env::var("OPERATIONS")
1336 .map(|o| o.parse().unwrap())
1337 .unwrap_or(5);
1338 let initial_entries = env::var("INITIAL_ENTRIES")
1339 .map(|o| o.parse().unwrap())
1340 .unwrap_or(20);
1341
1342 let root_dir = Path::new("/test");
1343 let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
1344 fs.as_fake().insert_tree(root_dir, json!({})).await;
1345 for _ in 0..initial_entries {
1346 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1347 }
1348 log::info!("generated initial tree");
1349
1350 let worktree = Worktree::local(
1351 root_dir,
1352 true,
1353 fs.clone(),
1354 Default::default(),
1355 &mut cx.to_async(),
1356 )
1357 .await
1358 .unwrap();
1359
1360 let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
1361 let updates = Arc::new(Mutex::new(Vec::new()));
1362 worktree.update(cx, |tree, cx| {
1363 check_worktree_change_events(tree, cx);
1364
1365 tree.as_local_mut().unwrap().observe_updates(0, cx, {
1366 let updates = updates.clone();
1367 move |update| {
1368 updates.lock().push(update);
1369 async { true }
1370 }
1371 });
1372 });
1373
1374 for _ in 0..operations {
1375 worktree
1376 .update(cx, |worktree, cx| {
1377 randomly_mutate_worktree(worktree, &mut rng, cx)
1378 })
1379 .await
1380 .log_err();
1381 worktree.read_with(cx, |tree, _| {
1382 tree.as_local().unwrap().snapshot().check_invariants(true)
1383 });
1384
1385 if rng.gen_bool(0.6) {
1386 snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
1387 }
1388 }
1389
1390 worktree
1391 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1392 .await;
1393
1394 cx.executor().run_until_parked();
1395
1396 let final_snapshot = worktree.read_with(cx, |tree, _| {
1397 let tree = tree.as_local().unwrap();
1398 let snapshot = tree.snapshot();
1399 snapshot.check_invariants(true);
1400 snapshot
1401 });
1402
1403 for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
1404 let mut updated_snapshot = snapshot.clone();
1405 for update in updates.lock().iter() {
1406 if update.scan_id >= updated_snapshot.scan_id() as u64 {
1407 updated_snapshot
1408 .apply_remote_update(update.clone())
1409 .unwrap();
1410 }
1411 }
1412
1413 assert_eq!(
1414 updated_snapshot.entries(true, 0).collect::<Vec<_>>(),
1415 final_snapshot.entries(true, 0).collect::<Vec<_>>(),
1416 "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
1417 );
1418 }
1419}
1420
1421#[gpui::test(iterations = 100)]
1422async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
1423 init_test(cx);
1424 let operations = env::var("OPERATIONS")
1425 .map(|o| o.parse().unwrap())
1426 .unwrap_or(40);
1427 let initial_entries = env::var("INITIAL_ENTRIES")
1428 .map(|o| o.parse().unwrap())
1429 .unwrap_or(20);
1430
1431 let root_dir = Path::new("/test");
1432 let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
1433 fs.as_fake().insert_tree(root_dir, json!({})).await;
1434 for _ in 0..initial_entries {
1435 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1436 }
1437 log::info!("generated initial tree");
1438
1439 let worktree = Worktree::local(
1440 root_dir,
1441 true,
1442 fs.clone(),
1443 Default::default(),
1444 &mut cx.to_async(),
1445 )
1446 .await
1447 .unwrap();
1448
1449 let updates = Arc::new(Mutex::new(Vec::new()));
1450 worktree.update(cx, |tree, cx| {
1451 check_worktree_change_events(tree, cx);
1452
1453 tree.as_local_mut().unwrap().observe_updates(0, cx, {
1454 let updates = updates.clone();
1455 move |update| {
1456 updates.lock().push(update);
1457 async { true }
1458 }
1459 });
1460 });
1461
1462 worktree
1463 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1464 .await;
1465
1466 fs.as_fake().pause_events();
1467 let mut snapshots = Vec::new();
1468 let mut mutations_len = operations;
1469 while mutations_len > 1 {
1470 if rng.gen_bool(0.2) {
1471 worktree
1472 .update(cx, |worktree, cx| {
1473 randomly_mutate_worktree(worktree, &mut rng, cx)
1474 })
1475 .await
1476 .log_err();
1477 } else {
1478 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1479 }
1480
1481 let buffered_event_count = fs.as_fake().buffered_event_count();
1482 if buffered_event_count > 0 && rng.gen_bool(0.3) {
1483 let len = rng.gen_range(0..=buffered_event_count);
1484 log::info!("flushing {} events", len);
1485 fs.as_fake().flush_events(len);
1486 } else {
1487 randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
1488 mutations_len -= 1;
1489 }
1490
1491 cx.executor().run_until_parked();
1492 if rng.gen_bool(0.2) {
1493 log::info!("storing snapshot {}", snapshots.len());
1494 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1495 snapshots.push(snapshot);
1496 }
1497 }
1498
1499 log::info!("quiescing");
1500 fs.as_fake().flush_events(usize::MAX);
1501 cx.executor().run_until_parked();
1502
1503 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1504 snapshot.check_invariants(true);
1505 let expanded_paths = snapshot
1506 .expanded_entries()
1507 .map(|e| e.path.clone())
1508 .collect::<Vec<_>>();
1509
1510 {
1511 let new_worktree = Worktree::local(
1512 root_dir,
1513 true,
1514 fs.clone(),
1515 Default::default(),
1516 &mut cx.to_async(),
1517 )
1518 .await
1519 .unwrap();
1520 new_worktree
1521 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1522 .await;
1523 new_worktree
1524 .update(cx, |tree, _| {
1525 tree.as_local_mut()
1526 .unwrap()
1527 .refresh_entries_for_paths(expanded_paths)
1528 })
1529 .recv()
1530 .await;
1531 let new_snapshot =
1532 new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1533 assert_eq!(
1534 snapshot.entries_without_ids(true),
1535 new_snapshot.entries_without_ids(true)
1536 );
1537 }
1538
1539 for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
1540 for update in updates.lock().iter() {
1541 if update.scan_id >= prev_snapshot.scan_id() as u64 {
1542 prev_snapshot.apply_remote_update(update.clone()).unwrap();
1543 }
1544 }
1545
1546 assert_eq!(
1547 prev_snapshot
1548 .entries(true, 0)
1549 .map(ignore_pending_dir)
1550 .collect::<Vec<_>>(),
1551 snapshot
1552 .entries(true, 0)
1553 .map(ignore_pending_dir)
1554 .collect::<Vec<_>>(),
1555 "wrong updates after snapshot {i}: {updates:#?}",
1556 );
1557 }
1558
1559 fn ignore_pending_dir(entry: &Entry) -> Entry {
1560 let mut entry = entry.clone();
1561 if entry.kind.is_dir() {
1562 entry.kind = EntryKind::Dir
1563 }
1564 entry
1565 }
1566}
1567
1568// The worktree's `UpdatedEntries` event can be used to follow along with
1569// all changes to the worktree's snapshot.
1570fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
1571 let mut entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
1572 cx.subscribe(&cx.handle(), move |tree, _, event, _| {
1573 if let Event::UpdatedEntries(changes) = event {
1574 for (path, _, change_type) in changes.iter() {
1575 let entry = tree.entry_for_path(&path).cloned();
1576 let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
1577 Ok(ix) | Err(ix) => ix,
1578 };
1579 match change_type {
1580 PathChange::Added => entries.insert(ix, entry.unwrap()),
1581 PathChange::Removed => drop(entries.remove(ix)),
1582 PathChange::Updated => {
1583 let entry = entry.unwrap();
1584 let existing_entry = entries.get_mut(ix).unwrap();
1585 assert_eq!(existing_entry.path, entry.path);
1586 *existing_entry = entry;
1587 }
1588 PathChange::AddedOrUpdated | PathChange::Loaded => {
1589 let entry = entry.unwrap();
1590 if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
1591 *entries.get_mut(ix).unwrap() = entry;
1592 } else {
1593 entries.insert(ix, entry);
1594 }
1595 }
1596 }
1597 }
1598
1599 let new_entries = tree.entries(true, 0).cloned().collect::<Vec<_>>();
1600 assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
1601 }
1602 })
1603 .detach();
1604}
1605
1606fn randomly_mutate_worktree(
1607 worktree: &mut Worktree,
1608 rng: &mut impl Rng,
1609 cx: &mut ModelContext<Worktree>,
1610) -> Task<Result<()>> {
1611 log::info!("mutating worktree");
1612 let worktree = worktree.as_local_mut().unwrap();
1613 let snapshot = worktree.snapshot();
1614 let entry = snapshot.entries(false, 0).choose(rng).unwrap();
1615
1616 match rng.gen_range(0_u32..100) {
1617 0..=33 if entry.path.as_ref() != Path::new("") => {
1618 log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
1619 worktree.delete_entry(entry.id, false, cx).unwrap()
1620 }
1621 ..=66 if entry.path.as_ref() != Path::new("") => {
1622 let other_entry = snapshot.entries(false, 0).choose(rng).unwrap();
1623 let new_parent_path = if other_entry.is_dir() {
1624 other_entry.path.clone()
1625 } else {
1626 other_entry.path.parent().unwrap().into()
1627 };
1628 let mut new_path = new_parent_path.join(random_filename(rng));
1629 if new_path.starts_with(&entry.path) {
1630 new_path = random_filename(rng).into();
1631 }
1632
1633 log::info!(
1634 "renaming entry {:?} ({}) to {:?}",
1635 entry.path,
1636 entry.id.0,
1637 new_path
1638 );
1639 let task = worktree.rename_entry(entry.id, new_path, cx);
1640 cx.background_executor().spawn(async move {
1641 task.await?.to_included().unwrap();
1642 Ok(())
1643 })
1644 }
1645 _ => {
1646 if entry.is_dir() {
1647 let child_path = entry.path.join(random_filename(rng));
1648 let is_dir = rng.gen_bool(0.3);
1649 log::info!(
1650 "creating {} at {:?}",
1651 if is_dir { "dir" } else { "file" },
1652 child_path,
1653 );
1654 let task = worktree.create_entry(child_path, is_dir, cx);
1655 cx.background_executor().spawn(async move {
1656 task.await?;
1657 Ok(())
1658 })
1659 } else {
1660 log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
1661 let task =
1662 worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
1663 cx.background_executor().spawn(async move {
1664 task.await?;
1665 Ok(())
1666 })
1667 }
1668 }
1669 }
1670}
1671
1672async fn randomly_mutate_fs(
1673 fs: &Arc<dyn Fs>,
1674 root_path: &Path,
1675 insertion_probability: f64,
1676 rng: &mut impl Rng,
1677) {
1678 log::info!("mutating fs");
1679 let mut files = Vec::new();
1680 let mut dirs = Vec::new();
1681 for path in fs.as_fake().paths(false) {
1682 if path.starts_with(root_path) {
1683 if fs.is_file(&path).await {
1684 files.push(path);
1685 } else {
1686 dirs.push(path);
1687 }
1688 }
1689 }
1690
1691 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
1692 let path = dirs.choose(rng).unwrap();
1693 let new_path = path.join(random_filename(rng));
1694
1695 if rng.gen() {
1696 log::info!(
1697 "creating dir {:?}",
1698 new_path.strip_prefix(root_path).unwrap()
1699 );
1700 fs.create_dir(&new_path).await.unwrap();
1701 } else {
1702 log::info!(
1703 "creating file {:?}",
1704 new_path.strip_prefix(root_path).unwrap()
1705 );
1706 fs.create_file(&new_path, Default::default()).await.unwrap();
1707 }
1708 } else if rng.gen_bool(0.05) {
1709 let ignore_dir_path = dirs.choose(rng).unwrap();
1710 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
1711
1712 let subdirs = dirs
1713 .iter()
1714 .filter(|d| d.starts_with(&ignore_dir_path))
1715 .cloned()
1716 .collect::<Vec<_>>();
1717 let subfiles = files
1718 .iter()
1719 .filter(|d| d.starts_with(&ignore_dir_path))
1720 .cloned()
1721 .collect::<Vec<_>>();
1722 let files_to_ignore = {
1723 let len = rng.gen_range(0..=subfiles.len());
1724 subfiles.choose_multiple(rng, len)
1725 };
1726 let dirs_to_ignore = {
1727 let len = rng.gen_range(0..subdirs.len());
1728 subdirs.choose_multiple(rng, len)
1729 };
1730
1731 let mut ignore_contents = String::new();
1732 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
1733 writeln!(
1734 ignore_contents,
1735 "{}",
1736 path_to_ignore
1737 .strip_prefix(&ignore_dir_path)
1738 .unwrap()
1739 .to_str()
1740 .unwrap()
1741 )
1742 .unwrap();
1743 }
1744 log::info!(
1745 "creating gitignore {:?} with contents:\n{}",
1746 ignore_path.strip_prefix(&root_path).unwrap(),
1747 ignore_contents
1748 );
1749 fs.save(
1750 &ignore_path,
1751 &ignore_contents.as_str().into(),
1752 Default::default(),
1753 )
1754 .await
1755 .unwrap();
1756 } else {
1757 let old_path = {
1758 let file_path = files.choose(rng);
1759 let dir_path = dirs[1..].choose(rng);
1760 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
1761 };
1762
1763 let is_rename = rng.gen();
1764 if is_rename {
1765 let new_path_parent = dirs
1766 .iter()
1767 .filter(|d| !d.starts_with(old_path))
1768 .choose(rng)
1769 .unwrap();
1770
1771 let overwrite_existing_dir =
1772 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
1773 let new_path = if overwrite_existing_dir {
1774 fs.remove_dir(
1775 &new_path_parent,
1776 RemoveOptions {
1777 recursive: true,
1778 ignore_if_not_exists: true,
1779 },
1780 )
1781 .await
1782 .unwrap();
1783 new_path_parent.to_path_buf()
1784 } else {
1785 new_path_parent.join(random_filename(rng))
1786 };
1787
1788 log::info!(
1789 "renaming {:?} to {}{:?}",
1790 old_path.strip_prefix(&root_path).unwrap(),
1791 if overwrite_existing_dir {
1792 "overwrite "
1793 } else {
1794 ""
1795 },
1796 new_path.strip_prefix(&root_path).unwrap()
1797 );
1798 fs.rename(
1799 &old_path,
1800 &new_path,
1801 fs::RenameOptions {
1802 overwrite: true,
1803 ignore_if_exists: true,
1804 },
1805 )
1806 .await
1807 .unwrap();
1808 } else if fs.is_file(&old_path).await {
1809 log::info!(
1810 "deleting file {:?}",
1811 old_path.strip_prefix(&root_path).unwrap()
1812 );
1813 fs.remove_file(old_path, Default::default()).await.unwrap();
1814 } else {
1815 log::info!(
1816 "deleting dir {:?}",
1817 old_path.strip_prefix(&root_path).unwrap()
1818 );
1819 fs.remove_dir(
1820 &old_path,
1821 RemoveOptions {
1822 recursive: true,
1823 ignore_if_not_exists: true,
1824 },
1825 )
1826 .await
1827 .unwrap();
1828 }
1829 }
1830}
1831
1832fn random_filename(rng: &mut impl Rng) -> String {
1833 (0..6)
1834 .map(|_| rng.sample(rand::distributions::Alphanumeric))
1835 .map(char::from)
1836 .collect()
1837}
1838
1839#[gpui::test]
1840async fn test_rename_work_directory(cx: &mut TestAppContext) {
1841 init_test(cx);
1842 cx.executor().allow_parking();
1843 let root = temp_tree(json!({
1844 "projects": {
1845 "project1": {
1846 "a": "",
1847 "b": "",
1848 }
1849 },
1850
1851 }));
1852 let root_path = root.path();
1853
1854 let tree = Worktree::local(
1855 root_path,
1856 true,
1857 Arc::new(RealFs::default()),
1858 Default::default(),
1859 &mut cx.to_async(),
1860 )
1861 .await
1862 .unwrap();
1863
1864 let repo = git_init(&root_path.join("projects/project1"));
1865 git_add("a", &repo);
1866 git_commit("init", &repo);
1867 std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
1868
1869 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1870 .await;
1871
1872 tree.flush_fs_events(cx).await;
1873
1874 cx.read(|cx| {
1875 let tree = tree.read(cx);
1876 let (work_dir, _) = tree.repositories().next().unwrap();
1877 assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
1878 assert_eq!(
1879 tree.status_for_file(Path::new("projects/project1/a")),
1880 Some(GitFileStatus::Modified)
1881 );
1882 assert_eq!(
1883 tree.status_for_file(Path::new("projects/project1/b")),
1884 Some(GitFileStatus::Added)
1885 );
1886 });
1887
1888 std::fs::rename(
1889 root_path.join("projects/project1"),
1890 root_path.join("projects/project2"),
1891 )
1892 .ok();
1893 tree.flush_fs_events(cx).await;
1894
1895 cx.read(|cx| {
1896 let tree = tree.read(cx);
1897 let (work_dir, _) = tree.repositories().next().unwrap();
1898 assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
1899 assert_eq!(
1900 tree.status_for_file(Path::new("projects/project2/a")),
1901 Some(GitFileStatus::Modified)
1902 );
1903 assert_eq!(
1904 tree.status_for_file(Path::new("projects/project2/b")),
1905 Some(GitFileStatus::Added)
1906 );
1907 });
1908}
1909
1910#[gpui::test]
1911async fn test_git_repository_for_path(cx: &mut TestAppContext) {
1912 init_test(cx);
1913 cx.executor().allow_parking();
1914 let root = temp_tree(json!({
1915 "c.txt": "",
1916 "dir1": {
1917 ".git": {},
1918 "deps": {
1919 "dep1": {
1920 ".git": {},
1921 "src": {
1922 "a.txt": ""
1923 }
1924 }
1925 },
1926 "src": {
1927 "b.txt": ""
1928 }
1929 },
1930 }));
1931
1932 let tree = Worktree::local(
1933 root.path(),
1934 true,
1935 Arc::new(RealFs::default()),
1936 Default::default(),
1937 &mut cx.to_async(),
1938 )
1939 .await
1940 .unwrap();
1941
1942 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1943 .await;
1944 tree.flush_fs_events(cx).await;
1945
1946 tree.read_with(cx, |tree, _cx| {
1947 let tree = tree.as_local().unwrap();
1948
1949 assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
1950
1951 let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
1952 assert_eq!(
1953 entry
1954 .work_directory(tree)
1955 .map(|directory| directory.as_ref().to_owned()),
1956 Some(Path::new("dir1").to_owned())
1957 );
1958
1959 let entry = tree
1960 .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
1961 .unwrap();
1962 assert_eq!(
1963 entry
1964 .work_directory(tree)
1965 .map(|directory| directory.as_ref().to_owned()),
1966 Some(Path::new("dir1/deps/dep1").to_owned())
1967 );
1968
1969 let entries = tree.files(false, 0);
1970
1971 let paths_with_repos = tree
1972 .entries_with_repositories(entries)
1973 .map(|(entry, repo)| {
1974 (
1975 entry.path.as_ref(),
1976 repo.and_then(|repo| {
1977 repo.work_directory(&tree)
1978 .map(|work_directory| work_directory.0.to_path_buf())
1979 }),
1980 )
1981 })
1982 .collect::<Vec<_>>();
1983
1984 assert_eq!(
1985 paths_with_repos,
1986 &[
1987 (Path::new("c.txt"), None),
1988 (
1989 Path::new("dir1/deps/dep1/src/a.txt"),
1990 Some(Path::new("dir1/deps/dep1").into())
1991 ),
1992 (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
1993 ]
1994 );
1995 });
1996
1997 let repo_update_events = Arc::new(Mutex::new(vec![]));
1998 tree.update(cx, |_, cx| {
1999 let repo_update_events = repo_update_events.clone();
2000 cx.subscribe(&tree, move |_, _, event, _| {
2001 if let Event::UpdatedGitRepositories(update) = event {
2002 repo_update_events.lock().push(update.clone());
2003 }
2004 })
2005 .detach();
2006 });
2007
2008 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
2009 tree.flush_fs_events(cx).await;
2010
2011 assert_eq!(
2012 repo_update_events.lock()[0]
2013 .iter()
2014 .map(|e| e.0.clone())
2015 .collect::<Vec<Arc<Path>>>(),
2016 vec![Path::new("dir1").into()]
2017 );
2018
2019 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
2020 tree.flush_fs_events(cx).await;
2021
2022 tree.read_with(cx, |tree, _cx| {
2023 let tree = tree.as_local().unwrap();
2024
2025 assert!(tree
2026 .repository_for_path("dir1/src/b.txt".as_ref())
2027 .is_none());
2028 });
2029}
2030
2031#[gpui::test]
2032async fn test_git_status(cx: &mut TestAppContext) {
2033 init_test(cx);
2034 cx.executor().allow_parking();
2035 const IGNORE_RULE: &str = "**/target";
2036
2037 let root = temp_tree(json!({
2038 "project": {
2039 "a.txt": "a",
2040 "b.txt": "bb",
2041 "c": {
2042 "d": {
2043 "e.txt": "eee"
2044 }
2045 },
2046 "f.txt": "ffff",
2047 "target": {
2048 "build_file": "???"
2049 },
2050 ".gitignore": IGNORE_RULE
2051 },
2052
2053 }));
2054
2055 const A_TXT: &str = "a.txt";
2056 const B_TXT: &str = "b.txt";
2057 const E_TXT: &str = "c/d/e.txt";
2058 const F_TXT: &str = "f.txt";
2059 const DOTGITIGNORE: &str = ".gitignore";
2060 const BUILD_FILE: &str = "target/build_file";
2061 let project_path = Path::new("project");
2062
2063 // Set up git repository before creating the worktree.
2064 let work_dir = root.path().join("project");
2065 let mut repo = git_init(work_dir.as_path());
2066 repo.add_ignore_rule(IGNORE_RULE).unwrap();
2067 git_add(A_TXT, &repo);
2068 git_add(E_TXT, &repo);
2069 git_add(DOTGITIGNORE, &repo);
2070 git_commit("Initial commit", &repo);
2071
2072 let tree = Worktree::local(
2073 root.path(),
2074 true,
2075 Arc::new(RealFs::default()),
2076 Default::default(),
2077 &mut cx.to_async(),
2078 )
2079 .await
2080 .unwrap();
2081
2082 tree.flush_fs_events(cx).await;
2083 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2084 .await;
2085 cx.executor().run_until_parked();
2086
2087 // Check that the right git state is observed on startup
2088 tree.read_with(cx, |tree, _cx| {
2089 let snapshot = tree.snapshot();
2090 assert_eq!(snapshot.repositories().count(), 1);
2091 let (dir, repo_entry) = snapshot.repositories().next().unwrap();
2092 assert_eq!(dir.as_ref(), Path::new("project"));
2093 assert!(repo_entry.location_in_repo.is_none());
2094
2095 assert_eq!(
2096 snapshot.status_for_file(project_path.join(B_TXT)),
2097 Some(GitFileStatus::Added)
2098 );
2099 assert_eq!(
2100 snapshot.status_for_file(project_path.join(F_TXT)),
2101 Some(GitFileStatus::Added)
2102 );
2103 });
2104
2105 // Modify a file in the working copy.
2106 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
2107 tree.flush_fs_events(cx).await;
2108 cx.executor().run_until_parked();
2109
2110 // The worktree detects that the file's git status has changed.
2111 tree.read_with(cx, |tree, _cx| {
2112 let snapshot = tree.snapshot();
2113 assert_eq!(
2114 snapshot.status_for_file(project_path.join(A_TXT)),
2115 Some(GitFileStatus::Modified)
2116 );
2117 });
2118
2119 // Create a commit in the git repository.
2120 git_add(A_TXT, &repo);
2121 git_add(B_TXT, &repo);
2122 git_commit("Committing modified and added", &repo);
2123 tree.flush_fs_events(cx).await;
2124 cx.executor().run_until_parked();
2125
2126 // The worktree detects that the files' git status have changed.
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(E_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
            Some(GitFileStatus::Added)
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(
                project_path
                    .join(Path::new(renamed_dir_name))
                    .join(RENAMED_FILE)
            ),
            Some(GitFileStatus::Added)
        );
    });
}

#[gpui::test]
async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = temp_tree(json!({
        "my-repo": {
            // .git folder will go here
            "a.txt": "a",
            "sub-folder-1": {
                "sub-folder-2": {
                    "c.txt": "cc",
                    "d": {
                        "e.txt": "eee"
                    }
                },
            }
        },

    }));

    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Set up git repository before creating the worktree.
    let git_repo_work_dir = root.path().join("my-repo");
    let repo = git_init(git_repo_work_dir.as_path());
    git_add(C_TXT, &repo);
    git_commit("Initial commit", &repo);

    // Open the worktree in a subfolder of the repository.
    let project_root = Path::new("my-repo/sub-folder-1/sub-folder-2");
    let tree = Worktree::local(
        root.path().join(project_root),
        true,
        Arc::new(RealFs::default()),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    tree.flush_fs_events(cx).await;
    tree.flush_fs_events_in_root_git_repository(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Ensure that the git status is loaded correctly
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(snapshot.repositories().count(), 1);
        let (dir, repo_entry) = snapshot.repositories().next().unwrap();
        // The path is blank because the repository's working directory is an
        // ancestor of the worktree root, so it maps to the worktree root itself.
        assert_eq!(dir.as_ref(), Path::new(""));

        // `location_in_repo` is the worktree root's path (sub-folder-2) relative
        // to the root of the repository (my-repo).
        assert_eq!(
            repo_entry.location_in_repo,
            Some(Arc::from(Path::new("sub-folder-1/sub-folder-2")))
        );

        assert_eq!(snapshot.status_for_file("c.txt"), None);
        assert_eq!(
            snapshot.status_for_file("d/e.txt"),
            Some(GitFileStatus::Added)
        );
    });

    // Now we simulate FS events, but ONLY in the .git folder that's outside
    // of our project root.
    // Meaning: we don't produce any FS events for files inside the project.
    git_add(E_TXT, &repo);
    git_commit("Second commit", &repo);
    tree.flush_fs_events_in_root_git_repository(cx).await;
    cx.executor().run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert!(snapshot.repositories().next().is_some());

        assert_eq!(snapshot.status_for_file("c.txt"), None);
        assert_eq!(snapshot.status_for_file("d/e.txt"), None);
    });
}

#[gpui::test]
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".git": {},
            "a": {
                "b": {
                    "c1.txt": "",
                    "c2.txt": "",
                },
                "d": {
                    "e1.txt": "",
                    "e2.txt": "",
                    "e3.txt": "",
                }
            },
            "f": {
                "no-status.txt": ""
            },
            "g": {
                "h1.txt": "",
                "h2.txt": ""
            },

        }),
    )
    .await;

    fs.set_status_for_repo_via_git_operation(
        &Path::new("/root/.git"),
        &[
            (Path::new("a/b/c1.txt"), GitFileStatus::Added),
            (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
            (Path::new("g/h2.txt"), GitFileStatus::Conflict),
        ],
    );

    let tree = Worktree::local(
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    cx.executor().run_until_parked();
    let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());

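    // In the expectations below, a directory carries the most severe status
    // found among its descendants (Conflict over Modified over Added).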
    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new(""), Some(GitFileStatus::Conflict)),
            (Path::new("a"), Some(GitFileStatus::Modified)),
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
            (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f/no-status.txt"), None),
        ],
    );

    #[track_caller]
    fn check_propagated_statuses(
        snapshot: &Snapshot,
        expected_statuses: &[(&Path, Option<GitFileStatus>)],
    ) {
        let mut entries = expected_statuses
            .iter()
            .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
            .collect::<Vec<_>>();
        snapshot.propagate_git_statuses(&mut entries);
        assert_eq!(
            entries
                .iter()
                .map(|e| (e.path.as_ref(), e.git_status))
                .collect::<Vec<_>>(),
            expected_statuses
        );
    }
}

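/// Creates an empty git repository at `path`, panicking on failure.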
#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
    git2::Repository::init(path).expect("Failed to initialize git repository")
}

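/// Stages the file at `path` (relative to the repository root) in the index.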
#[track_caller]
fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
    let path = path.as_ref();
    let mut index = repo.index().expect("Failed to get index");
    index.add_path(path).expect("Failed to add file to index");
    index.write().expect("Failed to write index");
}

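/// Removes `path` from the repository's index without touching the working copy.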
#[track_caller]
fn git_remove_index(path: &Path, repo: &git2::Repository) {
    let mut index = repo.index().expect("Failed to get index");
    index
        .remove_path(path)
        .expect("Failed to remove file from index");
    index.write().expect("Failed to write index");
}

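/// Commits the current index with message `msg`, using the existing HEAD commit (if any) as the parent.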
#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    let oid = repo.index().unwrap().write_tree().unwrap();
    let tree = repo.find_tree(oid).unwrap();
    if let Ok(head) = repo.head() {
        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
        let parent_commit = parent_obj.as_commit().unwrap();

        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[parent_commit],
        )
        .expect("Failed to commit with parent");
    } else {
        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
            .expect("Failed to commit");
    }
}

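/// Stashes the repository's current working-copy and index changes.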
#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}

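/// Soft-resets HEAD to its parent commit at index `offset`, leaving the index
/// and working copy unchanged.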
#[track_caller]
fn git_reset(offset: usize, repo: &git2::Repository) {
    let head = repo.head().expect("Couldn't get repo head");
    let object = head.peel(git2::ObjectType::Commit).unwrap();
    let commit = object.as_commit().unwrap();
    let new_head = commit
        .parents()
        .nth(offset)
        .expect("Not enough history");
    repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
        .expect("Could not reset");
}

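/// Collects the repository's current status entries into a map from path to status flags.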
#[allow(dead_code)]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}

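/// Asserts that each expected excluded path has no entry, each expected ignored
/// path has an ignored entry, and each expected tracked path has a non-ignored entry.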
#[track_caller]
fn check_worktree_entries(
    tree: &Worktree,
    expected_excluded_paths: &[&str],
    expected_ignored_paths: &[&str],
    expected_tracked_paths: &[&str],
) {
    for path in expected_excluded_paths {
        let entry = tree.entry_for_path(path);
        assert!(
            entry.is_none(),
            "expected path '{path}' to be excluded, but got entry: {entry:?}",
        );
    }
    for path in expected_ignored_paths {
        let entry = tree
            .entry_for_path(path)
            .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
        assert!(
            entry.is_ignored,
            "expected path '{path}' to be ignored, but got entry: {entry:?}",
        );
    }
    for path in expected_tracked_paths {
        let entry = tree
            .entry_for_path(path)
            .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
        assert!(
            !entry.is_ignored,
            "expected path '{path}' to be tracked, but got entry: {entry:?}",
        );
    }
}

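/// Initializes optional logging and registers the settings required by these worktree tests.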
fn init_test(cx: &mut gpui::TestAppContext) {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        WorktreeSettings::register(cx);
    });
}

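/// Asserts that the entry at `path` exists with the given git status and ignore flag.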
fn assert_entry_git_state(
    tree: &Worktree,
    path: &str,
    git_status: Option<GitFileStatus>,
    is_ignored: bool,
) {
    let entry = tree
        .entry_for_path(path)
        .unwrap_or_else(|| panic!("entry {path} not found"));
    assert_eq!(entry.git_status, git_status);
    assert_eq!(entry.is_ignored, is_ignored);
}