use crate::{
    worktree_settings::WorktreeSettings, Entry, EntryKind, Event, PathChange, Snapshot, Worktree,
    WorktreeModelHandle,
};
use anyhow::Result;
use client::Client;
use clock::FakeSystemClock;
use fs::{FakeFs, Fs, RealFs, RemoveOptions};
use git::{repository::GitFileStatus, GITIGNORE};
use gpui::{BorrowAppContext, ModelContext, Task, TestAppContext};
use http::FakeHttpClient;
use parking_lot::Mutex;
use postage::stream::Stream;
use pretty_assertions::assert_eq;
use rand::prelude::*;
use serde_json::json;
use settings::{Settings, SettingsStore};
use std::{env, fmt::Write, mem, path::Path, sync::Arc};
use util::{test::temp_tree, ResultExt};

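// Scanning a tree with a .gitignore should hide the ignored entries from the
// non-ignored traversal while still including them when ignored entries are requested.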
#[gpui::test]
async fn test_traversal(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "a/b\n",
            "a": {
                "b": "",
                "c": "",
            }
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new(".gitignore"),
                Path::new("a"),
                Path::new("a/c"),
            ]
        );
        assert_eq!(
            tree.entries(true)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new(".gitignore"),
                Path::new("a"),
                Path::new("a/b"),
                Path::new("a/c"),
            ]
        );
    })
}

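// Symlinks that point back into their own ancestors should appear as entries
// without being traversed into, both on the initial scan and after a rename.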
#[gpui::test(iterations = 10)]
async fn test_circular_symlinks(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "lib": {
                "a": {
                    "a.txt": ""
                },
                "b": {
                    "b.txt": ""
                }
            }
        }),
    )
    .await;
    fs.create_symlink("/root/lib/a/lib".as_ref(), "..".into())
        .await
        .unwrap();
    fs.create_symlink("/root/lib/b/lib".as_ref(), "..".into())
        .await
        .unwrap();

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new("lib"),
                Path::new("lib/a"),
                Path::new("lib/a/a.txt"),
                Path::new("lib/a/lib"),
                Path::new("lib/b"),
                Path::new("lib/b/b.txt"),
                Path::new("lib/b/lib"),
            ]
        );
    });

    fs.rename(
        Path::new("/root/lib/a/lib"),
        Path::new("/root/lib/a/lib-2"),
        Default::default(),
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(false)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![
                Path::new(""),
                Path::new("lib"),
                Path::new("lib/a"),
                Path::new("lib/a/a.txt"),
                Path::new("lib/a/lib-2"),
                Path::new("lib/b"),
                Path::new("lib/b/b.txt"),
                Path::new("lib/b/lib"),
            ]
        );
    });
}

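// Symlinks that lead outside the worktree root are marked `is_external` and are
// only loaded lazily, one level at a time, when explicitly expanded.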
#[gpui::test]
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "dir1": {
                "deps": {
                    // symlinks here
                },
                "src": {
                    "a.rs": "",
                    "b.rs": "",
                },
            },
            "dir2": {
                "src": {
                    "c.rs": "",
                    "d.rs": "",
                }
            },
            "dir3": {
                "deps": {},
                "src": {
                    "e.rs": "",
                    "f.rs": "",
                },
            }
        }),
    )
    .await;

    // These symlinks point to directories outside of the worktree's root, dir1.
    fs.create_symlink("/root/dir1/deps/dep-dir2".as_ref(), "../../dir2".into())
        .await
        .unwrap();
    fs.create_symlink("/root/dir1/deps/dep-dir3".as_ref(), "../../dir3".into())
        .await
        .unwrap();

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root/dir1"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    let tree_updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |_, cx| {
        let tree_updates = tree_updates.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedEntries(update) = event {
                tree_updates.lock().extend(
                    update
                        .iter()
                        .map(|(path, _, change)| (path.clone(), *change)),
                );
            }
        })
        .detach();
    });

    // The symlinked directories are not scanned by default.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );

        assert_eq!(
            tree.entry_for_path("deps/dep-dir2").unwrap().kind,
            EntryKind::UnloadedDir
        );
    });

    // Expand one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
    })
    .recv()
    .await;

    // The expanded directory's contents are loaded. Subdirectories are
    // not scanned yet.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("deps/dep-dir3/deps"), true),
                (Path::new("deps/dep-dir3/src"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );
    });
    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
            (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
            (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
        ]
    );

    // Expand a subdirectory of one of the symlinked directories.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
    })
    .recv()
    .await;

    // The expanded subdirectory's contents are loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_external))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new("deps"), false),
                (Path::new("deps/dep-dir2"), true),
                (Path::new("deps/dep-dir3"), true),
                (Path::new("deps/dep-dir3/deps"), true),
                (Path::new("deps/dep-dir3/src"), true),
                (Path::new("deps/dep-dir3/src/e.rs"), true),
                (Path::new("deps/dep-dir3/src/f.rs"), true),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
            ]
        );
    });

    assert_eq!(
        mem::take(&mut *tree_updates.lock()),
        &[
            (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
            (
                Path::new("deps/dep-dir3/src/e.rs").into(),
                PathChange::Loaded
            ),
            (
                Path::new("deps/dep-dir3/src/f.rs").into(),
                PathChange::Loaded
            )
        ]
    );
}

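// On macOS, where the default file system is case-insensitive but case-preserving,
// a rename that only changes a file's case should still be detected as a rename.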
#[cfg(target_os = "macos")]
#[gpui::test]
async fn test_renaming_case_only(cx: &mut TestAppContext) {
    cx.executor().allow_parking();
    init_test(cx);

    const OLD_NAME: &str = "aaa.rs";
    const NEW_NAME: &str = "AAA.rs";

    let fs = Arc::new(RealFs::default());
    let temp_root = temp_tree(json!({
        OLD_NAME: "",
    }));

    let tree = Worktree::local(
        build_client(cx),
        temp_root.path(),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new(""), Path::new(OLD_NAME)]
        );
    });

    fs.rename(
        &temp_root.path().join(OLD_NAME),
        &temp_root.path().join(NEW_NAME),
        fs::RenameOptions {
            overwrite: true,
            ignore_if_exists: true,
        },
    )
    .await
    .unwrap();

    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| entry.path.as_ref())
                .collect::<Vec<_>>(),
            vec![Path::new(""), Path::new(NEW_NAME)]
        );
    });
}

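// Opening a buffer inside a gitignored directory should lazily load only the
// directories along that path, and changes inside still-unloaded directories
// should trigger no additional file system calls.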
#[gpui::test]
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "one": {
                "node_modules": {
                    "a": {
                        "a1.js": "a1",
                        "a2.js": "a2",
                    },
                    "b": {
                        "b1.js": "b1",
                        "b2.js": "b2",
                    },
                    "c": {
                        "c1.js": "c1",
                        "c2.js": "c2",
                    }
                },
            },
            "two": {
                "x.js": "",
                "y.js": "",
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );
    });

    // Open a file that is nested inside of a gitignored directory that
    // has not yet been expanded.
    let prev_read_dir_count = fs.read_dir_call_count();
    let buffer = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .load_buffer("one/node_modules/b/b1.js".as_ref(), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, cx| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("one/node_modules/a"), true),
                (Path::new("one/node_modules/b"), true),
                (Path::new("one/node_modules/b/b1.js"), true),
                (Path::new("one/node_modules/b/b2.js"), true),
                (Path::new("one/node_modules/c"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );

        assert_eq!(
            buffer.read(cx).file().unwrap().path().as_ref(),
            Path::new("one/node_modules/b/b1.js")
        );

        // Only the newly-expanded directories are scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
    });

    // Open another file in a different subdirectory of the same
    // gitignored directory.
    let prev_read_dir_count = fs.read_dir_call_count();
    let buffer = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .load_buffer("one/node_modules/a/a2.js".as_ref(), cx)
        })
        .await
        .unwrap();

    tree.read_with(cx, |tree, cx| {
        assert_eq!(
            tree.entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("one"), false),
                (Path::new("one/node_modules"), true),
                (Path::new("one/node_modules/a"), true),
                (Path::new("one/node_modules/a/a1.js"), true),
                (Path::new("one/node_modules/a/a2.js"), true),
                (Path::new("one/node_modules/b"), true),
                (Path::new("one/node_modules/b/b1.js"), true),
                (Path::new("one/node_modules/b/b2.js"), true),
                (Path::new("one/node_modules/c"), true),
                (Path::new("two"), false),
                (Path::new("two/x.js"), false),
                (Path::new("two/y.js"), false),
            ]
        );

        assert_eq!(
            buffer.read(cx).file().unwrap().path().as_ref(),
            Path::new("one/node_modules/a/a2.js")
        );

        // Only the newly-expanded directory is scanned.
        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
    });

    // No work happens when files and directories change within an unloaded directory.
    let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
    fs.create_dir("/root/one/node_modules/c/lib".as_ref())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(
        fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
        0
    );
}

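// When a .gitignore change causes a previously ignored directory to become
// un-ignored, its contents are scanned; newly ignored subdirectories stay unloaded.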
#[gpui::test]
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "node_modules\n",
            "a": {
                "a.js": "",
            },
            "b": {
                "b.js": "",
            },
            "node_modules": {
                "c": {
                    "c.js": "",
                },
                "d": {
                    "d.js": "",
                    "e": {
                        "e1.js": "",
                        "e2.js": "",
                    },
                    "f": {
                        "f1.js": "",
                        "f2.js": "",
                    }
                },
            },
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // Open a file within the gitignored directory, forcing some of its
    // subdirectories to be read, but not all.
    let read_dir_count_1 = fs.read_dir_call_count();
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
    })
    .recv()
    .await;

    // Those subdirectories are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("a"), false),
                (Path::new("a/a.js"), false),
                (Path::new("b"), false),
                (Path::new("b/b.js"), false),
                (Path::new("node_modules"), true),
                (Path::new("node_modules/c"), true),
                (Path::new("node_modules/d"), true),
                (Path::new("node_modules/d/d.js"), true),
                (Path::new("node_modules/d/e"), true),
                (Path::new("node_modules/d/f"), true),
            ]
        );
    });
    let read_dir_count_2 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);

    // Update the gitignore so that node_modules is no longer ignored,
    // but a subdirectory is ignored.
    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // All of the directories that are no longer ignored are now loaded.
    tree.read_with(cx, |tree, _| {
        assert_eq!(
            tree.entries(true)
                .map(|e| (e.path.as_ref(), e.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("a"), false),
                (Path::new("a/a.js"), false),
                (Path::new("b"), false),
                (Path::new("b/b.js"), false),
                // This directory is no longer ignored
                (Path::new("node_modules"), false),
                (Path::new("node_modules/c"), false),
                (Path::new("node_modules/c/c.js"), false),
                (Path::new("node_modules/d"), false),
                (Path::new("node_modules/d/d.js"), false),
                // This subdirectory is now ignored
                (Path::new("node_modules/d/e"), true),
                (Path::new("node_modules/d/f"), false),
                (Path::new("node_modules/d/f/f1.js"), false),
                (Path::new("node_modules/d/f/f2.js"), false),
            ]
        );
    });

    // Each of the newly-loaded directories is scanned only once.
    let read_dir_count_3 = fs.read_dir_call_count();
    assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
}

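// Rescanning should combine .gitignore rules from ancestor directories with the
// repository's own rules, and keep git statuses in sync as new files appear.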
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        "/root/tree".as_ref(),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        assert_entry_git_state(tree, "tracked-dir/tracked-file1", None, false);
        assert_entry_git_state(tree, "tracked-dir/ancestor-ignored-file1", None, true);
        assert_entry_git_state(tree, "ignored-dir/ignored-file1", None, true);
    });

    fs.set_status_for_repo_via_working_copy_change(
        &Path::new("/root/tree/.git"),
        &[(Path::new("tracked-dir/tracked-file2"), GitFileStatus::Added)],
    );

    fs.create_file(
        "/root/tree/tracked-dir/tracked-file2".as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        "/root/tree/ignored-dir/ignored-file2".as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        let tree = tree.read(cx);
        assert_entry_git_state(
            tree,
            "tracked-dir/tracked-file2",
            Some(GitFileStatus::Added),
            false,
        );
        assert_entry_git_state(tree, "tracked-dir/ancestor-ignored-file2", None, true);
        assert_entry_git_state(tree, "ignored-dir/ignored-file2", None, true);
        assert!(tree.entry_for_path(".git").unwrap().is_ignored);
    });
}

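// Rewriting the root .gitignore should update each entry's ignored flag and git
// status accordingly.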
#[gpui::test]
async fn test_update_gitignore(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        "/root".as_ref(),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .refresh_entries_for_paths(vec![Path::new("").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        assert_entry_git_state(tree, "a.xml", None, false);
        assert_entry_git_state(tree, "b.txt", None, true);
    });

    fs.atomic_write("/root/.gitignore".into(), "*.xml".into())
        .await
        .unwrap();

    fs.set_status_for_repo_via_working_copy_change(
        &Path::new("/root/.git"),
        &[(Path::new("b.txt"), GitFileStatus::Added)],
    );

    cx.executor().run_until_parked();
    cx.read(|cx| {
        let tree = tree.read(cx);
        assert_entry_git_state(tree, "a.xml", None, true);
        assert_entry_git_state(tree, "b.txt", Some(GitFileStatus::Added), false);
    });
}

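// Files written through the worktree should show up as entries immediately, with
// the correct ignored flag for their parent directory.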
#[gpui::test]
async fn test_write_file(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = temp_tree(json!({
        ".git": {},
        ".gitignore": "ignored-dir\n",
        "tracked-dir": {},
        "ignored-dir": {}
    }));

    let tree = Worktree::local(
        build_client(cx),
        dir.path(),
        true,
        Arc::new(RealFs::default()),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.update(cx, |tree, cx| {
        tree.as_local().unwrap().write_file(
            Path::new("tracked-dir/file.txt"),
            "hello".into(),
            Default::default(),
            cx,
        )
    })
    .await
    .unwrap();
    tree.update(cx, |tree, cx| {
        tree.as_local().unwrap().write_file(
            Path::new("ignored-dir/file.txt"),
            "world".into(),
            Default::default(),
            cx,
        )
    })
    .await
    .unwrap();

    tree.read_with(cx, |tree, _| {
        let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
        let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
        assert!(!tracked.is_ignored);
        assert!(ignored.is_ignored);
    });
}

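// Paths matching the `file_scan_exclusions` setting should be omitted from the
// worktree entirely, and changing the setting should rescan accordingly.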
#[gpui::test]
async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = temp_tree(json!({
        ".gitignore": "**/target\n/node_modules\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        ".DS_Store": "",
    }));
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions =
                    Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
            });
        });
    });

    let tree = Worktree::local(
        build_client(cx),
        dir.path(),
        true,
        Arc::new(RealFs::default()),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "node_modules/.DS_Store",
                "src/.DS_Store",
                ".DS_Store",
            ],
            &["target", "node_modules"],
            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
        )
    });

    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions =
                    Some(vec!["**/node_modules/**".to_string()]);
            });
        });
    });
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                "node_modules/prettier/package.json",
                "node_modules/.DS_Store",
                "node_modules",
            ],
            &["target"],
            &[
                ".gitignore",
                "src/lib.rs",
                "src/bar/bar.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/.DS_Store",
                ".DS_Store",
            ],
        )
    });
}

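// New files created inside excluded directories should never appear in the
// worktree, even as file system events arrive for them.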
#[gpui::test]
async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = temp_tree(json!({
        ".git": {
            "HEAD": "ref: refs/heads/main\n",
            "foo": "bar",
        },
        ".gitignore": "**/target\n/node_modules\ntest_output\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        ".DS_Store": "",
    }));
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(vec![
                    "**/.git".to_string(),
                    "node_modules/".to_string(),
                    "build_output".to_string(),
                ]);
            });
        });
    });

    let tree = Worktree::local(
        build_client(cx),
        dir.path(),
        true,
        Arc::new(RealFs::default()),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                ".git/HEAD",
                ".git/foo",
                "node_modules",
                "node_modules/.DS_Store",
                "node_modules/prettier",
                "node_modules/prettier/package.json",
            ],
            &["target"],
            &[
                ".DS_Store",
                "src/.DS_Store",
                "src/lib.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/bar/bar.rs",
                ".gitignore",
            ],
        )
    });

    let new_excluded_dir = dir.path().join("build_output");
    let new_ignored_dir = dir.path().join("test_output");
    std::fs::create_dir_all(&new_excluded_dir)
        .unwrap_or_else(|e| panic!("Failed to create the {new_excluded_dir:?} directory: {e}"));
    std::fs::create_dir_all(&new_ignored_dir)
        .unwrap_or_else(|e| panic!("Failed to create the {new_ignored_dir:?} directory: {e}"));
    let node_modules_dir = dir.path().join("node_modules");
    let dot_git_dir = dir.path().join(".git");
    let src_dir = dir.path().join("src");
    for existing_dir in [&node_modules_dir, &dot_git_dir, &src_dir] {
        assert!(
            existing_dir.is_dir(),
            "Expected {existing_dir:?} to already be present in the FS"
        );
    }

    for directory_for_new_file in [
        new_excluded_dir,
        new_ignored_dir,
        node_modules_dir,
        dot_git_dir,
        src_dir,
    ] {
        std::fs::write(directory_for_new_file.join("new_file"), "new file contents")
            .unwrap_or_else(|e| {
                panic!("Failed to create a new file in {directory_for_new_file:?}: {e}")
            });
    }
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            &[
                ".git/HEAD",
                ".git/foo",
                ".git/new_file",
                "node_modules",
                "node_modules/.DS_Store",
                "node_modules/prettier",
                "node_modules/prettier/package.json",
                "node_modules/new_file",
                "build_output",
                "build_output/new_file",
                "test_output/new_file",
            ],
            &["target", "test_output"],
            &[
                ".DS_Store",
                "src/.DS_Store",
                "src/lib.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/bar/bar.rs",
                "src/new_file",
                ".gitignore",
            ],
        )
    });
}

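// A worktree rooted directly at a .git directory should still scan its contents
// and pick up file system events, treating the entries as ignored.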
#[gpui::test]
async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let dir = temp_tree(json!({
        ".git": {
            "HEAD": "ref: refs/heads/main\n",
            "foo": "foo contents",
        },
    }));
    let dot_git_worktree_dir = dir.path().join(".git");

    let tree = Worktree::local(
        build_client(cx),
        dot_git_worktree_dir.clone(),
        true,
        Arc::new(RealFs::default()),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(tree, &[], &["HEAD", "foo"], &[])
    });

    std::fs::write(dot_git_worktree_dir.join("new_file"), "new file contents")
        .unwrap_or_else(|e| panic!("Failed to create a new file in {dot_git_worktree_dir:?}: {e}"));
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(tree, &[], &["HEAD", "foo", "new_file"], &[])
    });
}

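// Creating a directory while the initial scan is still running should produce the
// same snapshot whether it is observed via remote updates or read directly.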
#[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            "b": {},
            "c": {},
            "d": {},
        }),
    )
    .await;

    let tree = Worktree::local(
        build_client(cx),
        "/root".as_ref(),
        true,
        fs,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let snapshot1 = tree.update(cx, |tree, cx| {
        let tree = tree.as_local_mut().unwrap();
        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
        let _ = tree.observe_updates(0, cx, {
            let snapshot = snapshot.clone();
            move |update| {
                snapshot.lock().apply_remote_update(update).unwrap();
                async { true }
            }
        });
        snapshot
    });

    let entry = tree
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("a/e".as_ref(), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();
    assert!(entry.is_dir());

    cx.executor().run_until_parked();
    tree.read_with(cx, |tree, _| {
        assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
    });

    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
    assert_eq!(
        snapshot1.lock().entries(true).collect::<Vec<_>>(),
        snapshot2.entries(true).collect::<Vec<_>>()
    );
}

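// `create_entry` should create any missing parent directories, both on the fake
// file system and on the real one.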
#[gpui::test]
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let client_fake = cx.update(|cx| {
        Client::new(
            Arc::new(FakeSystemClock::default()),
            FakeHttpClient::with_404_response(),
            cx,
        )
    });

    let fs_fake = FakeFs::new(cx.background_executor.clone());
    fs_fake
        .insert_tree(
            "/root",
            json!({
                "a": {},
            }),
        )
        .await;

    let tree_fake = Worktree::local(
        client_fake,
        "/root".as_ref(),
        true,
        fs_fake,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let entry = tree_fake
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("a/b/c/d.txt".as_ref(), false, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_fake.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
        assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
        assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
    });

    let client_real = cx.update(|cx| {
        Client::new(
            Arc::new(FakeSystemClock::default()),
            FakeHttpClient::with_404_response(),
            cx,
        )
    });

    let fs_real = Arc::new(RealFs::default());
    let temp_root = temp_tree(json!({
        "a": {}
    }));

    let tree_real = Worktree::local(
        client_real,
        temp_root.path(),
        true,
        fs_real,
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("a/b/c/d.txt".as_ref(), false, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
        assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
        assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
    });

    // Test the smallest change: only the new file itself is created, since all of
    // its parent directories already exist.
    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("a/b/c/e.txt".as_ref(), false, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
    });

    // Test the largest change: every ancestor directory of the new file must be created.
    let entry = tree_real
        .update(cx, |tree, cx| {
            tree.as_local_mut()
                .unwrap()
                .create_entry("d/e/f/g.txt".as_ref(), false, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();
    assert!(entry.is_file());

    cx.executor().run_until_parked();
    tree_real.read_with(cx, |tree, _| {
        assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
        assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
        assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
        assert!(tree.entry_for_path("d/").unwrap().is_dir());
    });
}

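// Randomized test: mutate the worktree while its initial scan is still in progress,
// then verify that replaying the observed updates onto any earlier snapshot
// reproduces the final state.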
#[gpui::test(iterations = 100)]
async fn test_random_worktree_operations_during_initial_scan(
    cx: &mut TestAppContext,
    mut rng: StdRng,
) {
    init_test(cx);
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(5);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new("/test");
    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        build_client(cx),
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    for _ in 0..operations {
        worktree
            .update(cx, |worktree, cx| {
                randomly_mutate_worktree(worktree, &mut rng, cx)
            })
            .await
            .log_err();
        worktree.read_with(cx, |tree, _| {
            tree.as_local().unwrap().snapshot().check_invariants(true)
        });

        if rng.gen_bool(0.6) {
            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
        }
    }

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    cx.executor().run_until_parked();

    let final_snapshot = worktree.read_with(cx, |tree, _| {
        let tree = tree.as_local().unwrap();
        let snapshot = tree.snapshot();
        snapshot.check_invariants(true);
        snapshot
    });

    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
        let mut updated_snapshot = snapshot.clone();
        for update in updates.lock().iter() {
            if update.scan_id >= updated_snapshot.scan_id() as u64 {
                updated_snapshot
                    .apply_remote_update(update.clone())
                    .unwrap();
            }
        }

        assert_eq!(
            updated_snapshot.entries(true).collect::<Vec<_>>(),
            final_snapshot.entries(true).collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
        );
    }
}

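// Randomized test: interleave worktree operations with raw file system mutations
// and batched event delivery, then check that a fresh scan and replayed snapshot
// updates both converge on the same final state.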
#[gpui::test(iterations = 100)]
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
    init_test(cx);
    let operations = env::var("OPERATIONS")
        .map(|o| o.parse().unwrap())
        .unwrap_or(40);
    let initial_entries = env::var("INITIAL_ENTRIES")
        .map(|o| o.parse().unwrap())
        .unwrap_or(20);

    let root_dir = Path::new("/test");
    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
    fs.as_fake().insert_tree(root_dir, json!({})).await;
    for _ in 0..initial_entries {
        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
    }
    log::info!("generated initial tree");

    let worktree = Worktree::local(
        build_client(cx),
        root_dir,
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let updates = Arc::new(Mutex::new(Vec::new()));
    worktree.update(cx, |tree, cx| {
        check_worktree_change_events(tree, cx);

        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    worktree
        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
        .await;

    fs.as_fake().pause_events();
    let mut snapshots = Vec::new();
    let mut mutations_len = operations;
    while mutations_len > 1 {
        if rng.gen_bool(0.2) {
            worktree
                .update(cx, |worktree, cx| {
                    randomly_mutate_worktree(worktree, &mut rng, cx)
                })
                .await
                .log_err();
        } else {
            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
        }

        let buffered_event_count = fs.as_fake().buffered_event_count();
        if buffered_event_count > 0 && rng.gen_bool(0.3) {
            let len = rng.gen_range(0..=buffered_event_count);
            log::info!("flushing {} events", len);
            fs.as_fake().flush_events(len);
        } else {
            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
            mutations_len -= 1;
        }

        cx.executor().run_until_parked();
        if rng.gen_bool(0.2) {
            log::info!("storing snapshot {}", snapshots.len());
            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
            snapshots.push(snapshot);
        }
    }

    log::info!("quiescing");
    fs.as_fake().flush_events(usize::MAX);
    cx.executor().run_until_parked();

    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    snapshot.check_invariants(true);
    let expanded_paths = snapshot
        .expanded_entries()
        .map(|e| e.path.clone())
        .collect::<Vec<_>>();

    {
        let new_worktree = Worktree::local(
            build_client(cx),
            root_dir,
            true,
            fs.clone(),
            Default::default(),
            &mut cx.to_async(),
        )
        .await
        .unwrap();
        new_worktree
            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
            .await;
        new_worktree
            .update(cx, |tree, _| {
                tree.as_local_mut()
                    .unwrap()
                    .refresh_entries_for_paths(expanded_paths)
            })
            .recv()
            .await;
        let new_snapshot =
            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        assert_eq!(
            snapshot.entries_without_ids(true),
            new_snapshot.entries_without_ids(true)
        );
    }

    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
        for update in updates.lock().iter() {
            if update.scan_id >= prev_snapshot.scan_id() as u64 {
                prev_snapshot.apply_remote_update(update.clone()).unwrap();
            }
        }

        assert_eq!(
            prev_snapshot
                .entries(true)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            snapshot
                .entries(true)
                .map(ignore_pending_dir)
                .collect::<Vec<_>>(),
            "wrong updates after snapshot {i}: {updates:#?}",
        );
    }

    fn ignore_pending_dir(entry: &Entry) -> Entry {
        let mut entry = entry.clone();
        if entry.kind.is_dir() {
            entry.kind = EntryKind::Dir
        }
        entry
    }
}

// The worktree's `UpdatedEntries` event can be used to follow along with
// all changes to the worktree's snapshot.
fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
    let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
    cx.subscribe(&cx.handle(), move |tree, _, event, _| {
        if let Event::UpdatedEntries(changes) = event {
            for (path, _, change_type) in changes.iter() {
                let entry = tree.entry_for_path(&path).cloned();
                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
                    Ok(ix) | Err(ix) => ix,
                };
                match change_type {
                    PathChange::Added => entries.insert(ix, entry.unwrap()),
                    PathChange::Removed => drop(entries.remove(ix)),
                    PathChange::Updated => {
                        let entry = entry.unwrap();
                        let existing_entry = entries.get_mut(ix).unwrap();
                        assert_eq!(existing_entry.path, entry.path);
                        *existing_entry = entry;
                    }
                    PathChange::AddedOrUpdated | PathChange::Loaded => {
                        let entry = entry.unwrap();
                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
                            *entries.get_mut(ix).unwrap() = entry;
                        } else {
                            entries.insert(ix, entry);
                        }
                    }
                }
            }

            let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
        }
    })
    .detach();
}

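// Applies one random operation to the worktree itself: deleting, renaming,
// creating, or overwriting an entry.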
fn randomly_mutate_worktree(
    worktree: &mut Worktree,
    rng: &mut impl Rng,
    cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> {
    log::info!("mutating worktree");
    let worktree = worktree.as_local_mut().unwrap();
    let snapshot = worktree.snapshot();
    let entry = snapshot.entries(false).choose(rng).unwrap();

    match rng.gen_range(0_u32..100) {
        0..=33 if entry.path.as_ref() != Path::new("") => {
            log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
            worktree.delete_entry(entry.id, false, cx).unwrap()
        }
        ..=66 if entry.path.as_ref() != Path::new("") => {
            let other_entry = snapshot.entries(false).choose(rng).unwrap();
            let new_parent_path = if other_entry.is_dir() {
                other_entry.path.clone()
            } else {
                other_entry.path.parent().unwrap().into()
            };
            let mut new_path = new_parent_path.join(random_filename(rng));
            if new_path.starts_with(&entry.path) {
                new_path = random_filename(rng).into();
            }

            log::info!(
                "renaming entry {:?} ({}) to {:?}",
                entry.path,
                entry.id.0,
                new_path
            );
            let task = worktree.rename_entry(entry.id, new_path, cx);
            cx.background_executor().spawn(async move {
                task.await?.to_included().unwrap();
                Ok(())
            })
        }
        _ => {
            if entry.is_dir() {
                let child_path = entry.path.join(random_filename(rng));
                let is_dir = rng.gen_bool(0.3);
                log::info!(
                    "creating {} at {:?}",
                    if is_dir { "dir" } else { "file" },
                    child_path,
                );
                let task = worktree.create_entry(child_path, is_dir, cx);
                cx.background_executor().spawn(async move {
                    task.await?;
                    Ok(())
                })
            } else {
                log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
                let task =
                    worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
                cx.background_executor().spawn(async move {
                    task.await?;
                    Ok(())
                })
            }
        }
    }
}

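// Applies one random mutation directly to the fake file system: creating a file or
// directory, writing a .gitignore, renaming, or deleting an existing path.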
async fn randomly_mutate_fs(
    fs: &Arc<dyn Fs>,
    root_path: &Path,
    insertion_probability: f64,
    rng: &mut impl Rng,
) {
    log::info!("mutating fs");
    let mut files = Vec::new();
    let mut dirs = Vec::new();
    for path in fs.as_fake().paths(false) {
        if path.starts_with(root_path) {
            if fs.is_file(&path).await {
                files.push(path);
            } else {
                dirs.push(path);
            }
        }
    }

    if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
        let path = dirs.choose(rng).unwrap();
        let new_path = path.join(random_filename(rng));

        if rng.gen() {
            log::info!(
                "creating dir {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_dir(&new_path).await.unwrap();
        } else {
            log::info!(
                "creating file {:?}",
                new_path.strip_prefix(root_path).unwrap()
            );
            fs.create_file(&new_path, Default::default()).await.unwrap();
        }
    } else if rng.gen_bool(0.05) {
        let ignore_dir_path = dirs.choose(rng).unwrap();
        let ignore_path = ignore_dir_path.join(&*GITIGNORE);

        let subdirs = dirs
            .iter()
            .filter(|d| d.starts_with(&ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let subfiles = files
            .iter()
            .filter(|d| d.starts_with(&ignore_dir_path))
            .cloned()
            .collect::<Vec<_>>();
        let files_to_ignore = {
            let len = rng.gen_range(0..=subfiles.len());
            subfiles.choose_multiple(rng, len)
        };
        let dirs_to_ignore = {
            let len = rng.gen_range(0..subdirs.len());
            subdirs.choose_multiple(rng, len)
        };

        let mut ignore_contents = String::new();
        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
            writeln!(
                ignore_contents,
                "{}",
                path_to_ignore
                    .strip_prefix(&ignore_dir_path)
                    .unwrap()
                    .to_str()
                    .unwrap()
            )
            .unwrap();
        }
        log::info!(
            "creating gitignore {:?} with contents:\n{}",
            ignore_path.strip_prefix(&root_path).unwrap(),
            ignore_contents
        );
        fs.save(
            &ignore_path,
            &ignore_contents.as_str().into(),
            Default::default(),
        )
        .await
        .unwrap();
    } else {
        let old_path = {
            let file_path = files.choose(rng);
            let dir_path = dirs[1..].choose(rng);
            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
        };

        let is_rename = rng.gen();
        if is_rename {
            let new_path_parent = dirs
                .iter()
                .filter(|d| !d.starts_with(old_path))
                .choose(rng)
                .unwrap();

            let overwrite_existing_dir =
                !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
            let new_path = if overwrite_existing_dir {
                fs.remove_dir(
                    &new_path_parent,
                    RemoveOptions {
                        recursive: true,
                        ignore_if_not_exists: true,
                    },
                )
                .await
                .unwrap();
                new_path_parent.to_path_buf()
            } else {
                new_path_parent.join(random_filename(rng))
            };

            log::info!(
                "renaming {:?} to {}{:?}",
                old_path.strip_prefix(&root_path).unwrap(),
                if overwrite_existing_dir {
                    "overwrite "
                } else {
                    ""
                },
                new_path.strip_prefix(&root_path).unwrap()
            );
            fs.rename(
                &old_path,
                &new_path,
                fs::RenameOptions {
                    overwrite: true,
                    ignore_if_exists: true,
                },
            )
            .await
            .unwrap();
        } else if fs.is_file(&old_path).await {
            log::info!(
                "deleting file {:?}",
                old_path.strip_prefix(&root_path).unwrap()
            );
            fs.remove_file(old_path, Default::default()).await.unwrap();
        } else {
            log::info!(
                "deleting dir {:?}",
                old_path.strip_prefix(&root_path).unwrap()
            );
            fs.remove_dir(
                &old_path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await
            .unwrap();
        }
    }
}

fn random_filename(rng: &mut impl Rng) -> String {
    (0..6)
        .map(|_| rng.sample(rand::distributions::Alphanumeric))
        .map(char::from)
        .collect()
}

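// Renaming a git repository's working directory should carry its repository entry
// and per-file statuses over to the new path.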
#[gpui::test]
async fn test_rename_work_directory(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = temp_tree(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    let tree = Worktree::local(
        build_client(cx),
        root_path,
        true,
        Arc::new(RealFs::default()),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").ok();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    tree.flush_fs_events(cx).await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        let (work_dir, _) = tree.repositories().next().unwrap();
        assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
        assert_eq!(
            tree.status_for_file(Path::new("projects/project1/a")),
            Some(GitFileStatus::Modified)
        );
        assert_eq!(
            tree.status_for_file(Path::new("projects/project1/b")),
            Some(GitFileStatus::Added)
        );
    });

    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .ok();
    tree.flush_fs_events(cx).await;

    cx.read(|cx| {
        let tree = tree.read(cx);
        let (work_dir, _) = tree.repositories().next().unwrap();
        assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
        assert_eq!(
            tree.status_for_file(Path::new("projects/project2/a")),
            Some(GitFileStatus::Modified)
        );
        assert_eq!(
            tree.status_for_file(Path::new("projects/project2/b")),
            Some(GitFileStatus::Added)
        );
    });
}

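// `repository_for_path` should resolve each path to its innermost containing git
// repository, and removing a .git directory should drop the association.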
#[gpui::test]
async fn test_git_repository_for_path(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = temp_tree(json!({
        "c.txt": "",
        "dir1": {
            ".git": {},
            "deps": {
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": ""
                    }
                }
            },
            "src": {
                "b.txt": ""
            }
        },
    }));

    let tree = Worktree::local(
        build_client(cx),
        root.path(),
        true,
        Arc::new(RealFs::default()),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _cx| {
        let tree = tree.as_local().unwrap();

        assert!(tree.repository_for_path("c.txt".as_ref()).is_none());

        let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
        assert_eq!(
            entry
                .work_directory(tree)
                .map(|directory| directory.as_ref().to_owned()),
            Some(Path::new("dir1").to_owned())
        );

        let entry = tree
            .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
            .unwrap();
        assert_eq!(
            entry
                .work_directory(tree)
                .map(|directory| directory.as_ref().to_owned()),
            Some(Path::new("dir1/deps/dep1").to_owned())
        );

        let entries = tree.files(false, 0);

        let paths_with_repos = tree
            .entries_with_repositories(entries)
            .map(|(entry, repo)| {
                (
                    entry.path.as_ref(),
                    repo.and_then(|repo| {
                        repo.work_directory(&tree)
                            .map(|work_directory| work_directory.0.to_path_buf())
                    }),
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            paths_with_repos,
            &[
                (Path::new("c.txt"), None),
                (
                    Path::new("dir1/deps/dep1/src/a.txt"),
                    Some(Path::new("dir1/deps/dep1").into())
                ),
                (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
            ]
        );
    });

    let repo_update_events = Arc::new(Mutex::new(vec![]));
    tree.update(cx, |_, cx| {
        let repo_update_events = repo_update_events.clone();
        cx.subscribe(&tree, move |_, _, event, _| {
            if let Event::UpdatedGitRepositories(update) = event {
                repo_update_events.lock().push(update.clone());
            }
        })
        .detach();
    });

    std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
    tree.flush_fs_events(cx).await;

    assert_eq!(
        repo_update_events.lock()[0]
            .iter()
            .map(|e| e.0.clone())
            .collect::<Vec<Arc<Path>>>(),
        vec![Path::new("dir1").into()]
    );

    std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
    tree.flush_fs_events(cx).await;

    tree.read_with(cx, |tree, _cx| {
        let tree = tree.as_local().unwrap();

        assert!(tree
            .repository_for_path("dir1/src/b.txt".as_ref())
            .is_none());
    });
}

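// End-to-end check of git status tracking: statuses should update as files are
// modified, staged, committed, ignored, and moved between directories.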
2073#[gpui::test]
2074async fn test_git_status(cx: &mut TestAppContext) {
2075 init_test(cx);
2076 cx.executor().allow_parking();
2077 const IGNORE_RULE: &str = "**/target";
2078
2079 let root = temp_tree(json!({
2080 "project": {
2081 "a.txt": "a",
2082 "b.txt": "bb",
2083 "c": {
2084 "d": {
2085 "e.txt": "eee"
2086 }
2087 },
2088 "f.txt": "ffff",
2089 "target": {
2090 "build_file": "???"
2091 },
2092 ".gitignore": IGNORE_RULE
2093 },
2094
2095 }));
2096
2097 const A_TXT: &str = "a.txt";
2098 const B_TXT: &str = "b.txt";
2099 const E_TXT: &str = "c/d/e.txt";
2100 const F_TXT: &str = "f.txt";
2101 const DOTGITIGNORE: &str = ".gitignore";
2102 const BUILD_FILE: &str = "target/build_file";
2103 let project_path = Path::new("project");
2104
2105 // Set up git repository before creating the worktree.
2106 let work_dir = root.path().join("project");
2107 let mut repo = git_init(work_dir.as_path());
2108 repo.add_ignore_rule(IGNORE_RULE).unwrap();
2109 git_add(A_TXT, &repo);
2110 git_add(E_TXT, &repo);
2111 git_add(DOTGITIGNORE, &repo);
2112 git_commit("Initial commit", &repo);
2113
2114 let tree = Worktree::local(
2115 build_client(cx),
2116 root.path(),
2117 true,
2118 Arc::new(RealFs::default()),
2119 Default::default(),
2120 &mut cx.to_async(),
2121 )
2122 .await
2123 .unwrap();
2124
2125 tree.flush_fs_events(cx).await;
2126 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2127 .await;
2128 cx.executor().run_until_parked();
2129
2130 // Check that the right git state is observed on startup
2131 tree.read_with(cx, |tree, _cx| {
2132 let snapshot = tree.snapshot();
2133 assert_eq!(snapshot.repositories().count(), 1);
2134 let (dir, repo_entry) = snapshot.repositories().next().unwrap();
2135 assert_eq!(dir.as_ref(), Path::new("project"));
2136 assert!(repo_entry.location_in_repo.is_none());
2137
2138 assert_eq!(
2139 snapshot.status_for_file(project_path.join(B_TXT)),
2140 Some(GitFileStatus::Added)
2141 );
2142 assert_eq!(
2143 snapshot.status_for_file(project_path.join(F_TXT)),
2144 Some(GitFileStatus::Added)
2145 );
2146 });
2147
2148 // Modify a file in the working copy.
2149 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
2150 tree.flush_fs_events(cx).await;
2151 cx.executor().run_until_parked();
2152
2153 // The worktree detects that the file's git status has changed.
2154 tree.read_with(cx, |tree, _cx| {
2155 let snapshot = tree.snapshot();
2156 assert_eq!(
2157 snapshot.status_for_file(project_path.join(A_TXT)),
2158 Some(GitFileStatus::Modified)
2159 );
2160 });
2161
2162 // Create a commit in the git repository.
2163 git_add(A_TXT, &repo);
2164 git_add(B_TXT, &repo);
2165 git_commit("Committing modified and added", &repo);
2166 tree.flush_fs_events(cx).await;
2167 cx.executor().run_until_parked();
2168
    // The worktree detects that the files' git statuses have changed.
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked.
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(E_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

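    // Remove a file and a directory, and extend the .gitignore to also cover `f.txt`.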
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

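    // Create a file in a new nested directory; it should be reported as added.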
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(project_path.join(renamed_dir_name).join(RENAMED_FILE)),
            Some(GitFileStatus::Added)
        );
    });

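    // Rename the file's grandparent directory; the file should still be reported
    // as added under its new path.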
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(
                project_path
                    .join(Path::new(renamed_dir_name))
                    .join(RENAMED_FILE)
            ),
            Some(GitFileStatus::Added)
        );
    });
}

#[gpui::test]
async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = temp_tree(json!({
        "my-repo": {
            // .git folder will go here
            "a.txt": "a",
            "sub-folder-1": {
                "sub-folder-2": {
                    "c.txt": "cc",
                    "d": {
                        "e.txt": "eee"
                    }
                },
            }
        },
    }));

    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Set up git repository before creating the worktree.
    let git_repo_work_dir = root.path().join("my-repo");
    let repo = git_init(git_repo_work_dir.as_path());
    git_add(C_TXT, &repo);
    git_commit("Initial commit", &repo);

    // Open the worktree in a subfolder of the repository.
    let project_root = Path::new("my-repo/sub-folder-1/sub-folder-2");
    let tree = Worktree::local(
        build_client(cx),
        root.path().join(project_root),
        true,
        Arc::new(RealFs::default()),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    tree.flush_fs_events(cx).await;
    tree.flush_fs_events_in_root_git_repository(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Ensure that the git status is loaded correctly.
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(snapshot.repositories().count(), 1);
        let (dir, repo_entry) = snapshot.repositories().next().unwrap();
        // The path is blank because, from the worktree's perspective, the
        // repository's working directory is the root of the project.
        assert_eq!(dir.as_ref(), Path::new(""));

        // `location_in_repo` is the path from the root of the repository down
        // to the root of the project (sub-folder-2).
        assert_eq!(
            repo_entry.location_in_repo,
            Some(Arc::from(Path::new("sub-folder-1/sub-folder-2")))
        );

        assert_eq!(snapshot.status_for_file("c.txt"), None);
        assert_eq!(
            snapshot.status_for_file("d/e.txt"),
            Some(GitFileStatus::Added)
        );
    });

    // Now we simulate FS events, but ONLY in the .git folder that's outside
    // of our project root. Meaning: we don't produce any FS events for files
    // inside the project.
    git_add(E_TXT, &repo);
    git_commit("Second commit", &repo);
    tree.flush_fs_events_in_root_git_repository(cx).await;
    cx.executor().run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert!(snapshot.repositories().next().is_some());

        assert_eq!(snapshot.status_for_file("c.txt"), None);
        assert_eq!(snapshot.status_for_file("d/e.txt"), None);
    });
}

#[gpui::test]
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".git": {},
            "a": {
                "b": {
                    "c1.txt": "",
                    "c2.txt": "",
                },
                "d": {
                    "e1.txt": "",
                    "e2.txt": "",
                    "e3.txt": "",
                }
            },
            "f": {
                "no-status.txt": ""
            },
            "g": {
                "h1.txt": "",
                "h2.txt": ""
            },
        }),
    )
    .await;

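    // Mark files as added, modified, and conflicted in the fake repository, as
    // if those statuses had been produced by git operations.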
    fs.set_status_for_repo_via_git_operation(
        Path::new("/root/.git"),
        &[
            (Path::new("a/b/c1.txt"), GitFileStatus::Added),
            (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
            (Path::new("g/h2.txt"), GitFileStatus::Conflict),
        ],
    );

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    cx.executor().run_until_parked();
    let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());

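    // Directory entries summarize their descendants' statuses: a conflict below
    // marks the directory conflicted, otherwise a modification marks it modified,
    // otherwise an addition marks it added.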
    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new(""), Some(GitFileStatus::Conflict)),
            (Path::new("a"), Some(GitFileStatus::Modified)),
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
            (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
        ],
    );

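    // The same statuses are produced for a subset of entries that omits the
    // worktree root.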
    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
        ],
    );

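    // And for a subset containing only file entries.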
    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f/no-status.txt"), None),
        ],
    );

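    // Looks up the entry for each expected path, propagates git statuses across
    // those entries, and asserts that the computed statuses match.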
    #[track_caller]
    fn check_propagated_statuses(
        snapshot: &Snapshot,
        expected_statuses: &[(&Path, Option<GitFileStatus>)],
    ) {
        let mut entries = expected_statuses
            .iter()
            .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
            .collect::<Vec<_>>();
        snapshot.propagate_git_statuses(&mut entries);
        assert_eq!(
            entries
                .iter()
                .map(|e| (e.path.as_ref(), e.git_status))
                .collect::<Vec<_>>(),
            expected_statuses
        );
    }
}

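/// Builds a test `Client` backed by a fake system clock and an HTTP client
/// that responds to every request with a 404.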
fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
    let clock = Arc::new(FakeSystemClock::default());
    let http_client = FakeHttpClient::with_404_response();
    cx.update(|cx| Client::new(clock, http_client, cx))
}

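/// Initializes an empty git repository at `path`.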
#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
    git2::Repository::init(path).expect("Failed to initialize git repository")
}

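/// Stages the file at `path` in the repository's index.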
#[track_caller]
fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
    let path = path.as_ref();
    let mut index = repo.index().expect("Failed to get index");
    index.add_path(path).expect("Failed to add file to index");
    index.write().expect("Failed to write index");
}

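/// Removes the file at `path` from the repository's index without touching
/// the working copy.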
#[track_caller]
fn git_remove_index(path: &Path, repo: &git2::Repository) {
    let mut index = repo.index().expect("Failed to get index");
    index
        .remove_path(path)
        .expect("Failed to remove file from index");
    index.write().expect("Failed to write index");
}

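/// Commits the current index with a fixed test signature, using HEAD as the
/// parent commit when one exists.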
#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    let oid = repo.index().unwrap().write_tree().unwrap();
    let tree = repo.find_tree(oid).unwrap();
    if let Ok(head) = repo.head() {
        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
        let parent_commit = parent_obj.as_commit().unwrap();
        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[parent_commit],
        )
        .expect("Failed to commit with parent");
    } else {
        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
            .expect("Failed to commit");
    }
}

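/// Stashes the repository's working-copy changes.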
#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}

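/// Soft-resets HEAD to one of its parent commits: `git_reset(0, &repo)`
/// behaves like `git reset --soft HEAD~1`.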
#[track_caller]
fn git_reset(offset: usize, repo: &git2::Repository) {
    let head = repo.head().expect("Couldn't get repo head");
    let object = head.peel(git2::ObjectType::Commit).unwrap();
    let commit = object.as_commit().unwrap();
    let new_head = commit.parents().nth(offset).expect("Not enough history");
    repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
        .expect("Could not reset");
}

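/// Collects the current git status of every path in the repository, keyed by path.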
#[allow(dead_code)]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}

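/// Asserts that each of the given paths is, respectively, excluded from,
/// ignored by, or tracked in the worktree.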
#[track_caller]
fn check_worktree_entries(
    tree: &Worktree,
    expected_excluded_paths: &[&str],
    expected_ignored_paths: &[&str],
    expected_tracked_paths: &[&str],
) {
    for path in expected_excluded_paths {
        let entry = tree.entry_for_path(path);
        assert!(
            entry.is_none(),
            "expected path '{path}' to be excluded, but got entry: {entry:?}",
        );
    }
    for path in expected_ignored_paths {
        let entry = tree
            .entry_for_path(path)
            .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
        assert!(
            entry.is_ignored,
            "expected path '{path}' to be ignored, but got entry: {entry:?}",
        );
    }
    for path in expected_tracked_paths {
        let entry = tree
            .entry_for_path(path)
            .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
        assert!(
            !entry.is_ignored,
            "expected path '{path}' to be tracked, but got entry: {entry:?}",
        );
    }
}

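/// Initializes logging when `RUST_LOG` is set and registers the settings that
/// worktree tests depend on.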
fn init_test(cx: &mut gpui::TestAppContext) {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        WorktreeSettings::register(cx);
    });
}

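/// Asserts that the entry at `path` has the expected git status and ignore state.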
fn assert_entry_git_state(
    tree: &Worktree,
    path: &str,
    git_status: Option<GitFileStatus>,
    is_ignored: bool,
) {
    let entry = tree
        .entry_for_path(path)
        .unwrap_or_else(|| panic!("entry {path} not found"));
    assert_eq!(entry.git_status, git_status);
    assert_eq!(entry.is_ignored, is_ignored);
}