1use crate::{
2 worktree_settings::WorktreeSettings, Entry, EntryKind, Event, PathChange, Snapshot, Worktree,
3 WorktreeModelHandle,
4};
5use anyhow::Result;
6use client::Client;
7use clock::FakeSystemClock;
8use fs::{FakeFs, Fs, RealFs, RemoveOptions};
9use git::{repository::GitFileStatus, GITIGNORE};
10use gpui::{BorrowAppContext, ModelContext, Task, TestAppContext};
11use http::FakeHttpClient;
12use parking_lot::Mutex;
13use postage::stream::Stream;
14use pretty_assertions::assert_eq;
15use rand::prelude::*;
16use serde_json::json;
17use settings::{Settings, SettingsStore};
18use std::{env, fmt::Write, mem, path::Path, sync::Arc};
19use util::{test::temp_tree, ResultExt};
20
21#[gpui::test]
22async fn test_traversal(cx: &mut TestAppContext) {
23 init_test(cx);
24 let fs = FakeFs::new(cx.background_executor.clone());
25 fs.insert_tree(
26 "/root",
27 json!({
28 ".gitignore": "a/b\n",
29 "a": {
30 "b": "",
31 "c": "",
32 }
33 }),
34 )
35 .await;
36
37 let tree = Worktree::local(
38 build_client(cx),
39 Path::new("/root"),
40 true,
41 fs,
42 Default::default(),
43 &mut cx.to_async(),
44 )
45 .await
46 .unwrap();
47 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
48 .await;
49
50 tree.read_with(cx, |tree, _| {
51 assert_eq!(
52 tree.entries(false)
53 .map(|entry| entry.path.as_ref())
54 .collect::<Vec<_>>(),
55 vec![
56 Path::new(""),
57 Path::new(".gitignore"),
58 Path::new("a"),
59 Path::new("a/c"),
60 ]
61 );
62 assert_eq!(
63 tree.entries(true)
64 .map(|entry| entry.path.as_ref())
65 .collect::<Vec<_>>(),
66 vec![
67 Path::new(""),
68 Path::new(".gitignore"),
69 Path::new("a"),
70 Path::new("a/b"),
71 Path::new("a/c"),
72 ]
73 );
74 })
75}
76
77#[gpui::test(iterations = 10)]
78async fn test_circular_symlinks(cx: &mut TestAppContext) {
79 init_test(cx);
80 let fs = FakeFs::new(cx.background_executor.clone());
81 fs.insert_tree(
82 "/root",
83 json!({
84 "lib": {
85 "a": {
86 "a.txt": ""
87 },
88 "b": {
89 "b.txt": ""
90 }
91 }
92 }),
93 )
94 .await;
95 fs.create_symlink("/root/lib/a/lib".as_ref(), "..".into())
96 .await
97 .unwrap();
98 fs.create_symlink("/root/lib/b/lib".as_ref(), "..".into())
99 .await
100 .unwrap();
101
102 let tree = Worktree::local(
103 build_client(cx),
104 Path::new("/root"),
105 true,
106 fs.clone(),
107 Default::default(),
108 &mut cx.to_async(),
109 )
110 .await
111 .unwrap();
112
113 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
114 .await;
115
116 tree.read_with(cx, |tree, _| {
117 assert_eq!(
118 tree.entries(false)
119 .map(|entry| entry.path.as_ref())
120 .collect::<Vec<_>>(),
121 vec![
122 Path::new(""),
123 Path::new("lib"),
124 Path::new("lib/a"),
125 Path::new("lib/a/a.txt"),
126 Path::new("lib/a/lib"),
127 Path::new("lib/b"),
128 Path::new("lib/b/b.txt"),
129 Path::new("lib/b/lib"),
130 ]
131 );
132 });
133
134 fs.rename(
135 Path::new("/root/lib/a/lib"),
136 Path::new("/root/lib/a/lib-2"),
137 Default::default(),
138 )
139 .await
140 .unwrap();
141 cx.executor().run_until_parked();
142 tree.read_with(cx, |tree, _| {
143 assert_eq!(
144 tree.entries(false)
145 .map(|entry| entry.path.as_ref())
146 .collect::<Vec<_>>(),
147 vec![
148 Path::new(""),
149 Path::new("lib"),
150 Path::new("lib/a"),
151 Path::new("lib/a/a.txt"),
152 Path::new("lib/a/lib-2"),
153 Path::new("lib/b"),
154 Path::new("lib/b/b.txt"),
155 Path::new("lib/b/lib"),
156 ]
157 );
158 });
159}
160
161#[gpui::test]
162async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
163 init_test(cx);
164 let fs = FakeFs::new(cx.background_executor.clone());
165 fs.insert_tree(
166 "/root",
167 json!({
168 "dir1": {
169 "deps": {
170 // symlinks here
171 },
172 "src": {
173 "a.rs": "",
174 "b.rs": "",
175 },
176 },
177 "dir2": {
178 "src": {
179 "c.rs": "",
180 "d.rs": "",
181 }
182 },
183 "dir3": {
184 "deps": {},
185 "src": {
186 "e.rs": "",
187 "f.rs": "",
188 },
189 }
190 }),
191 )
192 .await;
193
194 // These symlinks point to directories outside of the worktree's root, dir1.
195 fs.create_symlink("/root/dir1/deps/dep-dir2".as_ref(), "../../dir2".into())
196 .await
197 .unwrap();
198 fs.create_symlink("/root/dir1/deps/dep-dir3".as_ref(), "../../dir3".into())
199 .await
200 .unwrap();
201
202 let tree = Worktree::local(
203 build_client(cx),
204 Path::new("/root/dir1"),
205 true,
206 fs.clone(),
207 Default::default(),
208 &mut cx.to_async(),
209 )
210 .await
211 .unwrap();
212
213 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
214 .await;
215
216 let tree_updates = Arc::new(Mutex::new(Vec::new()));
217 tree.update(cx, |_, cx| {
218 let tree_updates = tree_updates.clone();
219 cx.subscribe(&tree, move |_, _, event, _| {
220 if let Event::UpdatedEntries(update) = event {
221 tree_updates.lock().extend(
222 update
223 .iter()
224 .map(|(path, _, change)| (path.clone(), *change)),
225 );
226 }
227 })
228 .detach();
229 });
230
231 // The symlinked directories are not scanned by default.
232 tree.read_with(cx, |tree, _| {
233 assert_eq!(
234 tree.entries(true)
235 .map(|entry| (entry.path.as_ref(), entry.is_external))
236 .collect::<Vec<_>>(),
237 vec![
238 (Path::new(""), false),
239 (Path::new("deps"), false),
240 (Path::new("deps/dep-dir2"), true),
241 (Path::new("deps/dep-dir3"), true),
242 (Path::new("src"), false),
243 (Path::new("src/a.rs"), false),
244 (Path::new("src/b.rs"), false),
245 ]
246 );
247
248 assert_eq!(
249 tree.entry_for_path("deps/dep-dir2").unwrap().kind,
250 EntryKind::UnloadedDir
251 );
252 });
253
254 // Expand one of the symlinked directories.
255 tree.read_with(cx, |tree, _| {
256 tree.as_local()
257 .unwrap()
258 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
259 })
260 .recv()
261 .await;
262
263 // The expanded directory's contents are loaded. Subdirectories are
264 // not scanned yet.
265 tree.read_with(cx, |tree, _| {
266 assert_eq!(
267 tree.entries(true)
268 .map(|entry| (entry.path.as_ref(), entry.is_external))
269 .collect::<Vec<_>>(),
270 vec![
271 (Path::new(""), false),
272 (Path::new("deps"), false),
273 (Path::new("deps/dep-dir2"), true),
274 (Path::new("deps/dep-dir3"), true),
275 (Path::new("deps/dep-dir3/deps"), true),
276 (Path::new("deps/dep-dir3/src"), true),
277 (Path::new("src"), false),
278 (Path::new("src/a.rs"), false),
279 (Path::new("src/b.rs"), false),
280 ]
281 );
282 });
283 assert_eq!(
284 mem::take(&mut *tree_updates.lock()),
285 &[
286 (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
287 (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
288 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
289 ]
290 );
291
292 // Expand a subdirectory of one of the symlinked directories.
293 tree.read_with(cx, |tree, _| {
294 tree.as_local()
295 .unwrap()
296 .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
297 })
298 .recv()
299 .await;
300
301 // The expanded subdirectory's contents are loaded.
302 tree.read_with(cx, |tree, _| {
303 assert_eq!(
304 tree.entries(true)
305 .map(|entry| (entry.path.as_ref(), entry.is_external))
306 .collect::<Vec<_>>(),
307 vec![
308 (Path::new(""), false),
309 (Path::new("deps"), false),
310 (Path::new("deps/dep-dir2"), true),
311 (Path::new("deps/dep-dir3"), true),
312 (Path::new("deps/dep-dir3/deps"), true),
313 (Path::new("deps/dep-dir3/src"), true),
314 (Path::new("deps/dep-dir3/src/e.rs"), true),
315 (Path::new("deps/dep-dir3/src/f.rs"), true),
316 (Path::new("src"), false),
317 (Path::new("src/a.rs"), false),
318 (Path::new("src/b.rs"), false),
319 ]
320 );
321 });
322
323 assert_eq!(
324 mem::take(&mut *tree_updates.lock()),
325 &[
326 (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
327 (
328 Path::new("deps/dep-dir3/src/e.rs").into(),
329 PathChange::Loaded
330 ),
331 (
332 Path::new("deps/dep-dir3/src/f.rs").into(),
333 PathChange::Loaded
334 )
335 ]
336 );
337}
338
339#[cfg(target_os = "macos")]
340#[gpui::test]
341async fn test_renaming_case_only(cx: &mut TestAppContext) {
342 cx.executor().allow_parking();
343 init_test(cx);
344
345 const OLD_NAME: &str = "aaa.rs";
346 const NEW_NAME: &str = "AAA.rs";
347
348 let fs = Arc::new(RealFs::default());
349 let temp_root = temp_tree(json!({
350 OLD_NAME: "",
351 }));
352
353 let tree = Worktree::local(
354 build_client(cx),
355 temp_root.path(),
356 true,
357 fs.clone(),
358 Default::default(),
359 &mut cx.to_async(),
360 )
361 .await
362 .unwrap();
363
364 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
365 .await;
366 tree.read_with(cx, |tree, _| {
367 assert_eq!(
368 tree.entries(true)
369 .map(|entry| entry.path.as_ref())
370 .collect::<Vec<_>>(),
371 vec![Path::new(""), Path::new(OLD_NAME)]
372 );
373 });
374
375 fs.rename(
376 &temp_root.path().join(OLD_NAME),
377 &temp_root.path().join(NEW_NAME),
378 fs::RenameOptions {
379 overwrite: true,
380 ignore_if_exists: true,
381 },
382 )
383 .await
384 .unwrap();
385
386 tree.flush_fs_events(cx).await;
387
388 tree.read_with(cx, |tree, _| {
389 assert_eq!(
390 tree.entries(true)
391 .map(|entry| entry.path.as_ref())
392 .collect::<Vec<_>>(),
393 vec![Path::new(""), Path::new(NEW_NAME)]
394 );
395 });
396}
397
398#[gpui::test]
399async fn test_open_gitignored_files(cx: &mut TestAppContext) {
400 init_test(cx);
401 let fs = FakeFs::new(cx.background_executor.clone());
402 fs.insert_tree(
403 "/root",
404 json!({
405 ".gitignore": "node_modules\n",
406 "one": {
407 "node_modules": {
408 "a": {
409 "a1.js": "a1",
410 "a2.js": "a2",
411 },
412 "b": {
413 "b1.js": "b1",
414 "b2.js": "b2",
415 },
416 "c": {
417 "c1.js": "c1",
418 "c2.js": "c2",
419 }
420 },
421 },
422 "two": {
423 "x.js": "",
424 "y.js": "",
425 },
426 }),
427 )
428 .await;
429
430 let tree = Worktree::local(
431 build_client(cx),
432 Path::new("/root"),
433 true,
434 fs.clone(),
435 Default::default(),
436 &mut cx.to_async(),
437 )
438 .await
439 .unwrap();
440
441 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
442 .await;
443
444 tree.read_with(cx, |tree, _| {
445 assert_eq!(
446 tree.entries(true)
447 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
448 .collect::<Vec<_>>(),
449 vec![
450 (Path::new(""), false),
451 (Path::new(".gitignore"), false),
452 (Path::new("one"), false),
453 (Path::new("one/node_modules"), true),
454 (Path::new("two"), false),
455 (Path::new("two/x.js"), false),
456 (Path::new("two/y.js"), false),
457 ]
458 );
459 });
460
461 // Open a file that is nested inside of a gitignored directory that
462 // has not yet been expanded.
463 let prev_read_dir_count = fs.read_dir_call_count();
464 let buffer = tree
465 .update(cx, |tree, cx| {
466 tree.as_local_mut()
467 .unwrap()
468 .load_buffer("one/node_modules/b/b1.js".as_ref(), cx)
469 })
470 .await
471 .unwrap();
472
473 tree.read_with(cx, |tree, cx| {
474 assert_eq!(
475 tree.entries(true)
476 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
477 .collect::<Vec<_>>(),
478 vec![
479 (Path::new(""), false),
480 (Path::new(".gitignore"), false),
481 (Path::new("one"), false),
482 (Path::new("one/node_modules"), true),
483 (Path::new("one/node_modules/a"), true),
484 (Path::new("one/node_modules/b"), true),
485 (Path::new("one/node_modules/b/b1.js"), true),
486 (Path::new("one/node_modules/b/b2.js"), true),
487 (Path::new("one/node_modules/c"), true),
488 (Path::new("two"), false),
489 (Path::new("two/x.js"), false),
490 (Path::new("two/y.js"), false),
491 ]
492 );
493
494 assert_eq!(
495 buffer.read(cx).file().unwrap().path().as_ref(),
496 Path::new("one/node_modules/b/b1.js")
497 );
498
499 // Only the newly-expanded directories are scanned.
500 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
501 });
502
503 // Open another file in a different subdirectory of the same
504 // gitignored directory.
505 let prev_read_dir_count = fs.read_dir_call_count();
506 let buffer = tree
507 .update(cx, |tree, cx| {
508 tree.as_local_mut()
509 .unwrap()
510 .load_buffer("one/node_modules/a/a2.js".as_ref(), cx)
511 })
512 .await
513 .unwrap();
514
515 tree.read_with(cx, |tree, cx| {
516 assert_eq!(
517 tree.entries(true)
518 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
519 .collect::<Vec<_>>(),
520 vec![
521 (Path::new(""), false),
522 (Path::new(".gitignore"), false),
523 (Path::new("one"), false),
524 (Path::new("one/node_modules"), true),
525 (Path::new("one/node_modules/a"), true),
526 (Path::new("one/node_modules/a/a1.js"), true),
527 (Path::new("one/node_modules/a/a2.js"), true),
528 (Path::new("one/node_modules/b"), true),
529 (Path::new("one/node_modules/b/b1.js"), true),
530 (Path::new("one/node_modules/b/b2.js"), true),
531 (Path::new("one/node_modules/c"), true),
532 (Path::new("two"), false),
533 (Path::new("two/x.js"), false),
534 (Path::new("two/y.js"), false),
535 ]
536 );
537
538 assert_eq!(
539 buffer.read(cx).file().unwrap().path().as_ref(),
540 Path::new("one/node_modules/a/a2.js")
541 );
542
543 // Only the newly-expanded directory is scanned.
544 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
545 });
546
547 // No work happens when files and directories change within an unloaded directory.
548 let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
549 fs.create_dir("/root/one/node_modules/c/lib".as_ref())
550 .await
551 .unwrap();
552 cx.executor().run_until_parked();
553 assert_eq!(
554 fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
555 0
556 );
557}
558
559#[gpui::test]
560async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
561 init_test(cx);
562 let fs = FakeFs::new(cx.background_executor.clone());
563 fs.insert_tree(
564 "/root",
565 json!({
566 ".gitignore": "node_modules\n",
567 "a": {
568 "a.js": "",
569 },
570 "b": {
571 "b.js": "",
572 },
573 "node_modules": {
574 "c": {
575 "c.js": "",
576 },
577 "d": {
578 "d.js": "",
579 "e": {
580 "e1.js": "",
581 "e2.js": "",
582 },
583 "f": {
584 "f1.js": "",
585 "f2.js": "",
586 }
587 },
588 },
589 }),
590 )
591 .await;
592
593 let tree = Worktree::local(
594 build_client(cx),
595 Path::new("/root"),
596 true,
597 fs.clone(),
598 Default::default(),
599 &mut cx.to_async(),
600 )
601 .await
602 .unwrap();
603
604 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
605 .await;
606
607 // Open a file within the gitignored directory, forcing some of its
608 // subdirectories to be read, but not all.
609 let read_dir_count_1 = fs.read_dir_call_count();
610 tree.read_with(cx, |tree, _| {
611 tree.as_local()
612 .unwrap()
613 .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
614 })
615 .recv()
616 .await;
617
618 // Those subdirectories are now loaded.
619 tree.read_with(cx, |tree, _| {
620 assert_eq!(
621 tree.entries(true)
622 .map(|e| (e.path.as_ref(), e.is_ignored))
623 .collect::<Vec<_>>(),
624 &[
625 (Path::new(""), false),
626 (Path::new(".gitignore"), false),
627 (Path::new("a"), false),
628 (Path::new("a/a.js"), false),
629 (Path::new("b"), false),
630 (Path::new("b/b.js"), false),
631 (Path::new("node_modules"), true),
632 (Path::new("node_modules/c"), true),
633 (Path::new("node_modules/d"), true),
634 (Path::new("node_modules/d/d.js"), true),
635 (Path::new("node_modules/d/e"), true),
636 (Path::new("node_modules/d/f"), true),
637 ]
638 );
639 });
640 let read_dir_count_2 = fs.read_dir_call_count();
641 assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
642
643 // Update the gitignore so that node_modules is no longer ignored,
644 // but a subdirectory is ignored
645 fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
646 .await
647 .unwrap();
648 cx.executor().run_until_parked();
649
650 // All of the directories that are no longer ignored are now loaded.
651 tree.read_with(cx, |tree, _| {
652 assert_eq!(
653 tree.entries(true)
654 .map(|e| (e.path.as_ref(), e.is_ignored))
655 .collect::<Vec<_>>(),
656 &[
657 (Path::new(""), false),
658 (Path::new(".gitignore"), false),
659 (Path::new("a"), false),
660 (Path::new("a/a.js"), false),
661 (Path::new("b"), false),
662 (Path::new("b/b.js"), false),
663 // This directory is no longer ignored
664 (Path::new("node_modules"), false),
665 (Path::new("node_modules/c"), false),
666 (Path::new("node_modules/c/c.js"), false),
667 (Path::new("node_modules/d"), false),
668 (Path::new("node_modules/d/d.js"), false),
669 // This subdirectory is now ignored
670 (Path::new("node_modules/d/e"), true),
671 (Path::new("node_modules/d/f"), false),
672 (Path::new("node_modules/d/f/f1.js"), false),
673 (Path::new("node_modules/d/f/f2.js"), false),
674 ]
675 );
676 });
677
678 // Each of the newly-loaded directories is scanned only once.
679 let read_dir_count_3 = fs.read_dir_call_count();
680 assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
681}
682
683#[gpui::test(iterations = 10)]
684async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
685 init_test(cx);
686 cx.update(|cx| {
687 cx.update_global::<SettingsStore, _>(|store, cx| {
688 store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
689 project_settings.file_scan_exclusions = Some(Vec::new());
690 });
691 });
692 });
693 let fs = FakeFs::new(cx.background_executor.clone());
694 fs.insert_tree(
695 "/root",
696 json!({
697 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
698 "tree": {
699 ".git": {},
700 ".gitignore": "ignored-dir\n",
701 "tracked-dir": {
702 "tracked-file1": "",
703 "ancestor-ignored-file1": "",
704 },
705 "ignored-dir": {
706 "ignored-file1": ""
707 }
708 }
709 }),
710 )
711 .await;
712
713 let tree = Worktree::local(
714 build_client(cx),
715 "/root/tree".as_ref(),
716 true,
717 fs.clone(),
718 Default::default(),
719 &mut cx.to_async(),
720 )
721 .await
722 .unwrap();
723 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
724 .await;
725
726 tree.read_with(cx, |tree, _| {
727 tree.as_local()
728 .unwrap()
729 .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
730 })
731 .recv()
732 .await;
733
734 cx.read(|cx| {
735 let tree = tree.read(cx);
736 assert_entry_git_state(tree, "tracked-dir/tracked-file1", None, false);
737 assert_entry_git_state(tree, "tracked-dir/ancestor-ignored-file1", None, true);
738 assert_entry_git_state(tree, "ignored-dir/ignored-file1", None, true);
739 });
740
741 fs.set_status_for_repo_via_working_copy_change(
742 &Path::new("/root/tree/.git"),
743 &[(Path::new("tracked-dir/tracked-file2"), GitFileStatus::Added)],
744 );
745
746 fs.create_file(
747 "/root/tree/tracked-dir/tracked-file2".as_ref(),
748 Default::default(),
749 )
750 .await
751 .unwrap();
752 fs.create_file(
753 "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
754 Default::default(),
755 )
756 .await
757 .unwrap();
758 fs.create_file(
759 "/root/tree/ignored-dir/ignored-file2".as_ref(),
760 Default::default(),
761 )
762 .await
763 .unwrap();
764
765 cx.executor().run_until_parked();
766 cx.read(|cx| {
767 let tree = tree.read(cx);
768 assert_entry_git_state(
769 tree,
770 "tracked-dir/tracked-file2",
771 Some(GitFileStatus::Added),
772 false,
773 );
774 assert_entry_git_state(tree, "tracked-dir/ancestor-ignored-file2", None, true);
775 assert_entry_git_state(tree, "ignored-dir/ignored-file2", None, true);
776 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
777 });
778}
779
780#[gpui::test]
781async fn test_update_gitignore(cx: &mut TestAppContext) {
782 init_test(cx);
783 let fs = FakeFs::new(cx.background_executor.clone());
784 fs.insert_tree(
785 "/root",
786 json!({
787 ".git": {},
788 ".gitignore": "*.txt\n",
789 "a.xml": "<a></a>",
790 "b.txt": "Some text"
791 }),
792 )
793 .await;
794
795 let tree = Worktree::local(
796 build_client(cx),
797 "/root".as_ref(),
798 true,
799 fs.clone(),
800 Default::default(),
801 &mut cx.to_async(),
802 )
803 .await
804 .unwrap();
805 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
806 .await;
807
808 tree.read_with(cx, |tree, _| {
809 tree.as_local()
810 .unwrap()
811 .refresh_entries_for_paths(vec![Path::new("").into()])
812 })
813 .recv()
814 .await;
815
816 cx.read(|cx| {
817 let tree = tree.read(cx);
818 assert_entry_git_state(tree, "a.xml", None, false);
819 assert_entry_git_state(tree, "b.txt", None, true);
820 });
821
822 fs.atomic_write("/root/.gitignore".into(), "*.xml".into())
823 .await
824 .unwrap();
825
826 fs.set_status_for_repo_via_working_copy_change(
827 &Path::new("/root/.git"),
828 &[(Path::new("b.txt"), GitFileStatus::Added)],
829 );
830
831 cx.executor().run_until_parked();
832 cx.read(|cx| {
833 let tree = tree.read(cx);
834 assert_entry_git_state(tree, "a.xml", None, true);
835 assert_entry_git_state(tree, "b.txt", Some(GitFileStatus::Added), false);
836 });
837}
838
839#[gpui::test]
840async fn test_write_file(cx: &mut TestAppContext) {
841 init_test(cx);
842 cx.executor().allow_parking();
843 let dir = temp_tree(json!({
844 ".git": {},
845 ".gitignore": "ignored-dir\n",
846 "tracked-dir": {},
847 "ignored-dir": {}
848 }));
849
850 let tree = Worktree::local(
851 build_client(cx),
852 dir.path(),
853 true,
854 Arc::new(RealFs::default()),
855 Default::default(),
856 &mut cx.to_async(),
857 )
858 .await
859 .unwrap();
860 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
861 .await;
862 tree.flush_fs_events(cx).await;
863
864 tree.update(cx, |tree, cx| {
865 tree.as_local().unwrap().write_file(
866 Path::new("tracked-dir/file.txt"),
867 "hello".into(),
868 Default::default(),
869 cx,
870 )
871 })
872 .await
873 .unwrap();
874 tree.update(cx, |tree, cx| {
875 tree.as_local().unwrap().write_file(
876 Path::new("ignored-dir/file.txt"),
877 "world".into(),
878 Default::default(),
879 cx,
880 )
881 })
882 .await
883 .unwrap();
884
885 tree.read_with(cx, |tree, _| {
886 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
887 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
888 assert!(!tracked.is_ignored);
889 assert!(ignored.is_ignored);
890 });
891}
892
893#[gpui::test]
894async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
895 init_test(cx);
896 cx.executor().allow_parking();
897 let dir = temp_tree(json!({
898 ".gitignore": "**/target\n/node_modules\n",
899 "target": {
900 "index": "blah2"
901 },
902 "node_modules": {
903 ".DS_Store": "",
904 "prettier": {
905 "package.json": "{}",
906 },
907 },
908 "src": {
909 ".DS_Store": "",
910 "foo": {
911 "foo.rs": "mod another;\n",
912 "another.rs": "// another",
913 },
914 "bar": {
915 "bar.rs": "// bar",
916 },
917 "lib.rs": "mod foo;\nmod bar;\n",
918 },
919 ".DS_Store": "",
920 }));
921 cx.update(|cx| {
922 cx.update_global::<SettingsStore, _>(|store, cx| {
923 store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
924 project_settings.file_scan_exclusions =
925 Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
926 });
927 });
928 });
929
930 let tree = Worktree::local(
931 build_client(cx),
932 dir.path(),
933 true,
934 Arc::new(RealFs::default()),
935 Default::default(),
936 &mut cx.to_async(),
937 )
938 .await
939 .unwrap();
940 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
941 .await;
942 tree.flush_fs_events(cx).await;
943 tree.read_with(cx, |tree, _| {
944 check_worktree_entries(
945 tree,
946 &[
947 "src/foo/foo.rs",
948 "src/foo/another.rs",
949 "node_modules/.DS_Store",
950 "src/.DS_Store",
951 ".DS_Store",
952 ],
953 &["target", "node_modules"],
954 &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
955 )
956 });
957
958 cx.update(|cx| {
959 cx.update_global::<SettingsStore, _>(|store, cx| {
960 store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
961 project_settings.file_scan_exclusions =
962 Some(vec!["**/node_modules/**".to_string()]);
963 });
964 });
965 });
966 tree.flush_fs_events(cx).await;
967 cx.executor().run_until_parked();
968 tree.read_with(cx, |tree, _| {
969 check_worktree_entries(
970 tree,
971 &[
972 "node_modules/prettier/package.json",
973 "node_modules/.DS_Store",
974 "node_modules",
975 ],
976 &["target"],
977 &[
978 ".gitignore",
979 "src/lib.rs",
980 "src/bar/bar.rs",
981 "src/foo/foo.rs",
982 "src/foo/another.rs",
983 "src/.DS_Store",
984 ".DS_Store",
985 ],
986 )
987 });
988}
989
990#[gpui::test]
991async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
992 init_test(cx);
993 cx.executor().allow_parking();
994 let dir = temp_tree(json!({
995 ".git": {
996 "HEAD": "ref: refs/heads/main\n",
997 "foo": "bar",
998 },
999 ".gitignore": "**/target\n/node_modules\ntest_output\n",
1000 "target": {
1001 "index": "blah2"
1002 },
1003 "node_modules": {
1004 ".DS_Store": "",
1005 "prettier": {
1006 "package.json": "{}",
1007 },
1008 },
1009 "src": {
1010 ".DS_Store": "",
1011 "foo": {
1012 "foo.rs": "mod another;\n",
1013 "another.rs": "// another",
1014 },
1015 "bar": {
1016 "bar.rs": "// bar",
1017 },
1018 "lib.rs": "mod foo;\nmod bar;\n",
1019 },
1020 ".DS_Store": "",
1021 }));
1022 cx.update(|cx| {
1023 cx.update_global::<SettingsStore, _>(|store, cx| {
1024 store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
1025 project_settings.file_scan_exclusions = Some(vec![
1026 "**/.git".to_string(),
1027 "node_modules/".to_string(),
1028 "build_output".to_string(),
1029 ]);
1030 });
1031 });
1032 });
1033
1034 let tree = Worktree::local(
1035 build_client(cx),
1036 dir.path(),
1037 true,
1038 Arc::new(RealFs::default()),
1039 Default::default(),
1040 &mut cx.to_async(),
1041 )
1042 .await
1043 .unwrap();
1044 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1045 .await;
1046 tree.flush_fs_events(cx).await;
1047 tree.read_with(cx, |tree, _| {
1048 check_worktree_entries(
1049 tree,
1050 &[
1051 ".git/HEAD",
1052 ".git/foo",
1053 "node_modules",
1054 "node_modules/.DS_Store",
1055 "node_modules/prettier",
1056 "node_modules/prettier/package.json",
1057 ],
1058 &["target"],
1059 &[
1060 ".DS_Store",
1061 "src/.DS_Store",
1062 "src/lib.rs",
1063 "src/foo/foo.rs",
1064 "src/foo/another.rs",
1065 "src/bar/bar.rs",
1066 ".gitignore",
1067 ],
1068 )
1069 });
1070
1071 let new_excluded_dir = dir.path().join("build_output");
1072 let new_ignored_dir = dir.path().join("test_output");
1073 std::fs::create_dir_all(&new_excluded_dir)
1074 .unwrap_or_else(|e| panic!("Failed to create a {new_excluded_dir:?} directory: {e}"));
1075 std::fs::create_dir_all(&new_ignored_dir)
1076 .unwrap_or_else(|e| panic!("Failed to create a {new_ignored_dir:?} directory: {e}"));
1077 let node_modules_dir = dir.path().join("node_modules");
1078 let dot_git_dir = dir.path().join(".git");
1079 let src_dir = dir.path().join("src");
1080 for existing_dir in [&node_modules_dir, &dot_git_dir, &src_dir] {
1081 assert!(
1082 existing_dir.is_dir(),
1083 "Expect {existing_dir:?} to be present in the FS already"
1084 );
1085 }
1086
1087 for directory_for_new_file in [
1088 new_excluded_dir,
1089 new_ignored_dir,
1090 node_modules_dir,
1091 dot_git_dir,
1092 src_dir,
1093 ] {
1094 std::fs::write(directory_for_new_file.join("new_file"), "new file contents")
1095 .unwrap_or_else(|e| {
1096 panic!("Failed to create in {directory_for_new_file:?} a new file: {e}")
1097 });
1098 }
1099 tree.flush_fs_events(cx).await;
1100
1101 tree.read_with(cx, |tree, _| {
1102 check_worktree_entries(
1103 tree,
1104 &[
1105 ".git/HEAD",
1106 ".git/foo",
1107 ".git/new_file",
1108 "node_modules",
1109 "node_modules/.DS_Store",
1110 "node_modules/prettier",
1111 "node_modules/prettier/package.json",
1112 "node_modules/new_file",
1113 "build_output",
1114 "build_output/new_file",
1115 "test_output/new_file",
1116 ],
1117 &["target", "test_output"],
1118 &[
1119 ".DS_Store",
1120 "src/.DS_Store",
1121 "src/lib.rs",
1122 "src/foo/foo.rs",
1123 "src/foo/another.rs",
1124 "src/bar/bar.rs",
1125 "src/new_file",
1126 ".gitignore",
1127 ],
1128 )
1129 });
1130}
1131
1132#[gpui::test]
1133async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
1134 init_test(cx);
1135 cx.executor().allow_parking();
1136 let dir = temp_tree(json!({
1137 ".git": {
1138 "HEAD": "ref: refs/heads/main\n",
1139 "foo": "foo contents",
1140 },
1141 }));
1142 let dot_git_worktree_dir = dir.path().join(".git");
1143
1144 let tree = Worktree::local(
1145 build_client(cx),
1146 dot_git_worktree_dir.clone(),
1147 true,
1148 Arc::new(RealFs::default()),
1149 Default::default(),
1150 &mut cx.to_async(),
1151 )
1152 .await
1153 .unwrap();
1154 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1155 .await;
1156 tree.flush_fs_events(cx).await;
1157 tree.read_with(cx, |tree, _| {
1158 check_worktree_entries(tree, &[], &["HEAD", "foo"], &[])
1159 });
1160
1161 std::fs::write(dot_git_worktree_dir.join("new_file"), "new file contents")
1162 .unwrap_or_else(|e| panic!("Failed to create in {dot_git_worktree_dir:?} a new file: {e}"));
1163 tree.flush_fs_events(cx).await;
1164 tree.read_with(cx, |tree, _| {
1165 check_worktree_entries(tree, &[], &["HEAD", "foo", "new_file"], &[])
1166 });
1167}
1168
1169#[gpui::test(iterations = 30)]
1170async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
1171 init_test(cx);
1172 let fs = FakeFs::new(cx.background_executor.clone());
1173 fs.insert_tree(
1174 "/root",
1175 json!({
1176 "b": {},
1177 "c": {},
1178 "d": {},
1179 }),
1180 )
1181 .await;
1182
1183 let tree = Worktree::local(
1184 build_client(cx),
1185 "/root".as_ref(),
1186 true,
1187 fs,
1188 Default::default(),
1189 &mut cx.to_async(),
1190 )
1191 .await
1192 .unwrap();
1193
1194 let snapshot1 = tree.update(cx, |tree, cx| {
1195 let tree = tree.as_local_mut().unwrap();
1196 let snapshot = Arc::new(Mutex::new(tree.snapshot()));
1197 let _ = tree.observe_updates(0, cx, {
1198 let snapshot = snapshot.clone();
1199 move |update| {
1200 snapshot.lock().apply_remote_update(update).unwrap();
1201 async { true }
1202 }
1203 });
1204 snapshot
1205 });
1206
1207 let entry = tree
1208 .update(cx, |tree, cx| {
1209 tree.as_local_mut()
1210 .unwrap()
1211 .create_entry("a/e".as_ref(), true, cx)
1212 })
1213 .await
1214 .unwrap()
1215 .unwrap();
1216 assert!(entry.is_dir());
1217
1218 cx.executor().run_until_parked();
1219 tree.read_with(cx, |tree, _| {
1220 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
1221 });
1222
1223 let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
1224 assert_eq!(
1225 snapshot1.lock().entries(true).collect::<Vec<_>>(),
1226 snapshot2.entries(true).collect::<Vec<_>>()
1227 );
1228}
1229
1230#[gpui::test]
1231async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
1232 init_test(cx);
1233 cx.executor().allow_parking();
1234 let client_fake = cx.update(|cx| {
1235 Client::new(
1236 Arc::new(FakeSystemClock::default()),
1237 FakeHttpClient::with_404_response(),
1238 cx,
1239 )
1240 });
1241
1242 let fs_fake = FakeFs::new(cx.background_executor.clone());
1243 fs_fake
1244 .insert_tree(
1245 "/root",
1246 json!({
1247 "a": {},
1248 }),
1249 )
1250 .await;
1251
1252 let tree_fake = Worktree::local(
1253 client_fake,
1254 "/root".as_ref(),
1255 true,
1256 fs_fake,
1257 Default::default(),
1258 &mut cx.to_async(),
1259 )
1260 .await
1261 .unwrap();
1262
1263 let entry = tree_fake
1264 .update(cx, |tree, cx| {
1265 tree.as_local_mut()
1266 .unwrap()
1267 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1268 })
1269 .await
1270 .unwrap()
1271 .unwrap();
1272 assert!(entry.is_file());
1273
1274 cx.executor().run_until_parked();
1275 tree_fake.read_with(cx, |tree, _| {
1276 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1277 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1278 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1279 });
1280
1281 let client_real = cx.update(|cx| {
1282 Client::new(
1283 Arc::new(FakeSystemClock::default()),
1284 FakeHttpClient::with_404_response(),
1285 cx,
1286 )
1287 });
1288
1289 let fs_real = Arc::new(RealFs::default());
1290 let temp_root = temp_tree(json!({
1291 "a": {}
1292 }));
1293
1294 let tree_real = Worktree::local(
1295 client_real,
1296 temp_root.path(),
1297 true,
1298 fs_real,
1299 Default::default(),
1300 &mut cx.to_async(),
1301 )
1302 .await
1303 .unwrap();
1304
1305 let entry = tree_real
1306 .update(cx, |tree, cx| {
1307 tree.as_local_mut()
1308 .unwrap()
1309 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
1310 })
1311 .await
1312 .unwrap()
1313 .unwrap();
1314 assert!(entry.is_file());
1315
1316 cx.executor().run_until_parked();
1317 tree_real.read_with(cx, |tree, _| {
1318 assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
1319 assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
1320 assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
1321 });
1322
1323 // Test smallest change
1324 let entry = tree_real
1325 .update(cx, |tree, cx| {
1326 tree.as_local_mut()
1327 .unwrap()
1328 .create_entry("a/b/c/e.txt".as_ref(), false, cx)
1329 })
1330 .await
1331 .unwrap()
1332 .unwrap();
1333 assert!(entry.is_file());
1334
1335 cx.executor().run_until_parked();
1336 tree_real.read_with(cx, |tree, _| {
1337 assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
1338 });
1339
1340 // Test largest change
1341 let entry = tree_real
1342 .update(cx, |tree, cx| {
1343 tree.as_local_mut()
1344 .unwrap()
1345 .create_entry("d/e/f/g.txt".as_ref(), false, cx)
1346 })
1347 .await
1348 .unwrap()
1349 .unwrap();
1350 assert!(entry.is_file());
1351
1352 cx.executor().run_until_parked();
1353 tree_real.read_with(cx, |tree, _| {
1354 assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
1355 assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
1356 assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
1357 assert!(tree.entry_for_path("d/").unwrap().is_dir());
1358 });
1359}
1360
1361#[gpui::test(iterations = 100)]
1362async fn test_random_worktree_operations_during_initial_scan(
1363 cx: &mut TestAppContext,
1364 mut rng: StdRng,
1365) {
1366 init_test(cx);
1367 let operations = env::var("OPERATIONS")
1368 .map(|o| o.parse().unwrap())
1369 .unwrap_or(5);
1370 let initial_entries = env::var("INITIAL_ENTRIES")
1371 .map(|o| o.parse().unwrap())
1372 .unwrap_or(20);
1373
1374 let root_dir = Path::new("/test");
1375 let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
1376 fs.as_fake().insert_tree(root_dir, json!({})).await;
1377 for _ in 0..initial_entries {
1378 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1379 }
1380 log::info!("generated initial tree");
1381
1382 let worktree = Worktree::local(
1383 build_client(cx),
1384 root_dir,
1385 true,
1386 fs.clone(),
1387 Default::default(),
1388 &mut cx.to_async(),
1389 )
1390 .await
1391 .unwrap();
1392
1393 let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
1394 let updates = Arc::new(Mutex::new(Vec::new()));
1395 worktree.update(cx, |tree, cx| {
1396 check_worktree_change_events(tree, cx);
1397
1398 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1399 let updates = updates.clone();
1400 move |update| {
1401 updates.lock().push(update);
1402 async { true }
1403 }
1404 });
1405 });
1406
1407 for _ in 0..operations {
1408 worktree
1409 .update(cx, |worktree, cx| {
1410 randomly_mutate_worktree(worktree, &mut rng, cx)
1411 })
1412 .await
1413 .log_err();
1414 worktree.read_with(cx, |tree, _| {
1415 tree.as_local().unwrap().snapshot().check_invariants(true)
1416 });
1417
1418 if rng.gen_bool(0.6) {
1419 snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
1420 }
1421 }
1422
1423 worktree
1424 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1425 .await;
1426
1427 cx.executor().run_until_parked();
1428
1429 let final_snapshot = worktree.read_with(cx, |tree, _| {
1430 let tree = tree.as_local().unwrap();
1431 let snapshot = tree.snapshot();
1432 snapshot.check_invariants(true);
1433 snapshot
1434 });
1435
1436 for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
1437 let mut updated_snapshot = snapshot.clone();
1438 for update in updates.lock().iter() {
1439 if update.scan_id >= updated_snapshot.scan_id() as u64 {
1440 updated_snapshot
1441 .apply_remote_update(update.clone())
1442 .unwrap();
1443 }
1444 }
1445
1446 assert_eq!(
1447 updated_snapshot.entries(true).collect::<Vec<_>>(),
1448 final_snapshot.entries(true).collect::<Vec<_>>(),
1449 "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
1450 );
1451 }
1452}
1453
1454#[gpui::test(iterations = 100)]
1455async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
1456 init_test(cx);
1457 let operations = env::var("OPERATIONS")
1458 .map(|o| o.parse().unwrap())
1459 .unwrap_or(40);
1460 let initial_entries = env::var("INITIAL_ENTRIES")
1461 .map(|o| o.parse().unwrap())
1462 .unwrap_or(20);
1463
1464 let root_dir = Path::new("/test");
1465 let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
1466 fs.as_fake().insert_tree(root_dir, json!({})).await;
1467 for _ in 0..initial_entries {
1468 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1469 }
1470 log::info!("generated initial tree");
1471
1472 let worktree = Worktree::local(
1473 build_client(cx),
1474 root_dir,
1475 true,
1476 fs.clone(),
1477 Default::default(),
1478 &mut cx.to_async(),
1479 )
1480 .await
1481 .unwrap();
1482
1483 let updates = Arc::new(Mutex::new(Vec::new()));
1484 worktree.update(cx, |tree, cx| {
1485 check_worktree_change_events(tree, cx);
1486
1487 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
1488 let updates = updates.clone();
1489 move |update| {
1490 updates.lock().push(update);
1491 async { true }
1492 }
1493 });
1494 });
1495
1496 worktree
1497 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1498 .await;
1499
1500 fs.as_fake().pause_events();
1501 let mut snapshots = Vec::new();
1502 let mut mutations_len = operations;
1503 while mutations_len > 1 {
1504 if rng.gen_bool(0.2) {
1505 worktree
1506 .update(cx, |worktree, cx| {
1507 randomly_mutate_worktree(worktree, &mut rng, cx)
1508 })
1509 .await
1510 .log_err();
1511 } else {
1512 randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
1513 }
1514
1515 let buffered_event_count = fs.as_fake().buffered_event_count();
1516 if buffered_event_count > 0 && rng.gen_bool(0.3) {
1517 let len = rng.gen_range(0..=buffered_event_count);
1518 log::info!("flushing {} events", len);
1519 fs.as_fake().flush_events(len);
1520 } else {
1521 randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
1522 mutations_len -= 1;
1523 }
1524
1525 cx.executor().run_until_parked();
1526 if rng.gen_bool(0.2) {
1527 log::info!("storing snapshot {}", snapshots.len());
1528 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1529 snapshots.push(snapshot);
1530 }
1531 }
1532
1533 log::info!("quiescing");
1534 fs.as_fake().flush_events(usize::MAX);
1535 cx.executor().run_until_parked();
1536
1537 let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1538 snapshot.check_invariants(true);
1539 let expanded_paths = snapshot
1540 .expanded_entries()
1541 .map(|e| e.path.clone())
1542 .collect::<Vec<_>>();
1543
1544 {
1545 let new_worktree = Worktree::local(
1546 build_client(cx),
1547 root_dir,
1548 true,
1549 fs.clone(),
1550 Default::default(),
1551 &mut cx.to_async(),
1552 )
1553 .await
1554 .unwrap();
1555 new_worktree
1556 .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
1557 .await;
1558 new_worktree
1559 .update(cx, |tree, _| {
1560 tree.as_local_mut()
1561 .unwrap()
1562 .refresh_entries_for_paths(expanded_paths)
1563 })
1564 .recv()
1565 .await;
1566 let new_snapshot =
1567 new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
1568 assert_eq!(
1569 snapshot.entries_without_ids(true),
1570 new_snapshot.entries_without_ids(true)
1571 );
1572 }
1573
1574 for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
1575 for update in updates.lock().iter() {
1576 if update.scan_id >= prev_snapshot.scan_id() as u64 {
1577 prev_snapshot.apply_remote_update(update.clone()).unwrap();
1578 }
1579 }
1580
1581 assert_eq!(
1582 prev_snapshot
1583 .entries(true)
1584 .map(ignore_pending_dir)
1585 .collect::<Vec<_>>(),
1586 snapshot
1587 .entries(true)
1588 .map(ignore_pending_dir)
1589 .collect::<Vec<_>>(),
1590 "wrong updates after snapshot {i}: {updates:#?}",
1591 );
1592 }
1593
1594 fn ignore_pending_dir(entry: &Entry) -> Entry {
1595 let mut entry = entry.clone();
1596 if entry.kind.is_dir() {
1597 entry.kind = EntryKind::Dir
1598 }
1599 entry
1600 }
1601}
1602
1603// The worktree's `UpdatedEntries` event can be used to follow along with
1604// all changes to the worktree's snapshot.
1605fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
1606 let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
1607 cx.subscribe(&cx.handle(), move |tree, _, event, _| {
1608 if let Event::UpdatedEntries(changes) = event {
1609 for (path, _, change_type) in changes.iter() {
1610 let entry = tree.entry_for_path(&path).cloned();
1611 let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
1612 Ok(ix) | Err(ix) => ix,
1613 };
1614 match change_type {
1615 PathChange::Added => entries.insert(ix, entry.unwrap()),
1616 PathChange::Removed => drop(entries.remove(ix)),
1617 PathChange::Updated => {
1618 let entry = entry.unwrap();
1619 let existing_entry = entries.get_mut(ix).unwrap();
1620 assert_eq!(existing_entry.path, entry.path);
1621 *existing_entry = entry;
1622 }
1623 PathChange::AddedOrUpdated | PathChange::Loaded => {
1624 let entry = entry.unwrap();
1625 if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
1626 *entries.get_mut(ix).unwrap() = entry;
1627 } else {
1628 entries.insert(ix, entry);
1629 }
1630 }
1631 }
1632 }
1633
1634 let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
1635 assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
1636 }
1637 })
1638 .detach();
1639}
1640
1641fn randomly_mutate_worktree(
1642 worktree: &mut Worktree,
1643 rng: &mut impl Rng,
1644 cx: &mut ModelContext<Worktree>,
1645) -> Task<Result<()>> {
1646 log::info!("mutating worktree");
1647 let worktree = worktree.as_local_mut().unwrap();
1648 let snapshot = worktree.snapshot();
1649 let entry = snapshot.entries(false).choose(rng).unwrap();
1650
1651 match rng.gen_range(0_u32..100) {
1652 0..=33 if entry.path.as_ref() != Path::new("") => {
1653 log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
1654 worktree.delete_entry(entry.id, false, cx).unwrap()
1655 }
1656 ..=66 if entry.path.as_ref() != Path::new("") => {
1657 let other_entry = snapshot.entries(false).choose(rng).unwrap();
1658 let new_parent_path = if other_entry.is_dir() {
1659 other_entry.path.clone()
1660 } else {
1661 other_entry.path.parent().unwrap().into()
1662 };
1663 let mut new_path = new_parent_path.join(random_filename(rng));
1664 if new_path.starts_with(&entry.path) {
1665 new_path = random_filename(rng).into();
1666 }
1667
1668 log::info!(
1669 "renaming entry {:?} ({}) to {:?}",
1670 entry.path,
1671 entry.id.0,
1672 new_path
1673 );
1674 let task = worktree.rename_entry(entry.id, new_path, cx);
1675 cx.background_executor().spawn(async move {
1676 task.await?.unwrap();
1677 Ok(())
1678 })
1679 }
1680 _ => {
1681 if entry.is_dir() {
1682 let child_path = entry.path.join(random_filename(rng));
1683 let is_dir = rng.gen_bool(0.3);
1684 log::info!(
1685 "creating {} at {:?}",
1686 if is_dir { "dir" } else { "file" },
1687 child_path,
1688 );
1689 let task = worktree.create_entry(child_path, is_dir, cx);
1690 cx.background_executor().spawn(async move {
1691 task.await?;
1692 Ok(())
1693 })
1694 } else {
1695 log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
1696 let task =
1697 worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
1698 cx.background_executor().spawn(async move {
1699 task.await?;
1700 Ok(())
1701 })
1702 }
1703 }
1704 }
1705}
1706
1707async fn randomly_mutate_fs(
1708 fs: &Arc<dyn Fs>,
1709 root_path: &Path,
1710 insertion_probability: f64,
1711 rng: &mut impl Rng,
1712) {
1713 log::info!("mutating fs");
1714 let mut files = Vec::new();
1715 let mut dirs = Vec::new();
1716 for path in fs.as_fake().paths(false) {
1717 if path.starts_with(root_path) {
1718 if fs.is_file(&path).await {
1719 files.push(path);
1720 } else {
1721 dirs.push(path);
1722 }
1723 }
1724 }
1725
1726 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
1727 let path = dirs.choose(rng).unwrap();
1728 let new_path = path.join(random_filename(rng));
1729
1730 if rng.gen() {
1731 log::info!(
1732 "creating dir {:?}",
1733 new_path.strip_prefix(root_path).unwrap()
1734 );
1735 fs.create_dir(&new_path).await.unwrap();
1736 } else {
1737 log::info!(
1738 "creating file {:?}",
1739 new_path.strip_prefix(root_path).unwrap()
1740 );
1741 fs.create_file(&new_path, Default::default()).await.unwrap();
1742 }
1743 } else if rng.gen_bool(0.05) {
1744 let ignore_dir_path = dirs.choose(rng).unwrap();
1745 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
1746
1747 let subdirs = dirs
1748 .iter()
1749 .filter(|d| d.starts_with(&ignore_dir_path))
1750 .cloned()
1751 .collect::<Vec<_>>();
1752 let subfiles = files
1753 .iter()
1754 .filter(|d| d.starts_with(&ignore_dir_path))
1755 .cloned()
1756 .collect::<Vec<_>>();
1757 let files_to_ignore = {
1758 let len = rng.gen_range(0..=subfiles.len());
1759 subfiles.choose_multiple(rng, len)
1760 };
1761 let dirs_to_ignore = {
1762 let len = rng.gen_range(0..subdirs.len());
1763 subdirs.choose_multiple(rng, len)
1764 };
1765
1766 let mut ignore_contents = String::new();
1767 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
1768 writeln!(
1769 ignore_contents,
1770 "{}",
1771 path_to_ignore
1772 .strip_prefix(&ignore_dir_path)
1773 .unwrap()
1774 .to_str()
1775 .unwrap()
1776 )
1777 .unwrap();
1778 }
1779 log::info!(
1780 "creating gitignore {:?} with contents:\n{}",
1781 ignore_path.strip_prefix(&root_path).unwrap(),
1782 ignore_contents
1783 );
1784 fs.save(
1785 &ignore_path,
1786 &ignore_contents.as_str().into(),
1787 Default::default(),
1788 )
1789 .await
1790 .unwrap();
1791 } else {
1792 let old_path = {
1793 let file_path = files.choose(rng);
1794 let dir_path = dirs[1..].choose(rng);
1795 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
1796 };
1797
1798 let is_rename = rng.gen();
1799 if is_rename {
1800 let new_path_parent = dirs
1801 .iter()
1802 .filter(|d| !d.starts_with(old_path))
1803 .choose(rng)
1804 .unwrap();
1805
1806 let overwrite_existing_dir =
1807 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
1808 let new_path = if overwrite_existing_dir {
1809 fs.remove_dir(
1810 &new_path_parent,
1811 RemoveOptions {
1812 recursive: true,
1813 ignore_if_not_exists: true,
1814 },
1815 )
1816 .await
1817 .unwrap();
1818 new_path_parent.to_path_buf()
1819 } else {
1820 new_path_parent.join(random_filename(rng))
1821 };
1822
1823 log::info!(
1824 "renaming {:?} to {}{:?}",
1825 old_path.strip_prefix(&root_path).unwrap(),
1826 if overwrite_existing_dir {
1827 "overwrite "
1828 } else {
1829 ""
1830 },
1831 new_path.strip_prefix(&root_path).unwrap()
1832 );
1833 fs.rename(
1834 &old_path,
1835 &new_path,
1836 fs::RenameOptions {
1837 overwrite: true,
1838 ignore_if_exists: true,
1839 },
1840 )
1841 .await
1842 .unwrap();
1843 } else if fs.is_file(&old_path).await {
1844 log::info!(
1845 "deleting file {:?}",
1846 old_path.strip_prefix(&root_path).unwrap()
1847 );
1848 fs.remove_file(old_path, Default::default()).await.unwrap();
1849 } else {
1850 log::info!(
1851 "deleting dir {:?}",
1852 old_path.strip_prefix(&root_path).unwrap()
1853 );
1854 fs.remove_dir(
1855 &old_path,
1856 RemoveOptions {
1857 recursive: true,
1858 ignore_if_not_exists: true,
1859 },
1860 )
1861 .await
1862 .unwrap();
1863 }
1864 }
1865}
1866
1867fn random_filename(rng: &mut impl Rng) -> String {
1868 (0..6)
1869 .map(|_| rng.sample(rand::distributions::Alphanumeric))
1870 .map(char::from)
1871 .collect()
1872}
1873
1874#[gpui::test]
1875async fn test_rename_work_directory(cx: &mut TestAppContext) {
1876 init_test(cx);
1877 cx.executor().allow_parking();
1878 let root = temp_tree(json!({
1879 "projects": {
1880 "project1": {
1881 "a": "",
1882 "b": "",
1883 }
1884 },
1885
1886 }));
1887 let root_path = root.path();
1888
1889 let tree = Worktree::local(
1890 build_client(cx),
1891 root_path,
1892 true,
1893 Arc::new(RealFs::default()),
1894 Default::default(),
1895 &mut cx.to_async(),
1896 )
1897 .await
1898 .unwrap();
1899
1900 let repo = git_init(&root_path.join("projects/project1"));
1901 git_add("a", &repo);
1902 git_commit("init", &repo);
1903 std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
1904
1905 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1906 .await;
1907
1908 tree.flush_fs_events(cx).await;
1909
1910 cx.read(|cx| {
1911 let tree = tree.read(cx);
1912 let (work_dir, _) = tree.repositories().next().unwrap();
1913 assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
1914 assert_eq!(
1915 tree.status_for_file(Path::new("projects/project1/a")),
1916 Some(GitFileStatus::Modified)
1917 );
1918 assert_eq!(
1919 tree.status_for_file(Path::new("projects/project1/b")),
1920 Some(GitFileStatus::Added)
1921 );
1922 });
1923
1924 std::fs::rename(
1925 root_path.join("projects/project1"),
1926 root_path.join("projects/project2"),
1927 )
1928 .ok();
1929 tree.flush_fs_events(cx).await;
1930
1931 cx.read(|cx| {
1932 let tree = tree.read(cx);
1933 let (work_dir, _) = tree.repositories().next().unwrap();
1934 assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
1935 assert_eq!(
1936 tree.status_for_file(Path::new("projects/project2/a")),
1937 Some(GitFileStatus::Modified)
1938 );
1939 assert_eq!(
1940 tree.status_for_file(Path::new("projects/project2/b")),
1941 Some(GitFileStatus::Added)
1942 );
1943 });
1944}
1945
1946#[gpui::test]
1947async fn test_git_repository_for_path(cx: &mut TestAppContext) {
1948 init_test(cx);
1949 cx.executor().allow_parking();
1950 let root = temp_tree(json!({
1951 "c.txt": "",
1952 "dir1": {
1953 ".git": {},
1954 "deps": {
1955 "dep1": {
1956 ".git": {},
1957 "src": {
1958 "a.txt": ""
1959 }
1960 }
1961 },
1962 "src": {
1963 "b.txt": ""
1964 }
1965 },
1966 }));
1967
1968 let tree = Worktree::local(
1969 build_client(cx),
1970 root.path(),
1971 true,
1972 Arc::new(RealFs::default()),
1973 Default::default(),
1974 &mut cx.to_async(),
1975 )
1976 .await
1977 .unwrap();
1978
1979 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
1980 .await;
1981 tree.flush_fs_events(cx).await;
1982
1983 tree.read_with(cx, |tree, _cx| {
1984 let tree = tree.as_local().unwrap();
1985
1986 assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
1987
1988 let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
1989 assert_eq!(
1990 entry
1991 .work_directory(tree)
1992 .map(|directory| directory.as_ref().to_owned()),
1993 Some(Path::new("dir1").to_owned())
1994 );
1995
1996 let entry = tree
1997 .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
1998 .unwrap();
1999 assert_eq!(
2000 entry
2001 .work_directory(tree)
2002 .map(|directory| directory.as_ref().to_owned()),
2003 Some(Path::new("dir1/deps/dep1").to_owned())
2004 );
2005
2006 let entries = tree.files(false, 0);
2007
2008 let paths_with_repos = tree
2009 .entries_with_repositories(entries)
2010 .map(|(entry, repo)| {
2011 (
2012 entry.path.as_ref(),
2013 repo.and_then(|repo| {
2014 repo.work_directory(&tree)
2015 .map(|work_directory| work_directory.0.to_path_buf())
2016 }),
2017 )
2018 })
2019 .collect::<Vec<_>>();
2020
2021 assert_eq!(
2022 paths_with_repos,
2023 &[
2024 (Path::new("c.txt"), None),
2025 (
2026 Path::new("dir1/deps/dep1/src/a.txt"),
2027 Some(Path::new("dir1/deps/dep1").into())
2028 ),
2029 (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
2030 ]
2031 );
2032 });
2033
2034 let repo_update_events = Arc::new(Mutex::new(vec![]));
2035 tree.update(cx, |_, cx| {
2036 let repo_update_events = repo_update_events.clone();
2037 cx.subscribe(&tree, move |_, _, event, _| {
2038 if let Event::UpdatedGitRepositories(update) = event {
2039 repo_update_events.lock().push(update.clone());
2040 }
2041 })
2042 .detach();
2043 });
2044
2045 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
2046 tree.flush_fs_events(cx).await;
2047
2048 assert_eq!(
2049 repo_update_events.lock()[0]
2050 .iter()
2051 .map(|e| e.0.clone())
2052 .collect::<Vec<Arc<Path>>>(),
2053 vec![Path::new("dir1").into()]
2054 );
2055
2056 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
2057 tree.flush_fs_events(cx).await;
2058
2059 tree.read_with(cx, |tree, _cx| {
2060 let tree = tree.as_local().unwrap();
2061
2062 assert!(tree
2063 .repository_for_path("dir1/src/b.txt".as_ref())
2064 .is_none());
2065 });
2066}
2067
2068#[gpui::test]
2069async fn test_git_status(cx: &mut TestAppContext) {
2070 init_test(cx);
2071 cx.executor().allow_parking();
2072 const IGNORE_RULE: &str = "**/target";
2073
2074 let root = temp_tree(json!({
2075 "project": {
2076 "a.txt": "a",
2077 "b.txt": "bb",
2078 "c": {
2079 "d": {
2080 "e.txt": "eee"
2081 }
2082 },
2083 "f.txt": "ffff",
2084 "target": {
2085 "build_file": "???"
2086 },
2087 ".gitignore": IGNORE_RULE
2088 },
2089
2090 }));
2091
2092 const A_TXT: &str = "a.txt";
2093 const B_TXT: &str = "b.txt";
2094 const E_TXT: &str = "c/d/e.txt";
2095 const F_TXT: &str = "f.txt";
2096 const DOTGITIGNORE: &str = ".gitignore";
2097 const BUILD_FILE: &str = "target/build_file";
2098 let project_path = Path::new("project");
2099
2100 // Set up git repository before creating the worktree.
2101 let work_dir = root.path().join("project");
2102 let mut repo = git_init(work_dir.as_path());
2103 repo.add_ignore_rule(IGNORE_RULE).unwrap();
2104 git_add(A_TXT, &repo);
2105 git_add(E_TXT, &repo);
2106 git_add(DOTGITIGNORE, &repo);
2107 git_commit("Initial commit", &repo);
2108
2109 let tree = Worktree::local(
2110 build_client(cx),
2111 root.path(),
2112 true,
2113 Arc::new(RealFs::default()),
2114 Default::default(),
2115 &mut cx.to_async(),
2116 )
2117 .await
2118 .unwrap();
2119
2120 tree.flush_fs_events(cx).await;
2121 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2122 .await;
2123 cx.executor().run_until_parked();
2124
2125 // Check that the right git state is observed on startup
2126 tree.read_with(cx, |tree, _cx| {
2127 let snapshot = tree.snapshot();
2128 assert_eq!(snapshot.repositories().count(), 1);
2129 let (dir, repo_entry) = snapshot.repositories().next().unwrap();
2130 assert_eq!(dir.as_ref(), Path::new("project"));
2131 assert!(repo_entry.location_in_repo.is_none());
2132
2133 assert_eq!(
2134 snapshot.status_for_file(project_path.join(B_TXT)),
2135 Some(GitFileStatus::Added)
2136 );
2137 assert_eq!(
2138 snapshot.status_for_file(project_path.join(F_TXT)),
2139 Some(GitFileStatus::Added)
2140 );
2141 });
2142
2143 // Modify a file in the working copy.
2144 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
2145 tree.flush_fs_events(cx).await;
2146 cx.executor().run_until_parked();
2147
2148 // The worktree detects that the file's git status has changed.
2149 tree.read_with(cx, |tree, _cx| {
2150 let snapshot = tree.snapshot();
2151 assert_eq!(
2152 snapshot.status_for_file(project_path.join(A_TXT)),
2153 Some(GitFileStatus::Modified)
2154 );
2155 });
2156
2157 // Create a commit in the git repository.
2158 git_add(A_TXT, &repo);
2159 git_add(B_TXT, &repo);
2160 git_commit("Committing modified and added", &repo);
2161 tree.flush_fs_events(cx).await;
2162 cx.executor().run_until_parked();
2163
2164 // The worktree detects that the files' git status have changed.
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(project_path.join(F_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
        assert_eq!(
            snapshot.status_for_file(project_path.join(B_TXT)),
            Some(GitFileStatus::Added)
        );
        assert_eq!(
            snapshot.status_for_file(project_path.join(E_TXT)),
            Some(GitFileStatus::Modified)
        );
    });

    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(
            snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
            Some(GitFileStatus::Added)
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert_eq!(
            snapshot.status_for_file(
                project_path
                    .join(Path::new(renamed_dir_name))
                    .join(RENAMED_FILE)
            ),
            Some(GitFileStatus::Added)
        );
    });
}

#[gpui::test]
async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = temp_tree(json!({
        "my-repo": {
            // .git folder will go here
            "a.txt": "a",
            "sub-folder-1": {
                "sub-folder-2": {
                    "c.txt": "cc",
                    "d": {
                        "e.txt": "eee"
                    }
                },
            }
        },

    }));

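    // Paths relative to the root of the git repository (my-repo), not to the
    // worktree root (sub-folder-2).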
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Set up git repository before creating the worktree.
    let git_repo_work_dir = root.path().join("my-repo");
    let repo = git_init(git_repo_work_dir.as_path());
    git_add(C_TXT, &repo);
    git_commit("Initial commit", &repo);

    // Open the worktree in a subfolder of the repository.
    let project_root = Path::new("my-repo/sub-folder-1/sub-folder-2");
    let tree = Worktree::local(
        build_client(cx),
        root.path().join(project_root),
        true,
        Arc::new(RealFs::default()),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    tree.flush_fs_events(cx).await;
    tree.flush_fs_events_in_root_git_repository(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Ensure that the git status is loaded correctly
    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();
        assert_eq!(snapshot.repositories().count(), 1);
        let (dir, repo_entry) = snapshot.repositories().next().unwrap();
        // The repository path is blank because the git repository's working
        // directory (my-repo) is above the project root, so within this worktree
        // it is represented by the root entry.
        assert_eq!(dir.as_ref(), Path::new(""));

        // `location_in_repo` is the path of the project root (sub-folder-2)
        // relative to the root of the repository.
        assert_eq!(
            repo_entry.location_in_repo,
            Some(Arc::from(Path::new("sub-folder-1/sub-folder-2")))
        );

        assert_eq!(snapshot.status_for_file("c.txt"), None);
        assert_eq!(
            snapshot.status_for_file("d/e.txt"),
            Some(GitFileStatus::Added)
        );
    });

    // Now simulate FS events, but ONLY in the .git folder, which lives outside
    // of our project root. That is, we don't produce any FS events for files
    // inside the project.
    git_add(E_TXT, &repo);
    git_commit("Second commit", &repo);
    tree.flush_fs_events_in_root_git_repository(cx).await;
    cx.executor().run_until_parked();

    tree.read_with(cx, |tree, _cx| {
        let snapshot = tree.snapshot();

        assert!(snapshot.repositories().next().is_some());

        assert_eq!(snapshot.status_for_file("c.txt"), None);
        assert_eq!(snapshot.status_for_file("d/e.txt"), None);
    });
}

#[gpui::test]
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/root",
        json!({
            ".git": {},
            "a": {
                "b": {
                    "c1.txt": "",
                    "c2.txt": "",
                },
                "d": {
                    "e1.txt": "",
                    "e2.txt": "",
                    "e3.txt": "",
                }
            },
            "f": {
                "no-status.txt": ""
            },
            "g": {
                "h1.txt": "",
                "h2.txt": ""
            },

        }),
    )
    .await;

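    // Seed statuses directly on the fake repository, as if they had been produced
    // by git operations.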
    fs.set_status_for_repo_via_git_operation(
        Path::new("/root/.git"),
        &[
            (Path::new("a/b/c1.txt"), GitFileStatus::Added),
            (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
            (Path::new("g/h2.txt"), GitFileStatus::Conflict),
        ],
    );

    let tree = Worktree::local(
        build_client(cx),
        Path::new("/root"),
        true,
        fs.clone(),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();

    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    cx.executor().run_until_parked();
    let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());

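    // The expectations below assume that a directory surfaces the most severe
    // status found among its descendants: conflicts take precedence over
    // modifications, which take precedence over additions.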
    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new(""), Some(GitFileStatus::Conflict)),
            (Path::new("a"), Some(GitFileStatus::Modified)),
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
            (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d"), Some(GitFileStatus::Modified)),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f"), None),
            (Path::new("f/no-status.txt"), None),
            (Path::new("g"), Some(GitFileStatus::Conflict)),
        ],
    );

    check_propagated_statuses(
        &snapshot,
        &[
            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
            (Path::new("a/b/c2.txt"), None),
            (Path::new("a/d/e1.txt"), None),
            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
            (Path::new("f/no-status.txt"), None),
        ],
    );

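    // Looks up the entry for each expected path, runs status propagation over just
    // that set of entries, and asserts that the resulting statuses match.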
    #[track_caller]
    fn check_propagated_statuses(
        snapshot: &Snapshot,
        expected_statuses: &[(&Path, Option<GitFileStatus>)],
    ) {
        let mut entries = expected_statuses
            .iter()
            .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
            .collect::<Vec<_>>();
        snapshot.propagate_git_statuses(&mut entries);
        assert_eq!(
            entries
                .iter()
                .map(|e| (e.path.as_ref(), e.git_status))
                .collect::<Vec<_>>(),
            expected_statuses
        );
    }
}

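// Builds a `Client` backed by a fake system clock and an HTTP client that always
// responds with a 404, so these tests never touch the network.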
fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
    let clock = Arc::new(FakeSystemClock::default());
    let http_client = FakeHttpClient::with_404_response();
    cx.update(|cx| Client::new(clock, http_client, cx))
}

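// The helpers below drive a real git repository through `git2`, letting the tests
// above exercise the worktree's git status tracking against on-disk state.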
#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
    git2::Repository::init(path).expect("Failed to initialize git repository")
}

#[track_caller]
fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
    let path = path.as_ref();
    let mut index = repo.index().expect("Failed to get index");
    index.add_path(path).expect("Failed to add file to index");
    index.write().expect("Failed to write index");
}

#[track_caller]
fn git_remove_index(path: &Path, repo: &git2::Repository) {
    let mut index = repo.index().expect("Failed to get index");
    index
        .remove_path(path)
        .expect("Failed to remove file from index");
    index.write().expect("Failed to write index");
}

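// Writes the current index as a tree and commits it, using the existing HEAD as
// the parent when there is one; the first call therefore creates the initial commit.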
#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    let oid = repo.index().unwrap().write_tree().unwrap();
    let tree = repo.find_tree(oid).unwrap();
    if let Ok(head) = repo.head() {
        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();

        let parent_commit = parent_obj.as_commit().unwrap();

        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[parent_commit],
        )
        .expect("Failed to commit with parent");
    } else {
        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
            .expect("Failed to commit");
    }
}

#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}

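// Soft-resets HEAD to one of the current commit's parents (the parent at index
// `offset`), leaving the index and working tree untouched.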
#[track_caller]
fn git_reset(offset: usize, repo: &git2::Repository) {
    let head = repo.head().expect("Couldn't get repo head");
    let object = head.peel(git2::ObjectType::Commit).unwrap();
    let commit = object.as_commit().unwrap();
    let new_head = commit.parents().nth(offset).expect("Not enough history");
    repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
        .expect("Could not reset");
}

#[allow(dead_code)]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}

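// Asserts that excluded paths have no worktree entries at all, that ignored paths
// have entries flagged as ignored, and that tracked paths have entries that are not.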
#[track_caller]
fn check_worktree_entries(
    tree: &Worktree,
    expected_excluded_paths: &[&str],
    expected_ignored_paths: &[&str],
    expected_tracked_paths: &[&str],
) {
    for path in expected_excluded_paths {
        let entry = tree.entry_for_path(path);
        assert!(
            entry.is_none(),
            "expected path '{path}' to be excluded, but got entry: {entry:?}",
        );
    }
    for path in expected_ignored_paths {
        let entry = tree
            .entry_for_path(path)
            .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
        assert!(
            entry.is_ignored,
            "expected path '{path}' to be ignored, but got entry: {entry:?}",
        );
    }
    for path in expected_tracked_paths {
        let entry = tree
            .entry_for_path(path)
            .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
        assert!(
            !entry.is_ignored,
            "expected path '{path}' to be tracked, but got entry: {entry:?}",
        );
    }
}

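// Initializes logging when `RUST_LOG` is set and registers the worktree settings
// that these tests rely on.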
fn init_test(cx: &mut gpui::TestAppContext) {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        WorktreeSettings::register(cx);
    });
}

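// Asserts both the git status and the ignore flag recorded on a single worktree entry.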
#[track_caller]
fn assert_entry_git_state(
    tree: &Worktree,
    path: &str,
    git_status: Option<GitFileStatus>,
    is_ignored: bool,
) {
    let entry = tree
        .entry_for_path(path)
        .unwrap_or_else(|| panic!("entry {path} not found"));
    assert_eq!(entry.git_status, git_status);
    assert_eq!(entry.is_ignored, is_ignored);
}