1use crate::{FakeFs, FakeFsEntry, Fs, RemoveOptions, RenameOptions};
2use anyhow::{Context as _, Result, bail};
3use collections::{HashMap, HashSet};
4use futures::future::{self, BoxFuture, join_all};
5use git::{
6 Oid, RunHook,
7 blame::Blame,
8 repository::{
9 AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions, FetchOptions,
10 GRAPH_CHUNK_SIZE, GitRepository, GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder,
11 LogSource, PushOptions, Remote, RepoPath, ResetMode, Worktree,
12 },
13 status::{
14 DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
15 UnmergedStatus,
16 },
17};
18use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task};
19use ignore::gitignore::GitignoreBuilder;
20use parking_lot::Mutex;
21use rope::Rope;
22use smol::{channel::Sender, future::FutureExt as _};
23use std::{path::PathBuf, sync::Arc};
24use text::LineEnding;
25use util::{paths::PathStyle, rel_path::RelPath};
26
/// An in-memory [`GitRepository`] test double backed by a [`FakeFs`].
///
/// All git state (index, HEAD contents, branches, …) lives inside the fake
/// filesystem's per-repository state, accessed via `fs.with_git_state`.
#[derive(Clone)]
pub struct FakeGitRepository {
    pub(crate) fs: Arc<FakeFs>,
    // Snapshots taken by `checkpoint()`, keyed by the random Oid returned to
    // callers; `restore_checkpoint()` re-inserts the saved entry into the fs.
    pub(crate) checkpoints: Arc<Mutex<HashMap<Oid, FakeFsEntry>>>,
    pub(crate) executor: BackgroundExecutor,
    // Path of the `.git` directory; used as the key for `fs.with_git_state`.
    pub(crate) dot_git_path: PathBuf,
    // Returned by `path()`.
    pub(crate) repository_dir_path: PathBuf,
    // Returned by `main_repository_path()`.
    pub(crate) common_dir_path: PathBuf,
}
36
/// The mutable, in-memory git state for one fake repository.
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    // Sender used to emit a path when repository state changes.
    // NOTE(review): the receiving side is not visible in this file — presumably
    // the FakeFs watcher machinery; confirm before relying on delivery.
    pub event_emitter: smol::channel::Sender<PathBuf>,
    pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
    // Path -> file content as of HEAD.
    pub head_contents: HashMap<RepoPath, String>,
    // Path -> file content as staged in the index.
    pub index_contents: HashMap<RepoPath, String>,
    // everything in commit contents is in oids
    pub merge_base_contents: HashMap<RepoPath, Oid>,
    // Blob oid -> blob content; the object store for `merge_base_contents`.
    pub oids: HashMap<Oid, String>,
    pub blames: HashMap<RepoPath, Blame>,
    pub current_branch_name: Option<String>,
    pub branches: HashSet<String>,
    /// List of remotes, keys are names and values are URLs
    pub remotes: HashMap<String, String>,
    // When set, `set_index_text` fails with this message (for error-path tests).
    pub simulated_index_write_error_message: Option<String>,
    // When set, `create_worktree` fails with this message (for error-path tests).
    pub simulated_create_worktree_error: Option<String>,
    // Ref name -> sha, consulted by `revparse_batch`.
    pub refs: HashMap<String, String>,
    pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
    pub worktrees: Vec<Worktree>,
}
57
58impl FakeGitRepositoryState {
59 pub fn new(event_emitter: smol::channel::Sender<PathBuf>) -> Self {
60 FakeGitRepositoryState {
61 event_emitter,
62 head_contents: Default::default(),
63 index_contents: Default::default(),
64 unmerged_paths: Default::default(),
65 blames: Default::default(),
66 current_branch_name: Default::default(),
67 branches: Default::default(),
68 simulated_index_write_error_message: Default::default(),
69 simulated_create_worktree_error: Default::default(),
70 refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
71 merge_base_contents: Default::default(),
72 oids: Default::default(),
73 remotes: HashMap::default(),
74 graph_commits: Vec::new(),
75 worktrees: Vec::new(),
76 }
77 }
78}
79
80impl FakeGitRepository {
81 fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<'static, Result<T>>
82 where
83 F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> Result<T>,
84 T: Send,
85 {
86 let fs = self.fs.clone();
87 let executor = self.executor.clone();
88 let dot_git_path = self.dot_git_path.clone();
89 async move {
90 executor.simulate_random_delay().await;
91 fs.with_git_state(&dot_git_path, write, f)?
92 }
93 .boxed()
94 }
95}
96
97impl GitRepository for FakeGitRepository {
    // No-op: the fake keeps its index entirely in memory (`index_contents`).
    fn reload_index(&self) {}
99
100 fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
101 let fut = self.with_state_async(false, move |state| {
102 state
103 .index_contents
104 .get(&path)
105 .context("not present in index")
106 .cloned()
107 });
108 self.executor.spawn(async move { fut.await.ok() }).boxed()
109 }
110
111 fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
112 let fut = self.with_state_async(false, move |state| {
113 state
114 .head_contents
115 .get(&path)
116 .context("not present in HEAD")
117 .cloned()
118 });
119 self.executor.spawn(async move { fut.await.ok() }).boxed()
120 }
121
122 fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
123 self.with_state_async(false, move |state| {
124 state.oids.get(&oid).cloned().context("oid does not exist")
125 })
126 .boxed()
127 }
128
    // Panics: loading full commit diffs is not supported by the fake.
    fn load_commit(
        &self,
        _commit: String,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::CommitDiff>> {
        unimplemented!()
    }
136
137 fn set_index_text(
138 &self,
139 path: RepoPath,
140 content: Option<String>,
141 _env: Arc<HashMap<String, String>>,
142 _is_executable: bool,
143 ) -> BoxFuture<'_, anyhow::Result<()>> {
144 self.with_state_async(true, move |state| {
145 if let Some(message) = &state.simulated_index_write_error_message {
146 anyhow::bail!("{message}");
147 } else if let Some(content) = content {
148 state.index_contents.insert(path, content);
149 } else {
150 state.index_contents.remove(&path);
151 }
152 Ok(())
153 })
154 }
155
156 fn remote_url(&self, name: &str) -> BoxFuture<'_, Option<String>> {
157 let name = name.to_string();
158 let fut = self.with_state_async(false, move |state| {
159 state
160 .remotes
161 .get(&name)
162 .context("remote not found")
163 .cloned()
164 });
165 async move { fut.await.ok() }.boxed()
166 }
167
    /// Diffs the merge base against HEAD using the in-memory content maps.
    ///
    /// `_request` is ignored: the fake always compares `merge_base_contents`
    /// (oids resolved through `state.oids`) with `head_contents`.
    fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
        let mut entries = HashMap::default();
        self.with_state_async(false, |state| {
            // Paths present in HEAD: Modified if the merge base has different
            // content, Added if the merge base lacks the path entirely.
            for (path, content) in &state.head_contents {
                let status = if let Some((oid, original)) = state
                    .merge_base_contents
                    .get(path)
                    .map(|oid| (oid, &state.oids[oid]))
                {
                    if original == content {
                        // Unchanged since the merge base — not part of the diff.
                        continue;
                    }
                    TreeDiffStatus::Modified { old: *oid }
                } else {
                    TreeDiffStatus::Added
                };
                entries.insert(path.clone(), status);
            }
            // Paths only in the merge base were deleted in HEAD.
            for (path, oid) in &state.merge_base_contents {
                if !entries.contains_key(path) {
                    entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
                }
            }
            Ok(TreeDiff { entries })
        })
        .boxed()
    }
195
196 fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
197 self.with_state_async(false, |state| {
198 Ok(revs
199 .into_iter()
200 .map(|rev| state.refs.get(&rev).cloned())
201 .collect())
202 })
203 }
204
205 fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
206 async {
207 Ok(CommitDetails {
208 sha: commit.into(),
209 message: "initial commit".into(),
210 ..Default::default()
211 })
212 }
213 .boxed()
214 }
215
    // Panics: `git reset` is not supported by the fake.
    fn reset(
        &self,
        _commit: String,
        _mode: ResetMode,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Panics: checking out paths from a commit is not supported by the fake.
    fn checkout_files(
        &self,
        _commit: String,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
233
    // The repository's own git directory.
    fn path(&self) -> PathBuf {
        self.repository_dir_path.clone()
    }

    // The common git directory (shared across linked worktrees).
    fn main_repository_path(&self) -> PathBuf {
        self.common_dir_path.clone()
    }

    // The fake never has a merge in progress, so there is no merge message.
    fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
        async move { None }.boxed()
    }
245
    /// Computes a `git status`-like listing by comparing three sources for
    /// each path: HEAD contents, index contents, and the working copy as read
    /// from the fake filesystem. Only paths matching one of `path_prefixes`
    /// are reported, and clean (Unmodified/Unmodified) entries are omitted.
    fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>> {
        let workdir_path = self.dot_git_path.parent().unwrap();

        // Load gitignores
        let ignores = workdir_path
            .ancestors()
            .filter_map(|dir| {
                let ignore_path = dir.join(".gitignore");
                let content = self.fs.read_file_sync(ignore_path).ok()?;
                let content = String::from_utf8(content).ok()?;
                let mut builder = GitignoreBuilder::new(dir);
                for line in content.lines() {
                    builder.add_line(Some(dir.into()), line).ok()?;
                }
                builder.build().ok()
            })
            .collect::<Vec<_>>();

        // Load working copy files.
        let git_files: HashMap<RepoPath, (String, bool)> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                // TODO better simulate git status output in the case of submodules and worktrees
                let repo_path = path.strip_prefix(workdir_path).ok()?;
                // Everything under `.git` starts out ignored; gitignore rules
                // can then ignore or whitelist the path.
                let mut is_ignored = repo_path.starts_with(".git");
                for ignore in &ignores {
                    match ignore.matched_path_or_any_parents(path, false) {
                        ignore::Match::None => {}
                        ignore::Match::Ignore(_) => is_ignored = true,
                        ignore::Match::Whitelist(_) => break,
                    }
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .map(|content| String::from_utf8(content).unwrap())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored)))
            })
            .collect();

        let result = self.fs.with_git_state(&self.dot_git_path, false, |state| {
            let mut entries = Vec::new();
            // The union of all paths known to HEAD, the index, or the worktree.
            let paths = state
                .head_contents
                .keys()
                .chain(state.index_contents.keys())
                .chain(git_files.keys())
                .collect::<HashSet<_>>();
            for path in paths {
                if !path_prefixes.iter().any(|prefix| path.starts_with(prefix)) {
                    continue;
                }

                let head = state.head_contents.get(path);
                let index = state.index_contents.get(path);
                let unmerged = state.unmerged_paths.get(path);
                let fs = git_files.get(path);
                // Classify by which sources contain the path and whether the
                // contents agree: (unmerged, head, index, worktree).
                let status = match (unmerged, head, index, fs) {
                    (Some(unmerged), _, _, _) => FileStatus::Unmerged(*unmerged),
                    (_, Some(head), Some(index), Some((fs, _))) => {
                        FileStatus::Tracked(TrackedStatus {
                            index_status: if head == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                            worktree_status: if fs == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                        })
                    }
                    (_, Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: if head == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Added,
                    }),
                    (_, Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, None, Some(index), Some((fs, _))) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: if fs == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                    }),
                    (_, None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, None, None, Some((_, is_ignored))) => {
                        if *is_ignored {
                            continue;
                        }
                        FileStatus::Untracked
                    }
                    (_, None, None, None) => {
                        // `paths` was built from these three sources, so at
                        // least one lookup must succeed.
                        unreachable!();
                    }
                };
                // Clean files are not reported, matching `git status`.
                if status
                    != FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Unmodified,
                        worktree_status: StatusCode::Unmodified,
                    })
                {
                    entries.push((path.clone(), status));
                }
            }
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            anyhow::Ok(GitStatus {
                entries: entries.into(),
            })
        });
        Task::ready(match result {
            Ok(result) => result,
            Err(e) => Err(e),
        })
    }
380
381 fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
382 async { Ok(git::stash::GitStash::default()) }.boxed()
383 }
384
385 fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
386 self.with_state_async(false, move |state| {
387 let current_branch = &state.current_branch_name;
388 Ok(state
389 .branches
390 .iter()
391 .map(|branch_name| {
392 let ref_name = if branch_name.starts_with("refs/") {
393 branch_name.into()
394 } else {
395 format!("refs/heads/{branch_name}").into()
396 };
397 Branch {
398 is_head: Some(branch_name) == current_branch.as_ref(),
399 ref_name,
400 most_recent_commit: None,
401 upstream: None,
402 }
403 })
404 .collect())
405 })
406 }
407
408 fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
409 self.with_state_async(false, |state| Ok(state.worktrees.clone()))
410 }
411
412 fn create_worktree(
413 &self,
414 name: String,
415 directory: PathBuf,
416 from_commit: Option<String>,
417 ) -> BoxFuture<'_, Result<()>> {
418 let fs = self.fs.clone();
419 let executor = self.executor.clone();
420 let dot_git_path = self.dot_git_path.clone();
421 async move {
422 let path = directory.join(&name);
423 executor.simulate_random_delay().await;
424 // Check for simulated error before any side effects
425 fs.with_git_state(&dot_git_path, false, |state| {
426 if let Some(message) = &state.simulated_create_worktree_error {
427 anyhow::bail!("{message}");
428 }
429 Ok(())
430 })??;
431 // Create directory before updating state so state is never
432 // inconsistent with the filesystem
433 fs.create_dir(&path).await?;
434 fs.with_git_state(&dot_git_path, true, {
435 let path = path.clone();
436 move |state| {
437 if state.branches.contains(&name) {
438 bail!("a branch named '{}' already exists", name);
439 }
440 let ref_name = format!("refs/heads/{name}");
441 let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string());
442 state.refs.insert(ref_name.clone(), sha.clone());
443 state.worktrees.push(Worktree {
444 path,
445 ref_name: ref_name.into(),
446 sha: sha.into(),
447 });
448 state.branches.insert(name);
449 Ok::<(), anyhow::Error>(())
450 }
451 })??;
452 Ok(())
453 }
454 .boxed()
455 }
456
457 fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> {
458 let fs = self.fs.clone();
459 let executor = self.executor.clone();
460 let dot_git_path = self.dot_git_path.clone();
461 async move {
462 executor.simulate_random_delay().await;
463 // Validate the worktree exists in state before touching the filesystem
464 fs.with_git_state(&dot_git_path, false, {
465 let path = path.clone();
466 move |state| {
467 if !state.worktrees.iter().any(|w| w.path == path) {
468 bail!("no worktree found at path: {}", path.display());
469 }
470 Ok(())
471 }
472 })??;
473 // Now remove the directory
474 fs.remove_dir(
475 &path,
476 RemoveOptions {
477 recursive: true,
478 ignore_if_not_exists: false,
479 },
480 )
481 .await?;
482 // Update state
483 fs.with_git_state(&dot_git_path, true, move |state| {
484 state.worktrees.retain(|worktree| worktree.path != path);
485 Ok::<(), anyhow::Error>(())
486 })??;
487 Ok(())
488 }
489 .boxed()
490 }
491
492 fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> {
493 let fs = self.fs.clone();
494 let executor = self.executor.clone();
495 let dot_git_path = self.dot_git_path.clone();
496 async move {
497 executor.simulate_random_delay().await;
498 // Validate the worktree exists in state before touching the filesystem
499 fs.with_git_state(&dot_git_path, false, {
500 let old_path = old_path.clone();
501 move |state| {
502 if !state.worktrees.iter().any(|w| w.path == old_path) {
503 bail!("no worktree found at path: {}", old_path.display());
504 }
505 Ok(())
506 }
507 })??;
508 // Now move the directory
509 fs.rename(
510 &old_path,
511 &new_path,
512 RenameOptions {
513 overwrite: false,
514 ignore_if_exists: false,
515 create_parents: true,
516 },
517 )
518 .await?;
519 // Update state
520 fs.with_git_state(&dot_git_path, true, move |state| {
521 let worktree = state
522 .worktrees
523 .iter_mut()
524 .find(|worktree| worktree.path == old_path)
525 .expect("worktree was validated above");
526 worktree.path = new_path;
527 Ok::<(), anyhow::Error>(())
528 })??;
529 Ok(())
530 }
531 .boxed()
532 }
533
534 fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
535 self.with_state_async(true, |state| {
536 state.current_branch_name = Some(name);
537 Ok(())
538 })
539 }
540
541 fn create_branch(
542 &self,
543 name: String,
544 _base_branch: Option<String>,
545 ) -> BoxFuture<'_, Result<()>> {
546 self.with_state_async(true, move |state| {
547 state.branches.insert(name);
548 Ok(())
549 })
550 }
551
552 fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> {
553 self.with_state_async(true, move |state| {
554 if !state.branches.remove(&branch) {
555 bail!("no such branch: {branch}");
556 }
557 state.branches.insert(new_name.clone());
558 if state.current_branch_name == Some(branch) {
559 state.current_branch_name = Some(new_name);
560 }
561 Ok(())
562 })
563 }
564
565 fn delete_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
566 self.with_state_async(true, move |state| {
567 if !state.branches.remove(&name) {
568 bail!("no such branch: {name}");
569 }
570 Ok(())
571 })
572 }
573
574 fn blame(
575 &self,
576 path: RepoPath,
577 _content: Rope,
578 _line_ending: LineEnding,
579 ) -> BoxFuture<'_, Result<git::blame::Blame>> {
580 self.with_state_async(false, move |state| {
581 state
582 .blames
583 .get(&path)
584 .with_context(|| format!("failed to get blame for {:?}", path))
585 .cloned()
586 })
587 }
588
    // Delegates to the paginated variant with no skip and no limit.
    fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
592
593 fn file_history_paginated(
594 &self,
595 path: RepoPath,
596 _skip: usize,
597 _limit: Option<usize>,
598 ) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
599 async move {
600 Ok(git::repository::FileHistory {
601 entries: Vec::new(),
602 path,
603 })
604 }
605 .boxed()
606 }
607
    /// Stages the given paths by copying their current working-copy contents
    /// into the index; paths missing from the filesystem are removed from the
    /// index (mimicking `git add` of a deleted file).
    fn stage_paths(
        &self,
        paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        Box::pin(async move {
            // Read every file concurrently; a failed load means "deleted".
            let contents = paths
                .into_iter()
                .map(|path| {
                    let abs_path = self
                        .dot_git_path
                        .parent()
                        .unwrap()
                        .join(&path.as_std_path());
                    Box::pin(async move { (path.clone(), self.fs.load(&abs_path).await.ok()) })
                })
                .collect::<Vec<_>>();
            let contents = join_all(contents).await;
            self.with_state_async(true, move |state| {
                for (path, content) in contents {
                    if let Some(content) = content {
                        state.index_contents.insert(path, content);
                    } else {
                        state.index_contents.remove(&path);
                    }
                }
                Ok(())
            })
            .await
        })
    }
639
640 fn unstage_paths(
641 &self,
642 paths: Vec<RepoPath>,
643 _env: Arc<HashMap<String, String>>,
644 ) -> BoxFuture<'_, Result<()>> {
645 self.with_state_async(true, move |state| {
646 for path in paths {
647 match state.head_contents.get(&path) {
648 Some(content) => state.index_contents.insert(path, content.clone()),
649 None => state.index_contents.remove(&path),
650 };
651 }
652 Ok(())
653 })
654 }
655
    // Panics: stashing is not supported by the fake.
    fn stash_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Panics: stash pop is not supported by the fake.
    fn stash_pop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Panics: stash apply is not supported by the fake.
    fn stash_apply(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Panics: stash drop is not supported by the fake.
    fn stash_drop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
687
    // Pretends the commit succeeded; no repository state is modified.
    fn commit(
        &self,
        _message: gpui::SharedString,
        _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
        _options: CommitOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        async { Ok(()) }.boxed()
    }

    // Pretends the hook ran successfully; nothing is executed.
    fn run_hook(
        &self,
        _hook: RunHook,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        async { Ok(()) }.boxed()
    }
706
    // Panics: pushing is not supported by the fake.
    fn push(
        &self,
        _branch: String,
        _remote_branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    // Panics: pulling is not supported by the fake.
    fn pull(
        &self,
        _branch: Option<String>,
        _remote: String,
        _rebase: bool,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    // Panics: fetching is not supported by the fake.
    fn fetch(
        &self,
        _fetch_options: FetchOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
741
742 fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
743 self.with_state_async(false, move |state| {
744 let remotes = state
745 .remotes
746 .keys()
747 .map(|r| Remote {
748 name: r.clone().into(),
749 })
750 .collect::<Vec<_>>();
751 Ok(remotes)
752 })
753 }
754
    // Panics: resolving a push remote is not supported by the fake.
    fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }

    // Panics: resolving a branch's remote is not supported by the fake.
    fn get_branch_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }
762
763 fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
764 future::ready(Ok(Vec::new())).boxed()
765 }
766
    // Panics: producing a textual diff is not supported by the fake.
    fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result<String>> {
        unimplemented!()
    }
770
771 fn diff_stat(
772 &self,
773 diff_type: git::repository::DiffType,
774 ) -> BoxFuture<'_, Result<HashMap<RepoPath, git::status::DiffStat>>> {
775 fn count_lines(s: &str) -> u32 {
776 if s.is_empty() {
777 0
778 } else {
779 s.lines().count() as u32
780 }
781 }
782
783 match diff_type {
784 git::repository::DiffType::HeadToIndex => self
785 .with_state_async(false, |state| {
786 let mut result = HashMap::default();
787 let all_paths: HashSet<&RepoPath> = state
788 .head_contents
789 .keys()
790 .chain(state.index_contents.keys())
791 .collect();
792 for path in all_paths {
793 let head = state.head_contents.get(path);
794 let index = state.index_contents.get(path);
795 match (head, index) {
796 (Some(old), Some(new)) if old != new => {
797 result.insert(
798 path.clone(),
799 git::status::DiffStat {
800 added: count_lines(new),
801 deleted: count_lines(old),
802 },
803 );
804 }
805 (Some(old), None) => {
806 result.insert(
807 path.clone(),
808 git::status::DiffStat {
809 added: 0,
810 deleted: count_lines(old),
811 },
812 );
813 }
814 (None, Some(new)) => {
815 result.insert(
816 path.clone(),
817 git::status::DiffStat {
818 added: count_lines(new),
819 deleted: 0,
820 },
821 );
822 }
823 _ => {}
824 }
825 }
826 Ok(result)
827 })
828 .boxed(),
829 git::repository::DiffType::HeadToWorktree => {
830 let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf();
831 let worktree_files: HashMap<RepoPath, String> = self
832 .fs
833 .files()
834 .iter()
835 .filter_map(|path| {
836 let repo_path = path.strip_prefix(&workdir_path).ok()?;
837 if repo_path.starts_with(".git") {
838 return None;
839 }
840 let content = self
841 .fs
842 .read_file_sync(path)
843 .ok()
844 .and_then(|bytes| String::from_utf8(bytes).ok())?;
845 let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
846 Some((RepoPath::from_rel_path(&repo_path), content))
847 })
848 .collect();
849
850 self.with_state_async(false, move |state| {
851 let mut result = HashMap::default();
852 let all_paths: HashSet<&RepoPath> = state
853 .head_contents
854 .keys()
855 .chain(worktree_files.keys())
856 .collect();
857 for path in all_paths {
858 let head = state.head_contents.get(path);
859 let worktree = worktree_files.get(path);
860 match (head, worktree) {
861 (Some(old), Some(new)) if old != new => {
862 result.insert(
863 path.clone(),
864 git::status::DiffStat {
865 added: count_lines(new),
866 deleted: count_lines(old),
867 },
868 );
869 }
870 (Some(old), None) => {
871 result.insert(
872 path.clone(),
873 git::status::DiffStat {
874 added: 0,
875 deleted: count_lines(old),
876 },
877 );
878 }
879 (None, Some(new)) => {
880 result.insert(
881 path.clone(),
882 git::status::DiffStat {
883 added: count_lines(new),
884 deleted: 0,
885 },
886 );
887 }
888 _ => {}
889 }
890 }
891 Ok(result)
892 })
893 .boxed()
894 }
895 git::repository::DiffType::MergeBase { .. } => {
896 future::ready(Ok(HashMap::default())).boxed()
897 }
898 }
899 }
900
901 fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
902 let executor = self.executor.clone();
903 let fs = self.fs.clone();
904 let checkpoints = self.checkpoints.clone();
905 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
906 async move {
907 executor.simulate_random_delay().await;
908 let oid = git::Oid::random(&mut *executor.rng().lock());
909 let entry = fs.entry(&repository_dir_path)?;
910 checkpoints.lock().insert(oid, entry);
911 Ok(GitRepositoryCheckpoint { commit_sha: oid })
912 }
913 .boxed()
914 }
915
916 fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
917 let executor = self.executor.clone();
918 let fs = self.fs.clone();
919 let checkpoints = self.checkpoints.clone();
920 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
921 async move {
922 executor.simulate_random_delay().await;
923 let checkpoints = checkpoints.lock();
924 let entry = checkpoints
925 .get(&checkpoint.commit_sha)
926 .context(format!("invalid checkpoint: {}", checkpoint.commit_sha))?;
927 fs.insert_entry(&repository_dir_path, entry.clone())?;
928 Ok(())
929 }
930 .boxed()
931 }
932
933 fn compare_checkpoints(
934 &self,
935 left: GitRepositoryCheckpoint,
936 right: GitRepositoryCheckpoint,
937 ) -> BoxFuture<'_, Result<bool>> {
938 let executor = self.executor.clone();
939 let checkpoints = self.checkpoints.clone();
940 async move {
941 executor.simulate_random_delay().await;
942 let checkpoints = checkpoints.lock();
943 let left = checkpoints
944 .get(&left.commit_sha)
945 .context(format!("invalid left checkpoint: {}", left.commit_sha))?;
946 let right = checkpoints
947 .get(&right.commit_sha)
948 .context(format!("invalid right checkpoint: {}", right.commit_sha))?;
949
950 Ok(left == right)
951 }
952 .boxed()
953 }
954
    // Panics: diffing two checkpoints is not supported by the fake.
    fn diff_checkpoints(
        &self,
        _base_checkpoint: GitRepositoryCheckpoint,
        _target_checkpoint: GitRepositoryCheckpoint,
    ) -> BoxFuture<'_, Result<String>> {
        unimplemented!()
    }
962
963 fn default_branch(
964 &self,
965 include_remote_name: bool,
966 ) -> BoxFuture<'_, Result<Option<SharedString>>> {
967 async move {
968 Ok(Some(if include_remote_name {
969 "origin/main".into()
970 } else {
971 "main".into()
972 }))
973 }
974 .boxed()
975 }
976
    // Registers (or overwrites) a remote with the given URL.
    fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            state.remotes.insert(name, url);
            Ok(())
        })
    }

    // Removes the named remote; succeeds even if it did not exist.
    fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            state.remotes.remove(&name);
            Ok(())
        })
    }
990
991 fn initial_graph_data(
992 &self,
993 _log_source: LogSource,
994 _log_order: LogOrder,
995 request_tx: Sender<Vec<Arc<InitialGraphCommitData>>>,
996 ) -> BoxFuture<'_, Result<()>> {
997 let fs = self.fs.clone();
998 let dot_git_path = self.dot_git_path.clone();
999 async move {
1000 let graph_commits =
1001 fs.with_git_state(&dot_git_path, false, |state| state.graph_commits.clone())?;
1002
1003 for chunk in graph_commits.chunks(GRAPH_CHUNK_SIZE) {
1004 request_tx.send(chunk.to_vec()).await.ok();
1005 }
1006 Ok(())
1007 }
1008 .boxed()
1009 }
1010
    // Fails (rather than panicking) because callers handle this gracefully.
    fn commit_data_reader(&self) -> Result<CommitDataReader> {
        anyhow::bail!("commit_data_reader not supported for FakeGitRepository")
    }
1014}
1015
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{FakeFs, Fs};
    use gpui::TestAppContext;
    use serde_json::json;
    use std::path::Path;

    // Exercises the full create/rename/remove worktree lifecycle against the
    // fake repository, for several `worktree_directory`-style settings
    // (relative to the project, inside `.git`, and a plain subdirectory),
    // asserting that the in-memory state and the FakeFs stay in sync.
    #[gpui::test]
    async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) {
        let worktree_dir_settings = &["../worktrees", ".git/zed-worktrees", "my-worktrees/"];

        for worktree_dir_setting in worktree_dir_settings {
            let fs = FakeFs::new(cx.executor());
            fs.insert_tree("/project", json!({".git": {}, "file.txt": "content"}))
                .await;
            let repo = fs
                .open_repo(Path::new("/project/.git"), None)
                .expect("should open fake repo");

            // Initially no worktrees
            let worktrees = repo.worktrees().await.unwrap();
            assert!(worktrees.is_empty());

            let expected_dir = git::repository::resolve_worktree_directory(
                Path::new("/project"),
                worktree_dir_setting,
            );

            // Create a worktree
            repo.create_worktree(
                "feature-branch".to_string(),
                expected_dir.clone(),
                Some("abc123".to_string()),
            )
            .await
            .unwrap();

            // List worktrees — should have one
            let worktrees = repo.worktrees().await.unwrap();
            assert_eq!(worktrees.len(), 1);
            assert_eq!(
                worktrees[0].path,
                expected_dir.join("feature-branch"),
                "failed for worktree_directory setting: {worktree_dir_setting:?}"
            );
            assert_eq!(worktrees[0].ref_name.as_ref(), "refs/heads/feature-branch");
            assert_eq!(worktrees[0].sha.as_ref(), "abc123");

            // Directory should exist in FakeFs after create
            assert!(
                fs.is_dir(&expected_dir.join("feature-branch")).await,
                "worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}"
            );

            // Create a second worktree (without explicit commit)
            repo.create_worktree("bugfix-branch".to_string(), expected_dir.clone(), None)
                .await
                .unwrap();

            let worktrees = repo.worktrees().await.unwrap();
            assert_eq!(worktrees.len(), 2);
            assert!(
                fs.is_dir(&expected_dir.join("bugfix-branch")).await,
                "second worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}"
            );

            // Rename the first worktree
            repo.rename_worktree(
                expected_dir.join("feature-branch"),
                expected_dir.join("renamed-branch"),
            )
            .await
            .unwrap();

            let worktrees = repo.worktrees().await.unwrap();
            assert_eq!(worktrees.len(), 2);
            assert!(
                worktrees
                    .iter()
                    .any(|w| w.path == expected_dir.join("renamed-branch")),
                "renamed worktree should exist at new path for setting {worktree_dir_setting:?}"
            );
            assert!(
                worktrees
                    .iter()
                    .all(|w| w.path != expected_dir.join("feature-branch")),
                "old path should no longer exist for setting {worktree_dir_setting:?}"
            );

            // Directory should be moved in FakeFs after rename
            assert!(
                !fs.is_dir(&expected_dir.join("feature-branch")).await,
                "old worktree directory should not exist after rename for setting {worktree_dir_setting:?}"
            );
            assert!(
                fs.is_dir(&expected_dir.join("renamed-branch")).await,
                "new worktree directory should exist after rename for setting {worktree_dir_setting:?}"
            );

            // Rename a nonexistent worktree should fail
            let result = repo
                .rename_worktree(PathBuf::from("/nonexistent"), PathBuf::from("/somewhere"))
                .await;
            assert!(result.is_err());

            // Remove a worktree
            repo.remove_worktree(expected_dir.join("renamed-branch"), false)
                .await
                .unwrap();

            let worktrees = repo.worktrees().await.unwrap();
            assert_eq!(worktrees.len(), 1);
            assert_eq!(worktrees[0].path, expected_dir.join("bugfix-branch"));

            // Directory should be removed from FakeFs after remove
            assert!(
                !fs.is_dir(&expected_dir.join("renamed-branch")).await,
                "worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}"
            );

            // Remove a nonexistent worktree should fail
            let result = repo
                .remove_worktree(PathBuf::from("/nonexistent"), false)
                .await;
            assert!(result.is_err());

            // Remove the last worktree
            repo.remove_worktree(expected_dir.join("bugfix-branch"), false)
                .await
                .unwrap();

            let worktrees = repo.worktrees().await.unwrap();
            assert!(worktrees.is_empty());
            assert!(
                !fs.is_dir(&expected_dir.join("bugfix-branch")).await,
                "last worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}"
            );
        }
    }
}