1use crate::{FakeFs, FakeFsEntry, Fs, RemoveOptions, RenameOptions};
2use anyhow::{Context as _, Result, bail};
3use collections::{HashMap, HashSet};
4use futures::future::{self, BoxFuture, join_all};
5use git::{
6 Oid, RunHook,
7 blame::Blame,
8 repository::{
9 AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions, FetchOptions,
10 GRAPH_CHUNK_SIZE, GitRepository, GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder,
11 LogSource, PushOptions, Remote, RepoPath, ResetMode, Worktree,
12 },
13 status::{
14 DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
15 UnmergedStatus,
16 },
17};
18use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task};
19use ignore::gitignore::GitignoreBuilder;
20use parking_lot::Mutex;
21use rope::Rope;
22use smol::{channel::Sender, future::FutureExt as _};
23use std::{path::PathBuf, sync::Arc};
24use text::LineEnding;
25use util::{paths::PathStyle, rel_path::RelPath};
26
#[derive(Clone)]
pub struct FakeGitRepository {
    /// In-memory filesystem that backs this repository's files and git state.
    pub(crate) fs: Arc<FakeFs>,
    /// Snapshots of the working directory, keyed by fake checkpoint sha.
    /// Shared across clones so checkpoints survive cloning the handle.
    pub(crate) checkpoints: Arc<Mutex<HashMap<Oid, FakeFsEntry>>>,
    /// Executor used to simulate random async delays in tests.
    pub(crate) executor: BackgroundExecutor,
    /// Path to the `.git` directory whose fake state this repository reads and writes.
    pub(crate) dot_git_path: PathBuf,
    /// Path reported by `path()`; its parent is treated as the working directory.
    pub(crate) repository_dir_path: PathBuf,
    /// Path reported by `main_repository_path()` (the common dir for worktrees).
    pub(crate) common_dir_path: PathBuf,
}
36
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    /// Channel used to notify listeners when a path in the repository changes.
    pub event_emitter: smol::channel::Sender<PathBuf>,
    /// Paths currently in a conflicted (unmerged) state.
    pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
    /// Contents of each file as of the HEAD commit.
    pub head_contents: HashMap<RepoPath, String>,
    /// Contents of each file as currently staged in the index.
    pub index_contents: HashMap<RepoPath, String>,
    // everything in commit contents is in oids
    pub merge_base_contents: HashMap<RepoPath, Oid>,
    /// Fake object store: blob contents keyed by object id.
    pub oids: HashMap<Oid, String>,
    /// Precomputed blame results, keyed by path.
    pub blames: HashMap<RepoPath, Blame>,
    /// Name of the checked-out branch, if any.
    pub current_branch_name: Option<String>,
    /// All branch names known to the repository.
    pub branches: HashSet<String>,
    /// List of remotes, keys are names and values are URLs
    pub remotes: HashMap<String, String>,
    /// When set, `set_index_text` fails with this message (for error-path tests).
    pub simulated_index_write_error_message: Option<String>,
    /// When set, `create_worktree` fails with this message (for error-path tests).
    pub simulated_create_worktree_error: Option<String>,
    /// Ref name → sha mappings used by `revparse_batch`.
    pub refs: HashMap<String, String>,
    /// Commits streamed by `initial_graph_data`.
    pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
    /// Linked worktrees registered via `create_worktree`.
    pub worktrees: Vec<Worktree>,
}
57
58impl FakeGitRepositoryState {
59 pub fn new(event_emitter: smol::channel::Sender<PathBuf>) -> Self {
60 FakeGitRepositoryState {
61 event_emitter,
62 head_contents: Default::default(),
63 index_contents: Default::default(),
64 unmerged_paths: Default::default(),
65 blames: Default::default(),
66 current_branch_name: Default::default(),
67 branches: Default::default(),
68 simulated_index_write_error_message: Default::default(),
69 simulated_create_worktree_error: Default::default(),
70 refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
71 merge_base_contents: Default::default(),
72 oids: Default::default(),
73 remotes: HashMap::default(),
74 graph_commits: Vec::new(),
75 worktrees: Vec::new(),
76 }
77 }
78}
79
80impl FakeGitRepository {
81 fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<'static, Result<T>>
82 where
83 F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> Result<T>,
84 T: Send,
85 {
86 let fs = self.fs.clone();
87 let executor = self.executor.clone();
88 let dot_git_path = self.dot_git_path.clone();
89 async move {
90 executor.simulate_random_delay().await;
91 fs.with_git_state(&dot_git_path, write, f)?
92 }
93 .boxed()
94 }
95}
96
impl GitRepository for FakeGitRepository {
    /// No-op: the fake has no on-disk index to reload.
    fn reload_index(&self) {}

    /// Returns the staged contents of `path`, or `None` if it is not in the index.
    fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
        let fut = self.with_state_async(false, move |state| {
            state
                .index_contents
                .get(&path)
                .context("not present in index")
                .cloned()
        });
        // Spawn on the background executor; any lookup error becomes `None`.
        self.executor.spawn(async move { fut.await.ok() }).boxed()
    }

    /// Returns the committed (HEAD) contents of `path`, or `None` if absent from HEAD.
    fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
        let fut = self.with_state_async(false, move |state| {
            state
                .head_contents
                .get(&path)
                .context("not present in HEAD")
                .cloned()
        });
        self.executor.spawn(async move { fut.await.ok() }).boxed()
    }

    /// Looks up a blob's contents by object id in the fake object store.
    fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
        self.with_state_async(false, move |state| {
            state.oids.get(&oid).cloned().context("oid does not exist")
        })
        .boxed()
    }
128
    /// Not supported by the fake; panics if called.
    fn load_commit(
        &self,
        _commit: String,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::CommitDiff>> {
        unimplemented!()
    }

    /// Stages `content` for `path`, or removes the index entry when `content`
    /// is `None`. Fails with the configured message when a simulated index
    /// write error has been set on the state.
    fn set_index_text(
        &self,
        path: RepoPath,
        content: Option<String>,
        _env: Arc<HashMap<String, String>>,
        _is_executable: bool,
    ) -> BoxFuture<'_, anyhow::Result<()>> {
        self.with_state_async(true, move |state| {
            if let Some(message) = &state.simulated_index_write_error_message {
                anyhow::bail!("{message}");
            } else if let Some(content) = content {
                state.index_contents.insert(path, content);
            } else {
                state.index_contents.remove(&path);
            }
            Ok(())
        })
    }
155
156 fn remote_url(&self, name: &str) -> BoxFuture<'_, Option<String>> {
157 let name = name.to_string();
158 let fut = self.with_state_async(false, move |state| {
159 state
160 .remotes
161 .get(&name)
162 .context("remote not found")
163 .cloned()
164 });
165 async move { fut.await.ok() }.boxed()
166 }
167
168 fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
169 let mut entries = HashMap::default();
170 self.with_state_async(false, |state| {
171 for (path, content) in &state.head_contents {
172 let status = if let Some((oid, original)) = state
173 .merge_base_contents
174 .get(path)
175 .map(|oid| (oid, &state.oids[oid]))
176 {
177 if original == content {
178 continue;
179 }
180 TreeDiffStatus::Modified { old: *oid }
181 } else {
182 TreeDiffStatus::Added
183 };
184 entries.insert(path.clone(), status);
185 }
186 for (path, oid) in &state.merge_base_contents {
187 if !entries.contains_key(path) {
188 entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
189 }
190 }
191 Ok(TreeDiff { entries })
192 })
193 .boxed()
194 }
195
196 fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
197 self.with_state_async(false, |state| {
198 Ok(revs
199 .into_iter()
200 .map(|rev| state.refs.get(&rev).cloned())
201 .collect())
202 })
203 }
204
205 fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
206 async {
207 Ok(CommitDetails {
208 sha: commit.into(),
209 message: "initial commit".into(),
210 ..Default::default()
211 })
212 }
213 .boxed()
214 }
215
    /// Not supported by the fake; panics if called.
    fn reset(
        &self,
        _commit: String,
        _mode: ResetMode,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// Not supported by the fake; panics if called.
    fn checkout_files(
        &self,
        _commit: String,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// Returns the repository directory path this fake was configured with.
    fn path(&self) -> PathBuf {
        self.repository_dir_path.clone()
    }

    /// Returns the common directory path (the main repository for worktrees).
    fn main_repository_path(&self) -> PathBuf {
        self.common_dir_path.clone()
    }

    /// The fake never has an in-progress merge, so there is never a merge message.
    fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
        async move { None }.boxed()
    }
245
    /// Computes `git status`-like output for paths under any of `path_prefixes`,
    /// by comparing HEAD contents, index contents, and the working copy on the
    /// fake filesystem. Unmodified tracked files and ignored untracked files
    /// are omitted; entries are returned sorted by path.
    fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>> {
        let workdir_path = self.dot_git_path.parent().unwrap();

        // Load gitignores
        let ignores = workdir_path
            .ancestors()
            .filter_map(|dir| {
                let ignore_path = dir.join(".gitignore");
                let content = self.fs.read_file_sync(ignore_path).ok()?;
                let content = String::from_utf8(content).ok()?;
                let mut builder = GitignoreBuilder::new(dir);
                for line in content.lines() {
                    builder.add_line(Some(dir.into()), line).ok()?;
                }
                builder.build().ok()
            })
            .collect::<Vec<_>>();

        // Load working copy files.
        let git_files: HashMap<RepoPath, (String, bool)> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                // TODO better simulate git status output in the case of submodules and worktrees
                let repo_path = path.strip_prefix(workdir_path).ok()?;
                // Anything under .git is treated as ignored; gitignore rules
                // may additionally ignore or whitelist the path.
                let mut is_ignored = repo_path.starts_with(".git");
                for ignore in &ignores {
                    match ignore.matched_path_or_any_parents(path, false) {
                        ignore::Match::None => {}
                        ignore::Match::Ignore(_) => is_ignored = true,
                        ignore::Match::Whitelist(_) => break,
                    }
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .map(|content| String::from_utf8(content).unwrap())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored)))
            })
            .collect();

        let result = self.fs.with_git_state(&self.dot_git_path, false, |state| {
            let mut entries = Vec::new();
            // Consider every path that appears in HEAD, the index, or the
            // working copy (deduplicated via the set).
            let paths = state
                .head_contents
                .keys()
                .chain(state.index_contents.keys())
                .chain(git_files.keys())
                .collect::<HashSet<_>>();
            for path in paths {
                if !path_prefixes.iter().any(|prefix| path.starts_with(prefix)) {
                    continue;
                }

                let head = state.head_contents.get(path);
                let index = state.index_contents.get(path);
                let unmerged = state.unmerged_paths.get(path);
                let fs = git_files.get(path);
                // Classify by presence/absence in (unmerged, HEAD, index, worktree).
                let status = match (unmerged, head, index, fs) {
                    // Conflicts trump everything else.
                    (Some(unmerged), _, _, _) => FileStatus::Unmerged(*unmerged),
                    // Present everywhere: modified iff the contents differ.
                    (_, Some(head), Some(index), Some((fs, _))) => {
                        FileStatus::Tracked(TrackedStatus {
                            index_status: if head == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                            worktree_status: if fs == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                        })
                    }
                    // Missing from the worktree: deleted on disk.
                    (_, Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: if head == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Removed from the index but still on disk.
                    (_, Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Added,
                    }),
                    // Removed from both the index and the worktree.
                    (_, Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Newly staged file.
                    (_, None, Some(index), Some((fs, _))) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: if fs == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                    }),
                    // Staged, then deleted from the worktree.
                    (_, None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Only on disk: untracked, unless ignored.
                    (_, None, None, Some((_, is_ignored))) => {
                        if *is_ignored {
                            continue;
                        }
                        FileStatus::Untracked
                    }
                    // `paths` only contains keys from the three maps above.
                    (_, None, None, None) => {
                        unreachable!();
                    }
                };
                // Fully unmodified tracked files are not reported.
                if status
                    != FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Unmodified,
                        worktree_status: StatusCode::Unmodified,
                    })
                {
                    entries.push((path.clone(), status));
                }
            }
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            anyhow::Ok(GitStatus {
                entries: entries.into(),
            })
        });
        Task::ready(match result {
            Ok(result) => result,
            Err(e) => Err(e),
        })
    }
380
    /// The fake has no stash; always reports an empty one.
    fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
        async { Ok(git::stash::GitStash::default()) }.boxed()
    }

    /// Lists all branches, normalizing bare names to `refs/heads/…` and
    /// marking the currently checked-out branch with `is_head`.
    fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
        self.with_state_async(false, move |state| {
            let current_branch = &state.current_branch_name;
            Ok(state
                .branches
                .iter()
                .map(|branch_name| {
                    let ref_name = if branch_name.starts_with("refs/") {
                        branch_name.into()
                    } else {
                        format!("refs/heads/{branch_name}").into()
                    };
                    Branch {
                        is_head: Some(branch_name) == current_branch.as_ref(),
                        ref_name,
                        most_recent_commit: None,
                        upstream: None,
                    }
                })
                .collect())
        })
    }

    /// Returns the worktrees registered via `create_worktree`.
    fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
        self.with_state_async(false, |state| Ok(state.worktrees.clone()))
    }
411
412 fn create_worktree(
413 &self,
414 name: String,
415 directory: PathBuf,
416 from_commit: Option<String>,
417 ) -> BoxFuture<'_, Result<()>> {
418 let fs = self.fs.clone();
419 let executor = self.executor.clone();
420 let dot_git_path = self.dot_git_path.clone();
421 async move {
422 let path = directory.join(&name);
423 executor.simulate_random_delay().await;
424 // Check for simulated error before any side effects
425 fs.with_git_state(&dot_git_path, false, |state| {
426 if let Some(message) = &state.simulated_create_worktree_error {
427 anyhow::bail!("{message}");
428 }
429 Ok(())
430 })??;
431 // Create directory before updating state so state is never
432 // inconsistent with the filesystem
433 fs.create_dir(&path).await?;
434 fs.with_git_state(&dot_git_path, true, {
435 let path = path.clone();
436 move |state| {
437 if state.branches.contains(&name) {
438 bail!("a branch named '{}' already exists", name);
439 }
440 let ref_name = format!("refs/heads/{name}");
441 let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string());
442 state.refs.insert(ref_name.clone(), sha.clone());
443 state.worktrees.push(Worktree {
444 path,
445 ref_name: ref_name.into(),
446 sha: sha.into(),
447 });
448 state.branches.insert(name);
449 Ok::<(), anyhow::Error>(())
450 }
451 })??;
452 Ok(())
453 }
454 .boxed()
455 }
456
    /// Removes the worktree rooted at `path`: validates it is registered,
    /// recursively deletes its directory, then drops it from state.
    /// `_force` is accepted for interface parity but ignored.
    fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // Validate the worktree exists in state before touching the filesystem
            fs.with_git_state(&dot_git_path, false, {
                let path = path.clone();
                move |state| {
                    if !state.worktrees.iter().any(|w| w.path == path) {
                        bail!("no worktree found at path: {}", path.display());
                    }
                    Ok(())
                }
            })??;
            // Now remove the directory
            fs.remove_dir(
                &path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: false,
                },
            )
            .await?;
            // Update state
            fs.with_git_state(&dot_git_path, true, move |state| {
                state.worktrees.retain(|worktree| worktree.path != path);
                Ok::<(), anyhow::Error>(())
            })??;
            Ok(())
        }
        .boxed()
    }
491
    /// Moves the worktree at `old_path` to `new_path`: validates it is
    /// registered, renames the directory on the fake filesystem, then updates
    /// the recorded worktree path.
    fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // Validate the worktree exists in state before touching the filesystem
            fs.with_git_state(&dot_git_path, false, {
                let old_path = old_path.clone();
                move |state| {
                    if !state.worktrees.iter().any(|w| w.path == old_path) {
                        bail!("no worktree found at path: {}", old_path.display());
                    }
                    Ok(())
                }
            })??;
            // Now move the directory
            fs.rename(
                &old_path,
                &new_path,
                RenameOptions {
                    overwrite: false,
                    ignore_if_exists: false,
                    create_parents: true,
                },
            )
            .await?;
            // Update state
            fs.with_git_state(&dot_git_path, true, move |state| {
                let worktree = state
                    .worktrees
                    .iter_mut()
                    .find(|worktree| worktree.path == old_path)
                    .expect("worktree was validated above");
                worktree.path = new_path;
                Ok::<(), anyhow::Error>(())
            })??;
            Ok(())
        }
        .boxed()
    }
533
534 fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
535 self.with_state_async(true, |state| {
536 state.current_branch_name = Some(name);
537 Ok(())
538 })
539 }
540
541 fn create_branch(
542 &self,
543 name: String,
544 _base_branch: Option<String>,
545 ) -> BoxFuture<'_, Result<()>> {
546 self.with_state_async(true, move |state| {
547 state.branches.insert(name);
548 Ok(())
549 })
550 }
551
552 fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> {
553 self.with_state_async(true, move |state| {
554 if !state.branches.remove(&branch) {
555 bail!("no such branch: {branch}");
556 }
557 state.branches.insert(new_name.clone());
558 if state.current_branch_name == Some(branch) {
559 state.current_branch_name = Some(new_name);
560 }
561 Ok(())
562 })
563 }
564
565 fn delete_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
566 self.with_state_async(true, move |state| {
567 if !state.branches.remove(&name) {
568 bail!("no such branch: {name}");
569 }
570 Ok(())
571 })
572 }
573
574 fn blame(
575 &self,
576 path: RepoPath,
577 _content: Rope,
578 _line_ending: LineEnding,
579 ) -> BoxFuture<'_, Result<git::blame::Blame>> {
580 self.with_state_async(false, move |state| {
581 state
582 .blames
583 .get(&path)
584 .with_context(|| format!("failed to get blame for {:?}", path))
585 .cloned()
586 })
587 }
588
589 fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
590 self.file_history_paginated(path, 0, None)
591 }
592
593 fn file_history_paginated(
594 &self,
595 path: RepoPath,
596 _skip: usize,
597 _limit: Option<usize>,
598 ) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
599 async move {
600 Ok(git::repository::FileHistory {
601 entries: Vec::new(),
602 path,
603 })
604 }
605 .boxed()
606 }
607
    /// Stages the given paths by copying their current working-copy contents
    /// into the index; paths missing from the filesystem have their index
    /// entries removed (mirroring `git add` on a deleted file).
    fn stage_paths(
        &self,
        paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        Box::pin(async move {
            // Read all file contents concurrently before taking the git state.
            let contents = paths
                .into_iter()
                .map(|path| {
                    let abs_path = self
                        .dot_git_path
                        .parent()
                        .unwrap()
                        .join(&path.as_std_path());
                    Box::pin(async move { (path.clone(), self.fs.load(&abs_path).await.ok()) })
                })
                .collect::<Vec<_>>();
            let contents = join_all(contents).await;
            self.with_state_async(true, move |state| {
                for (path, content) in contents {
                    if let Some(content) = content {
                        state.index_contents.insert(path, content);
                    } else {
                        state.index_contents.remove(&path);
                    }
                }
                Ok(())
            })
            .await
        })
    }

    /// Unstages paths by restoring their index entries to the HEAD contents,
    /// or dropping the entry entirely for paths that are not in HEAD.
    fn unstage_paths(
        &self,
        paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            for path in paths {
                match state.head_contents.get(&path) {
                    Some(content) => state.index_contents.insert(path, content.clone()),
                    None => state.index_contents.remove(&path),
                };
            }
            Ok(())
        })
    }
655
    /// Not supported by the fake; panics if called.
    fn stash_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// Not supported by the fake; panics if called.
    fn stash_pop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// Not supported by the fake; panics if called.
    fn stash_apply(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// Not supported by the fake; panics if called.
    fn stash_drop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
687
    /// Pretends to commit successfully without recording anything in state.
    fn commit(
        &self,
        _message: gpui::SharedString,
        _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
        _options: CommitOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        async { Ok(()) }.boxed()
    }

    /// Pretends the hook ran successfully; the fake executes nothing.
    fn run_hook(
        &self,
        _hook: RunHook,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        async { Ok(()) }.boxed()
    }

    /// Not supported by the fake; panics if called.
    fn push(
        &self,
        _branch: String,
        _remote_branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    /// Not supported by the fake; panics if called.
    fn pull(
        &self,
        _branch: Option<String>,
        _remote: String,
        _rebase: bool,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    /// Not supported by the fake; panics if called.
    fn fetch(
        &self,
        _fetch_options: FetchOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
741
    /// Lists every registered remote by name (URLs are not included in `Remote`).
    fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
        self.with_state_async(false, move |state| {
            let remotes = state
                .remotes
                .keys()
                .map(|r| Remote {
                    name: r.clone().into(),
                })
                .collect::<Vec<_>>();
            Ok(remotes)
        })
    }

    /// Not supported by the fake; panics if called.
    fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }

    /// Not supported by the fake; panics if called.
    fn get_branch_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }

    /// The fake never detects pushed commits; always returns an empty list.
    fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
        future::ready(Ok(Vec::new())).boxed()
    }

    /// Not supported by the fake; panics if called.
    fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result<String>> {
        unimplemented!()
    }
770
    /// Snapshots the working directory (the parent of `repository_dir_path`)
    /// under a freshly generated random sha, and returns a checkpoint handle.
    fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
        let executor = self.executor.clone();
        let fs = self.fs.clone();
        let checkpoints = self.checkpoints.clone();
        let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
        async move {
            executor.simulate_random_delay().await;
            let oid = git::Oid::random(&mut *executor.rng().lock());
            let entry = fs.entry(&repository_dir_path)?;
            checkpoints.lock().insert(oid, entry);
            Ok(GitRepositoryCheckpoint { commit_sha: oid })
        }
        .boxed()
    }

    /// Restores the working directory to a previously captured checkpoint,
    /// failing if the checkpoint sha is unknown.
    fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
        let executor = self.executor.clone();
        let fs = self.fs.clone();
        let checkpoints = self.checkpoints.clone();
        let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
        async move {
            executor.simulate_random_delay().await;
            let checkpoints = checkpoints.lock();
            let entry = checkpoints
                .get(&checkpoint.commit_sha)
                .context(format!("invalid checkpoint: {}", checkpoint.commit_sha))?;
            fs.insert_entry(&repository_dir_path, entry.clone())?;
            Ok(())
        }
        .boxed()
    }

    /// Returns whether two checkpoints captured identical directory snapshots.
    /// Fails if either sha is unknown.
    fn compare_checkpoints(
        &self,
        left: GitRepositoryCheckpoint,
        right: GitRepositoryCheckpoint,
    ) -> BoxFuture<'_, Result<bool>> {
        let executor = self.executor.clone();
        let checkpoints = self.checkpoints.clone();
        async move {
            executor.simulate_random_delay().await;
            let checkpoints = checkpoints.lock();
            let left = checkpoints
                .get(&left.commit_sha)
                .context(format!("invalid left checkpoint: {}", left.commit_sha))?;
            let right = checkpoints
                .get(&right.commit_sha)
                .context(format!("invalid right checkpoint: {}", right.commit_sha))?;

            Ok(left == right)
        }
        .boxed()
    }

    /// Not supported by the fake; panics if called.
    fn diff_checkpoints(
        &self,
        _base_checkpoint: GitRepositoryCheckpoint,
        _target_checkpoint: GitRepositoryCheckpoint,
    ) -> BoxFuture<'_, Result<String>> {
        unimplemented!()
    }
832
833 fn default_branch(
834 &self,
835 include_remote_name: bool,
836 ) -> BoxFuture<'_, Result<Option<SharedString>>> {
837 async move {
838 Ok(Some(if include_remote_name {
839 "origin/main".into()
840 } else {
841 "main".into()
842 }))
843 }
844 .boxed()
845 }
846
847 fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
848 self.with_state_async(true, move |state| {
849 state.remotes.insert(name, url);
850 Ok(())
851 })
852 }
853
854 fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
855 self.with_state_async(true, move |state| {
856 state.remotes.remove(&name);
857 Ok(())
858 })
859 }
860
    /// Streams the preconfigured `graph_commits` to `request_tx` in chunks of
    /// `GRAPH_CHUNK_SIZE`, ignoring the requested log source and order.
    fn initial_graph_data(
        &self,
        _log_source: LogSource,
        _log_order: LogOrder,
        request_tx: Sender<Vec<Arc<InitialGraphCommitData>>>,
    ) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            let graph_commits =
                fs.with_git_state(&dot_git_path, false, |state| state.graph_commits.clone())?;

            for chunk in graph_commits.chunks(GRAPH_CHUNK_SIZE) {
                // A closed receiver is not an error; just stop notifying.
                request_tx.send(chunk.to_vec()).await.ok();
            }
            Ok(())
        }
        .boxed()
    }

    /// Not supported by the fake; returns an error (rather than panicking) so
    /// callers can handle the absence of a reader.
    fn commit_data_reader(&self) -> Result<CommitDataReader> {
        anyhow::bail!("commit_data_reader not supported for FakeGitRepository")
    }
}
885
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{FakeFs, Fs};
    use gpui::TestAppContext;
    use serde_json::json;
    use std::path::Path;

    /// Exercises the full worktree lifecycle on the fake repository — create,
    /// list, rename, remove — verifying that the in-memory git state and the
    /// FakeFs directory tree stay in sync at every step, including the error
    /// paths for nonexistent worktrees.
    #[gpui::test]
    async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) {
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree("/project", json!({".git": {}, "file.txt": "content"}))
            .await;
        let repo = fs
            .open_repo(Path::new("/project/.git"), None)
            .expect("should open fake repo");

        // Initially no worktrees
        let worktrees = repo.worktrees().await.unwrap();
        assert!(worktrees.is_empty());

        // Create a worktree
        repo.create_worktree(
            "feature-branch".to_string(),
            PathBuf::from("/worktrees"),
            Some("abc123".to_string()),
        )
        .await
        .unwrap();

        // List worktrees — should have one
        let worktrees = repo.worktrees().await.unwrap();
        assert_eq!(worktrees.len(), 1);
        assert_eq!(worktrees[0].path, Path::new("/worktrees/feature-branch"));
        assert_eq!(worktrees[0].ref_name.as_ref(), "refs/heads/feature-branch");
        assert_eq!(worktrees[0].sha.as_ref(), "abc123");

        // Directory should exist in FakeFs after create
        assert!(
            fs.is_dir(Path::new("/worktrees/feature-branch")).await,
            "worktree directory should be created in FakeFs"
        );

        // Create a second worktree (without explicit commit)
        repo.create_worktree(
            "bugfix-branch".to_string(),
            PathBuf::from("/worktrees"),
            None,
        )
        .await
        .unwrap();

        let worktrees = repo.worktrees().await.unwrap();
        assert_eq!(worktrees.len(), 2);
        assert!(
            fs.is_dir(Path::new("/worktrees/bugfix-branch")).await,
            "second worktree directory should be created in FakeFs"
        );

        // Rename the first worktree
        repo.rename_worktree(
            PathBuf::from("/worktrees/feature-branch"),
            PathBuf::from("/worktrees/renamed-branch"),
        )
        .await
        .unwrap();

        let worktrees = repo.worktrees().await.unwrap();
        assert_eq!(worktrees.len(), 2);
        assert!(
            worktrees
                .iter()
                .any(|w| w.path == Path::new("/worktrees/renamed-branch")),
            "renamed worktree should exist at new path"
        );
        assert!(
            worktrees
                .iter()
                .all(|w| w.path != Path::new("/worktrees/feature-branch")),
            "old path should no longer exist"
        );

        // Directory should be moved in FakeFs after rename
        assert!(
            !fs.is_dir(Path::new("/worktrees/feature-branch")).await,
            "old worktree directory should not exist after rename"
        );
        assert!(
            fs.is_dir(Path::new("/worktrees/renamed-branch")).await,
            "new worktree directory should exist after rename"
        );

        // Rename a nonexistent worktree should fail
        let result = repo
            .rename_worktree(PathBuf::from("/nonexistent"), PathBuf::from("/somewhere"))
            .await;
        assert!(result.is_err());

        // Remove a worktree
        repo.remove_worktree(PathBuf::from("/worktrees/renamed-branch"), false)
            .await
            .unwrap();

        let worktrees = repo.worktrees().await.unwrap();
        assert_eq!(worktrees.len(), 1);
        assert_eq!(worktrees[0].path, Path::new("/worktrees/bugfix-branch"));

        // Directory should be removed from FakeFs after remove
        assert!(
            !fs.is_dir(Path::new("/worktrees/renamed-branch")).await,
            "worktree directory should be removed from FakeFs"
        );

        // Remove a nonexistent worktree should fail
        let result = repo
            .remove_worktree(PathBuf::from("/nonexistent"), false)
            .await;
        assert!(result.is_err());

        // Remove the last worktree
        repo.remove_worktree(PathBuf::from("/worktrees/bugfix-branch"), false)
            .await
            .unwrap();

        let worktrees = repo.worktrees().await.unwrap();
        assert!(worktrees.is_empty());
        assert!(
            !fs.is_dir(Path::new("/worktrees/bugfix-branch")).await,
            "last worktree directory should be removed from FakeFs"
        );
    }
}