1use crate::{FakeFs, FakeFsEntry, Fs, RemoveOptions, RenameOptions};
2use anyhow::{Context as _, Result, bail};
3use collections::{HashMap, HashSet};
4use futures::future::{self, BoxFuture, join_all};
5use git::{
6 Oid, RunHook,
7 blame::Blame,
8 repository::{
9 AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions, FetchOptions,
10 GRAPH_CHUNK_SIZE, GitRepository, GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder,
11 LogSource, PushOptions, Remote, RepoPath, ResetMode, SearchCommitArgs, Worktree,
12 },
13 status::{
14 DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
15 UnmergedStatus,
16 },
17};
18use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task};
19use ignore::gitignore::GitignoreBuilder;
20use parking_lot::Mutex;
21use rope::Rope;
22use smol::{channel::Sender, future::FutureExt as _};
23use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool};
24use text::LineEnding;
25use util::{paths::PathStyle, rel_path::RelPath};
26
/// An in-memory git repository backed by a [`FakeFs`], used in tests in
/// place of a real on-disk repository.
#[derive(Clone)]
pub struct FakeGitRepository {
    pub(crate) fs: Arc<FakeFs>,
    /// Snapshots captured by `checkpoint`, keyed by a randomly generated oid.
    pub(crate) checkpoints: Arc<Mutex<HashMap<Oid, FakeFsEntry>>>,
    pub(crate) executor: BackgroundExecutor,
    /// Path of this repository's `.git` entry inside the fake filesystem.
    pub(crate) dot_git_path: PathBuf,
    /// Git dir for this (possibly linked) worktree.
    pub(crate) repository_dir_path: PathBuf,
    /// Git dir shared by all worktrees of the repository.
    pub(crate) common_dir_path: PathBuf,
    pub(crate) is_trusted: Arc<AtomicBool>,
}
37
/// Mutable state backing a [`FakeGitRepository`], stored in the fake
/// filesystem and mutated directly by test helpers.
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    /// Channel on which changed paths are emitted (consumed elsewhere by the
    /// fake filesystem's watchers — NOTE(review): confirm against `FakeFs`).
    pub event_emitter: smol::channel::Sender<PathBuf>,
    /// Conflict status for paths that are currently unmerged.
    pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
    /// File contents as of HEAD.
    pub head_contents: HashMap<RepoPath, String>,
    /// Staged (index) file contents.
    pub index_contents: HashMap<RepoPath, String>,
    // everything in commit contents is in oids: this map stores only the oid
    // of each path's merge-base content; the text itself lives in `oids`.
    pub merge_base_contents: HashMap<RepoPath, Oid>,
    /// Blob contents keyed by oid.
    pub oids: HashMap<Oid, String>,
    /// Canned blame results returned by `blame`.
    pub blames: HashMap<RepoPath, Blame>,
    pub current_branch_name: Option<String>,
    pub branches: HashSet<String>,
    /// List of remotes, keys are names and values are URLs
    pub remotes: HashMap<String, String>,
    /// When set, `set_index_text` fails with this message (error injection).
    pub simulated_index_write_error_message: Option<String>,
    /// When set, `create_worktree` fails with this message (error injection).
    pub simulated_create_worktree_error: Option<String>,
    /// Ref name → sha, e.g. "HEAD" → "abc".
    pub refs: HashMap<String, String>,
    /// Commits streamed by `initial_graph_data`.
    pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
    /// Linked worktrees created via `create_worktree`.
    pub worktrees: Vec<Worktree>,
}
58
59impl FakeGitRepositoryState {
60 pub fn new(event_emitter: smol::channel::Sender<PathBuf>) -> Self {
61 FakeGitRepositoryState {
62 event_emitter,
63 head_contents: Default::default(),
64 index_contents: Default::default(),
65 unmerged_paths: Default::default(),
66 blames: Default::default(),
67 current_branch_name: Default::default(),
68 branches: Default::default(),
69 simulated_index_write_error_message: Default::default(),
70 simulated_create_worktree_error: Default::default(),
71 refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
72 merge_base_contents: Default::default(),
73 oids: Default::default(),
74 remotes: HashMap::default(),
75 graph_commits: Vec::new(),
76 worktrees: Vec::new(),
77 }
78 }
79}
80
impl FakeGitRepository {
    /// Runs `f` against this repository's shared git state after a simulated
    /// random delay, returning the callback's result as a boxed future.
    ///
    /// `write` is forwarded to [`FakeFs::with_git_state`]; pass `true` when
    /// `f` mutates the state. The future is `'static` because it captures
    /// clones of the fs, executor, and path rather than borrowing `self`.
    fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<'static, Result<T>>
    where
        F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> Result<T>,
        T: Send,
    {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // `with_git_state` returns Result<Result<T>>; flatten with `?`.
            fs.with_git_state(&dot_git_path, write, f)?
        }
        .boxed()
    }
}
97
98impl GitRepository for FakeGitRepository {
    // No-op: the fake repository keeps its index in memory and has nothing to reload.
    fn reload_index(&self) {}
100
101 fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
102 let fut = self.with_state_async(false, move |state| {
103 state
104 .index_contents
105 .get(&path)
106 .context("not present in index")
107 .cloned()
108 });
109 self.executor.spawn(async move { fut.await.ok() }).boxed()
110 }
111
112 fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
113 let fut = self.with_state_async(false, move |state| {
114 state
115 .head_contents
116 .get(&path)
117 .context("not present in HEAD")
118 .cloned()
119 });
120 self.executor.spawn(async move { fut.await.ok() }).boxed()
121 }
122
123 fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
124 self.with_state_async(false, move |state| {
125 state.oids.get(&oid).cloned().context("oid does not exist")
126 })
127 .boxed()
128 }
129
    // Loading full commit diffs is not modeled by the fake repository.
    fn load_commit(
        &self,
        _commit: String,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::CommitDiff>> {
        unimplemented!()
    }
137
138 fn set_index_text(
139 &self,
140 path: RepoPath,
141 content: Option<String>,
142 _env: Arc<HashMap<String, String>>,
143 _is_executable: bool,
144 ) -> BoxFuture<'_, anyhow::Result<()>> {
145 self.with_state_async(true, move |state| {
146 if let Some(message) = &state.simulated_index_write_error_message {
147 anyhow::bail!("{message}");
148 } else if let Some(content) = content {
149 state.index_contents.insert(path, content);
150 } else {
151 state.index_contents.remove(&path);
152 }
153 Ok(())
154 })
155 }
156
157 fn remote_url(&self, name: &str) -> BoxFuture<'_, Option<String>> {
158 let name = name.to_string();
159 let fut = self.with_state_async(false, move |state| {
160 state
161 .remotes
162 .get(&name)
163 .context("remote not found")
164 .cloned()
165 });
166 async move { fut.await.ok() }.boxed()
167 }
168
169 fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
170 let mut entries = HashMap::default();
171 self.with_state_async(false, |state| {
172 for (path, content) in &state.head_contents {
173 let status = if let Some((oid, original)) = state
174 .merge_base_contents
175 .get(path)
176 .map(|oid| (oid, &state.oids[oid]))
177 {
178 if original == content {
179 continue;
180 }
181 TreeDiffStatus::Modified { old: *oid }
182 } else {
183 TreeDiffStatus::Added
184 };
185 entries.insert(path.clone(), status);
186 }
187 for (path, oid) in &state.merge_base_contents {
188 if !entries.contains_key(path) {
189 entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
190 }
191 }
192 Ok(TreeDiff { entries })
193 })
194 .boxed()
195 }
196
197 fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
198 self.with_state_async(false, |state| {
199 Ok(revs
200 .into_iter()
201 .map(|rev| state.refs.get(&rev).cloned())
202 .collect())
203 })
204 }
205
206 fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
207 async {
208 Ok(CommitDetails {
209 sha: commit.into(),
210 message: "initial commit".into(),
211 ..Default::default()
212 })
213 }
214 .boxed()
215 }
216
    // `git reset` is not modeled by the fake repository.
    fn reset(
        &self,
        _commit: String,
        _mode: ResetMode,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Checking out files from a commit is not modeled by the fake repository.
    fn checkout_files(
        &self,
        _commit: String,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
234
    /// Git dir of this worktree (see `repository_dir_path`).
    fn path(&self) -> PathBuf {
        self.repository_dir_path.clone()
    }

    /// Git dir shared by all worktrees (see `common_dir_path`).
    fn main_repository_path(&self) -> PathBuf {
        self.common_dir_path.clone()
    }
242
243 fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
244 async move { None }.boxed()
245 }
246
    /// Computes a fake `git status` by comparing HEAD, the index, and the
    /// working copy stored in the fake filesystem.
    ///
    /// Only paths under one of `path_prefixes` are reported; entries that are
    /// unmodified in both the index and the worktree are omitted, as are
    /// ignored untracked files. Results are sorted by path.
    fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>> {
        let workdir_path = self.dot_git_path.parent().unwrap();

        // Load gitignores from the workdir and every ancestor directory.
        let ignores = workdir_path
            .ancestors()
            .filter_map(|dir| {
                let ignore_path = dir.join(".gitignore");
                let content = self.fs.read_file_sync(ignore_path).ok()?;
                let content = String::from_utf8(content).ok()?;
                let mut builder = GitignoreBuilder::new(dir);
                for line in content.lines() {
                    builder.add_line(Some(dir.into()), line).ok()?;
                }
                builder.build().ok()
            })
            .collect::<Vec<_>>();

        // Load working copy files.
        let git_files: HashMap<RepoPath, (String, bool)> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                // TODO better simulate git status output in the case of submodules and worktrees
                let repo_path = path.strip_prefix(workdir_path).ok()?;
                // Everything under `.git` is implicitly ignored.
                let mut is_ignored = repo_path.starts_with(".git");
                for ignore in &ignores {
                    match ignore.matched_path_or_any_parents(path, false) {
                        ignore::Match::None => {}
                        ignore::Match::Ignore(_) => is_ignored = true,
                        // An explicit whitelist match stops consulting the
                        // remaining gitignore files for this path.
                        ignore::Match::Whitelist(_) => break,
                    }
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .map(|content| String::from_utf8(content).unwrap())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored)))
            })
            .collect();

        let result = self.fs.with_git_state(&self.dot_git_path, false, |state| {
            let mut entries = Vec::new();
            // Consider every path known to HEAD, the index, or the worktree.
            let paths = state
                .head_contents
                .keys()
                .chain(state.index_contents.keys())
                .chain(git_files.keys())
                .collect::<HashSet<_>>();
            for path in paths {
                if !path_prefixes.iter().any(|prefix| path.starts_with(prefix)) {
                    continue;
                }

                let head = state.head_contents.get(path);
                let index = state.index_contents.get(path);
                let unmerged = state.unmerged_paths.get(path);
                let fs = git_files.get(path);
                // Derive the status from which of (HEAD, index, worktree)
                // contain the path and whether their contents agree.
                let status = match (unmerged, head, index, fs) {
                    // Unmerged paths take precedence over everything else.
                    (Some(unmerged), _, _, _) => FileStatus::Unmerged(*unmerged),
                    (_, Some(head), Some(index), Some((fs, _))) => {
                        FileStatus::Tracked(TrackedStatus {
                            index_status: if head == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                            worktree_status: if fs == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                        })
                    }
                    // In HEAD and index but missing on disk: deleted in worktree.
                    (_, Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: if head == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                        worktree_status: StatusCode::Deleted,
                    }),
                    // In HEAD and on disk but not in the index: removed from
                    // the index, present again in the worktree.
                    (_, Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Added,
                    }),
                    (_, Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Newly staged (and possibly edited since staging).
                    (_, None, Some(index), Some((fs, _))) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: if fs == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                    }),
                    (_, None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Only on disk: untracked unless ignored.
                    (_, None, None, Some((_, is_ignored))) => {
                        if *is_ignored {
                            continue;
                        }
                        FileStatus::Untracked
                    }
                    (_, None, None, None) => {
                        // `paths` is the union of the three maps' keys, so at
                        // least one of the lookups must have succeeded.
                        unreachable!();
                    }
                };
                // Fully unmodified entries are not part of `git status` output.
                if status
                    != FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Unmodified,
                        worktree_status: StatusCode::Unmodified,
                    })
                {
                    entries.push((path.clone(), status));
                }
            }
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            anyhow::Ok(GitStatus {
                entries: entries.into(),
            })
        });
        Task::ready(match result {
            Ok(result) => result,
            Err(e) => Err(e),
        })
    }
381
382 fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
383 async { Ok(git::stash::GitStash::default()) }.boxed()
384 }
385
386 fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
387 self.with_state_async(false, move |state| {
388 let current_branch = &state.current_branch_name;
389 Ok(state
390 .branches
391 .iter()
392 .map(|branch_name| {
393 let ref_name = if branch_name.starts_with("refs/") {
394 branch_name.into()
395 } else if branch_name.contains('/') {
396 format!("refs/remotes/{branch_name}").into()
397 } else {
398 format!("refs/heads/{branch_name}").into()
399 };
400 Branch {
401 is_head: Some(branch_name) == current_branch.as_ref(),
402 ref_name,
403 most_recent_commit: None,
404 upstream: None,
405 }
406 })
407 .collect())
408 })
409 }
410
    /// Lists the main worktree (synthesized from HEAD and the current branch)
    /// followed by any linked worktrees created via `create_worktree`.
    fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
        let dot_git_path = self.dot_git_path.clone();
        self.with_state_async(false, move |state| {
            let work_dir = dot_git_path
                .parent()
                .map(PathBuf::from)
                .unwrap_or(dot_git_path);
            // Fall back to placeholder sha/branch names when the state is empty.
            let head_sha = state
                .refs
                .get("HEAD")
                .cloned()
                .unwrap_or_else(|| "0000000".to_string());
            let branch_ref = state
                .current_branch_name
                .as_ref()
                .map(|name| format!("refs/heads/{name}"))
                .unwrap_or_else(|| "refs/heads/main".to_string());
            let main_worktree = Worktree {
                path: work_dir,
                ref_name: Some(branch_ref.into()),
                sha: head_sha.into(),
            };
            let mut all = vec![main_worktree];
            all.extend(state.worktrees.iter().cloned());
            Ok(all)
        })
    }
438
    /// Creates a linked worktree at `path` on a new branch `branch_name`,
    /// starting from `from_commit` (or a placeholder sha when `None`).
    ///
    /// Fails before any side effects when error injection is configured, and
    /// fails if a branch with the same name already exists.
    fn create_worktree(
        &self,
        branch_name: String,
        path: PathBuf,
        from_commit: Option<String>,
    ) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // Check for simulated error before any side effects
            fs.with_git_state(&dot_git_path, false, |state| {
                if let Some(message) = &state.simulated_create_worktree_error {
                    anyhow::bail!("{message}");
                }
                Ok(())
            })??;
            // Create directory before updating state so state is never
            // inconsistent with the filesystem
            fs.create_dir(&path).await?;
            fs.with_git_state(&dot_git_path, true, {
                let path = path.clone();
                move |state| {
                    if state.branches.contains(&branch_name) {
                        bail!("a branch named '{}' already exists", branch_name);
                    }
                    // Register the new branch ref and the worktree that checks it out.
                    let ref_name = format!("refs/heads/{branch_name}");
                    let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string());
                    state.refs.insert(ref_name.clone(), sha.clone());
                    state.worktrees.push(Worktree {
                        path,
                        ref_name: Some(ref_name.into()),
                        sha: sha.into(),
                    });
                    state.branches.insert(branch_name);
                    Ok::<(), anyhow::Error>(())
                }
            })??;
            Ok(())
        }
        .boxed()
    }
482
    /// Removes the linked worktree at `path`: deletes its directory and
    /// drops it from the worktree list. `_force` is ignored by the fake.
    fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // Validate the worktree exists in state before touching the filesystem
            fs.with_git_state(&dot_git_path, false, {
                let path = path.clone();
                move |state| {
                    if !state.worktrees.iter().any(|w| w.path == path) {
                        bail!("no worktree found at path: {}", path.display());
                    }
                    Ok(())
                }
            })??;
            // Now remove the directory
            fs.remove_dir(
                &path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: false,
                },
            )
            .await?;
            // Update state
            fs.with_git_state(&dot_git_path, true, move |state| {
                state.worktrees.retain(|worktree| worktree.path != path);
                Ok::<(), anyhow::Error>(())
            })??;
            Ok(())
        }
        .boxed()
    }
517
    /// Moves the linked worktree at `old_path` to `new_path`, renaming its
    /// directory and updating the recorded worktree entry.
    fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // Validate the worktree exists in state before touching the filesystem
            fs.with_git_state(&dot_git_path, false, {
                let old_path = old_path.clone();
                move |state| {
                    if !state.worktrees.iter().any(|w| w.path == old_path) {
                        bail!("no worktree found at path: {}", old_path.display());
                    }
                    Ok(())
                }
            })??;
            // Now move the directory
            fs.rename(
                &old_path,
                &new_path,
                RenameOptions {
                    overwrite: false,
                    ignore_if_exists: false,
                    create_parents: true,
                },
            )
            .await?;
            // Update state
            fs.with_git_state(&dot_git_path, true, move |state| {
                let worktree = state
                    .worktrees
                    .iter_mut()
                    .find(|worktree| worktree.path == old_path)
                    .expect("worktree was validated above");
                worktree.path = new_path;
                Ok::<(), anyhow::Error>(())
            })??;
            Ok(())
        }
        .boxed()
    }
559
560 fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
561 self.with_state_async(true, |state| {
562 state.current_branch_name = Some(name);
563 Ok(())
564 })
565 }
566
567 fn create_branch(
568 &self,
569 name: String,
570 _base_branch: Option<String>,
571 ) -> BoxFuture<'_, Result<()>> {
572 self.with_state_async(true, move |state| {
573 if let Some((remote, _)) = name.split_once('/')
574 && !state.remotes.contains_key(remote)
575 {
576 state.remotes.insert(remote.to_owned(), "".to_owned());
577 }
578 state.branches.insert(name);
579 Ok(())
580 })
581 }
582
583 fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> {
584 self.with_state_async(true, move |state| {
585 if !state.branches.remove(&branch) {
586 bail!("no such branch: {branch}");
587 }
588 state.branches.insert(new_name.clone());
589 if state.current_branch_name == Some(branch) {
590 state.current_branch_name = Some(new_name);
591 }
592 Ok(())
593 })
594 }
595
596 fn delete_branch(&self, _is_remote: bool, name: String) -> BoxFuture<'_, Result<()>> {
597 self.with_state_async(true, move |state| {
598 if !state.branches.remove(&name) {
599 bail!("no such branch: {name}");
600 }
601 Ok(())
602 })
603 }
604
605 fn blame(
606 &self,
607 path: RepoPath,
608 _content: Rope,
609 _line_ending: LineEnding,
610 ) -> BoxFuture<'_, Result<git::blame::Blame>> {
611 self.with_state_async(false, move |state| {
612 state
613 .blames
614 .get(&path)
615 .with_context(|| format!("failed to get blame for {:?}", path))
616 .cloned()
617 })
618 }
619
    /// Convenience wrapper: full history is the paginated query with no
    /// offset and no limit.
    fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
623
624 fn file_history_paginated(
625 &self,
626 path: RepoPath,
627 _skip: usize,
628 _limit: Option<usize>,
629 ) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
630 async move {
631 Ok(git::repository::FileHistory {
632 entries: Vec::new(),
633 path,
634 })
635 }
636 .boxed()
637 }
638
    /// Stages the given paths by copying their current on-disk contents into
    /// the index; paths missing from disk have their index entry removed
    /// (i.e. the deletion is staged).
    fn stage_paths(
        &self,
        paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        Box::pin(async move {
            // Load all file contents concurrently before taking the state lock.
            let contents = paths
                .into_iter()
                .map(|path| {
                    let abs_path = self
                        .dot_git_path
                        .parent()
                        .unwrap()
                        .join(&path.as_std_path());
                    Box::pin(async move { (path.clone(), self.fs.load(&abs_path).await.ok()) })
                })
                .collect::<Vec<_>>();
            let contents = join_all(contents).await;
            self.with_state_async(true, move |state| {
                for (path, content) in contents {
                    if let Some(content) = content {
                        state.index_contents.insert(path, content);
                    } else {
                        // File absent (or unreadable): stage its deletion.
                        state.index_contents.remove(&path);
                    }
                }
                Ok(())
            })
            .await
        })
    }
670
671 fn unstage_paths(
672 &self,
673 paths: Vec<RepoPath>,
674 _env: Arc<HashMap<String, String>>,
675 ) -> BoxFuture<'_, Result<()>> {
676 self.with_state_async(true, move |state| {
677 for path in paths {
678 match state.head_contents.get(&path) {
679 Some(content) => state.index_contents.insert(path, content.clone()),
680 None => state.index_contents.remove(&path),
681 };
682 }
683 Ok(())
684 })
685 }
686
    // Stashing is not modeled by the fake repository.
    fn stash_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Stashing is not modeled by the fake repository.
    fn stash_pop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Stashing is not modeled by the fake repository.
    fn stash_apply(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Stashing is not modeled by the fake repository.
    fn stash_drop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
718
719 fn commit(
720 &self,
721 _message: gpui::SharedString,
722 _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
723 _options: CommitOptions,
724 _askpass: AskPassDelegate,
725 _env: Arc<HashMap<String, String>>,
726 ) -> BoxFuture<'_, Result<()>> {
727 async { Ok(()) }.boxed()
728 }
729
730 fn run_hook(
731 &self,
732 _hook: RunHook,
733 _env: Arc<HashMap<String, String>>,
734 ) -> BoxFuture<'_, Result<()>> {
735 async { Ok(()) }.boxed()
736 }
737
    // Network operations are not modeled by the fake repository.
    fn push(
        &self,
        _branch: String,
        _remote_branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    // Network operations are not modeled by the fake repository.
    fn pull(
        &self,
        _branch: Option<String>,
        _remote: String,
        _rebase: bool,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    // Network operations are not modeled by the fake repository.
    fn fetch(
        &self,
        _fetch_options: FetchOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
772
773 fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
774 self.with_state_async(false, move |state| {
775 let remotes = state
776 .remotes
777 .keys()
778 .map(|r| Remote {
779 name: r.clone().into(),
780 })
781 .collect::<Vec<_>>();
782 Ok(remotes)
783 })
784 }
785
    // Per-branch remote configuration is not modeled by the fake repository.
    fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }

    // Per-branch remote configuration is not modeled by the fake repository.
    fn get_branch_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }
793
794 fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
795 future::ready(Ok(Vec::new())).boxed()
796 }
797
798 fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result<String>> {
799 future::ready(Ok(String::new())).boxed()
800 }
801
    /// Computes per-file added/deleted line counts between HEAD and the
    /// working copy, limited to `path_prefixes` (no prefixes, or a "."
    /// prefix, matches everything). Modified files are counted as a
    /// whole-file replacement rather than a line-by-line diff.
    fn diff_stat(
        &self,
        path_prefixes: &[RepoPath],
    ) -> BoxFuture<'_, Result<git::status::GitDiffStat>> {
        // Number of lines in a blob; empty content counts as zero.
        fn count_lines(s: &str) -> u32 {
            if s.is_empty() {
                0
            } else {
                s.lines().count() as u32
            }
        }

        // True when `path` equals or falls under any of the prefixes.
        fn matches_prefixes(path: &RepoPath, prefixes: &[RepoPath]) -> bool {
            if prefixes.is_empty() {
                return true;
            }
            prefixes.iter().any(|prefix| {
                let prefix_str = prefix.as_unix_str();
                if prefix_str == "." {
                    return true;
                }
                path == prefix || path.starts_with(&prefix)
            })
        }

        let path_prefixes = path_prefixes.to_vec();

        // Snapshot the working copy: everything under the workdir except `.git`.
        let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf();
        let worktree_files: HashMap<RepoPath, String> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                let repo_path = path.strip_prefix(&workdir_path).ok()?;
                if repo_path.starts_with(".git") {
                    return None;
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .and_then(|bytes| String::from_utf8(bytes).ok())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), content))
            })
            .collect();

        self.with_state_async(false, move |state| {
            let mut entries = Vec::new();
            // Candidates: every path in HEAD, plus worktree files that are
            // tracked in the index (untracked files don't contribute).
            let all_paths: HashSet<&RepoPath> = state
                .head_contents
                .keys()
                .chain(
                    worktree_files
                        .keys()
                        .filter(|p| state.index_contents.contains_key(*p)),
                )
                .collect();
            for path in all_paths {
                if !matches_prefixes(path, &path_prefixes) {
                    continue;
                }
                let head = state.head_contents.get(path);
                let worktree = worktree_files.get(path);
                match (head, worktree) {
                    // Modified: every line of both versions is counted.
                    (Some(old), Some(new)) if old != new => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: count_lines(new),
                                deleted: count_lines(old),
                            },
                        ));
                    }
                    // Deleted from the worktree.
                    (Some(old), None) => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: 0,
                                deleted: count_lines(old),
                            },
                        ));
                    }
                    // Added: tracked in the index but absent from HEAD.
                    (None, Some(new)) => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: count_lines(new),
                                deleted: 0,
                            },
                        ));
                    }
                    // Unchanged content (the (None, None) case cannot occur:
                    // every candidate path came from one of the two maps).
                    _ => {}
                }
            }
            entries.sort_by(|(a, _), (b, _)| a.cmp(b));
            Ok(git::status::GitDiffStat {
                entries: entries.into(),
            })
        })
        .boxed()
    }
904
    /// Captures a snapshot of the directory containing the repository under a
    /// freshly generated random oid, for later `restore_checkpoint` /
    /// `compare_checkpoints` calls.
    fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
        let executor = self.executor.clone();
        let fs = self.fs.clone();
        let checkpoints = self.checkpoints.clone();
        let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
        async move {
            executor.simulate_random_delay().await;
            // The oid is drawn from the executor's rng (seeded per test run).
            let oid = git::Oid::random(&mut *executor.rng().lock());
            let entry = fs.entry(&repository_dir_path)?;
            checkpoints.lock().insert(oid, entry);
            Ok(GitRepositoryCheckpoint { commit_sha: oid })
        }
        .boxed()
    }
919
920 fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
921 let executor = self.executor.clone();
922 let fs = self.fs.clone();
923 let checkpoints = self.checkpoints.clone();
924 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
925 async move {
926 executor.simulate_random_delay().await;
927 let checkpoints = checkpoints.lock();
928 let entry = checkpoints
929 .get(&checkpoint.commit_sha)
930 .context(format!("invalid checkpoint: {}", checkpoint.commit_sha))?;
931 fs.insert_entry(&repository_dir_path, entry.clone())?;
932 Ok(())
933 }
934 .boxed()
935 }
936
937 fn compare_checkpoints(
938 &self,
939 left: GitRepositoryCheckpoint,
940 right: GitRepositoryCheckpoint,
941 ) -> BoxFuture<'_, Result<bool>> {
942 let executor = self.executor.clone();
943 let checkpoints = self.checkpoints.clone();
944 async move {
945 executor.simulate_random_delay().await;
946 let checkpoints = checkpoints.lock();
947 let left = checkpoints
948 .get(&left.commit_sha)
949 .context(format!("invalid left checkpoint: {}", left.commit_sha))?;
950 let right = checkpoints
951 .get(&right.commit_sha)
952 .context(format!("invalid right checkpoint: {}", right.commit_sha))?;
953
954 Ok(left == right)
955 }
956 .boxed()
957 }
958
    // Diffing two checkpoints is not modeled by the fake repository.
    fn diff_checkpoints(
        &self,
        _base_checkpoint: GitRepositoryCheckpoint,
        _target_checkpoint: GitRepositoryCheckpoint,
    ) -> BoxFuture<'_, Result<String>> {
        unimplemented!()
    }
966
967 fn default_branch(
968 &self,
969 include_remote_name: bool,
970 ) -> BoxFuture<'_, Result<Option<SharedString>>> {
971 async move {
972 Ok(Some(if include_remote_name {
973 "origin/main".into()
974 } else {
975 "main".into()
976 }))
977 }
978 .boxed()
979 }
980
981 fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
982 self.with_state_async(true, move |state| {
983 state.remotes.insert(name, url);
984 Ok(())
985 })
986 }
987
988 fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
989 self.with_state_async(true, move |state| {
990 state.branches.retain(|branch| {
991 branch
992 .split_once('/')
993 .is_none_or(|(remote, _)| remote != name)
994 });
995 state.remotes.remove(&name);
996 Ok(())
997 })
998 }
999
    /// Streams the recorded graph commits to `request_tx` in chunks of
    /// `GRAPH_CHUNK_SIZE`. Log source and order are ignored by the fake.
    fn initial_graph_data(
        &self,
        _log_source: LogSource,
        _log_order: LogOrder,
        request_tx: Sender<Vec<Arc<InitialGraphCommitData>>>,
    ) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            let graph_commits =
                fs.with_git_state(&dot_git_path, false, |state| state.graph_commits.clone())?;

            for chunk in graph_commits.chunks(GRAPH_CHUNK_SIZE) {
                // Send failures (receiver dropped) are deliberately ignored.
                request_tx.send(chunk.to_vec()).await.ok();
            }
            Ok(())
        }
        .boxed()
    }
1019
    // Commit search is not modeled by the fake repository; fails at runtime
    // instead of panicking so callers can surface the error.
    fn search_commits(
        &self,
        _log_source: LogSource,
        _search_args: SearchCommitArgs,
        _request_tx: Sender<Oid>,
    ) -> BoxFuture<'_, Result<()>> {
        async { bail!("search_commits not supported for FakeGitRepository") }.boxed()
    }

    // Raw commit-data access is not modeled by the fake repository.
    fn commit_data_reader(&self) -> Result<CommitDataReader> {
        anyhow::bail!("commit_data_reader not supported for FakeGitRepository")
    }
1032
    /// Marks the repository as trusted or untrusted. The Release store pairs
    /// with the Acquire load in `is_trusted`.
    fn set_trusted(&self, trusted: bool) {
        self.is_trusted
            .store(trusted, std::sync::atomic::Ordering::Release);
    }

    /// Reads the trust flag set by `set_trusted`.
    fn is_trusted(&self) -> bool {
        self.is_trusted.load(std::sync::atomic::Ordering::Acquire)
    }
1041}