1use crate::{FakeFs, FakeFsEntry, Fs, RemoveOptions, RenameOptions};
2use anyhow::{Context as _, Result, bail};
3use collections::{HashMap, HashSet};
4use futures::future::{self, BoxFuture, join_all};
5use git::{
6 Oid, RunHook,
7 blame::Blame,
8 repository::{
9 AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions, FetchOptions,
10 GRAPH_CHUNK_SIZE, GitRepository, GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder,
11 LogSource, PushOptions, Remote, RepoPath, ResetMode, SearchCommitArgs, Worktree,
12 },
13 status::{
14 DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
15 UnmergedStatus,
16 },
17};
18use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task};
19use ignore::gitignore::GitignoreBuilder;
20use parking_lot::Mutex;
21use rope::Rope;
22use smol::{channel::Sender, future::FutureExt as _};
23use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool};
24use text::LineEnding;
25use util::{paths::PathStyle, rel_path::RelPath};
26
/// An in-memory stand-in for a real git repository, backed by [`FakeFs`].
///
/// All mutable repository state lives in the fake filesystem's git state
/// (looked up via `dot_git_path`); this struct holds only the handles needed
/// to reach that state.
#[derive(Clone)]
pub struct FakeGitRepository {
    pub(crate) fs: Arc<FakeFs>,
    // Snapshots of the working directory taken by `checkpoint`, keyed by a
    // randomly generated oid.
    pub(crate) checkpoints: Arc<Mutex<HashMap<Oid, FakeFsEntry>>>,
    pub(crate) executor: BackgroundExecutor,
    // Path of this repository's `.git` entry inside the fake filesystem.
    pub(crate) dot_git_path: PathBuf,
    // Returned by `path()`.
    pub(crate) repository_dir_path: PathBuf,
    // Returned by `main_repository_path()`.
    pub(crate) common_dir_path: PathBuf,
    // Flag backing `set_trusted` / `is_trusted`.
    pub(crate) is_trusted: Arc<AtomicBool>,
}
37
/// The mutable state of a fake repository, stored inside [`FakeFs`] and
/// accessed through `FakeFs::with_git_state`.
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    // NOTE(review): not used within this file; presumably FakeFs uses this to
    // emit `.git`-change events — confirm against FakeFs.
    pub event_emitter: smol::channel::Sender<PathBuf>,
    /// Paths with merge conflicts; these take priority in `status`.
    pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
    /// Contents of each path as committed at HEAD.
    pub head_contents: HashMap<RepoPath, String>,
    /// Contents of each path in the index (staging area).
    pub index_contents: HashMap<RepoPath, String>,
    // everything in commit contents is in oids
    pub merge_base_contents: HashMap<RepoPath, Oid>,
    /// Blob contents addressed by oid.
    pub oids: HashMap<Oid, String>,
    /// Pre-seeded blame results returned verbatim by `blame`.
    pub blames: HashMap<RepoPath, Blame>,
    pub current_branch_name: Option<String>,
    pub branches: HashSet<String>,
    /// List of remotes, keys are names and values are URLs
    pub remotes: HashMap<String, String>,
    /// When set, `set_index_text` fails with this message.
    pub simulated_index_write_error_message: Option<String>,
    /// When set, `create_worktree` fails with this message.
    pub simulated_create_worktree_error: Option<String>,
    /// Ref name -> sha, e.g. `"HEAD" -> "abc"`.
    pub refs: HashMap<String, String>,
    pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
    /// Linked worktrees; the main worktree is synthesized by `worktrees()`.
    pub worktrees: Vec<Worktree>,
}
58
59impl FakeGitRepositoryState {
60 pub fn new(event_emitter: smol::channel::Sender<PathBuf>) -> Self {
61 FakeGitRepositoryState {
62 event_emitter,
63 head_contents: Default::default(),
64 index_contents: Default::default(),
65 unmerged_paths: Default::default(),
66 blames: Default::default(),
67 current_branch_name: Default::default(),
68 branches: Default::default(),
69 simulated_index_write_error_message: Default::default(),
70 simulated_create_worktree_error: Default::default(),
71 refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
72 merge_base_contents: Default::default(),
73 oids: Default::default(),
74 remotes: HashMap::default(),
75 graph_commits: Vec::new(),
76 worktrees: Vec::new(),
77 }
78 }
79}
80
81impl FakeGitRepository {
82 fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<'static, Result<T>>
83 where
84 F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> Result<T>,
85 T: Send,
86 {
87 let fs = self.fs.clone();
88 let executor = self.executor.clone();
89 let dot_git_path = self.dot_git_path.clone();
90 async move {
91 executor.simulate_random_delay().await;
92 fs.with_git_state(&dot_git_path, write, f)?
93 }
94 .boxed()
95 }
96}
97
98impl GitRepository for FakeGitRepository {
    fn reload_index(&self) {}

    /// Returns the staged (index) contents of `path`, or `None` when the path
    /// is not present in the index.
    fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
        let fut = self.with_state_async(false, move |state| {
            state
                .index_contents
                .get(&path)
                .context("not present in index")
                .cloned()
        });
        self.executor.spawn(async move { fut.await.ok() }).boxed()
    }

    /// Returns the contents of `path` at HEAD, or `None` when the path is not
    /// committed.
    fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
        let fut = self.with_state_async(false, move |state| {
            state
                .head_contents
                .get(&path)
                .context("not present in HEAD")
                .cloned()
        });
        self.executor.spawn(async move { fut.await.ok() }).boxed()
    }

    /// Looks up a blob's contents by oid in the fake object store.
    fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
        self.with_state_async(false, move |state| {
            state.oids.get(&oid).cloned().context("oid does not exist")
        })
        .boxed()
    }

    // Not supported by the fake repository.
    fn load_commit(
        &self,
        _commit: String,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::CommitDiff>> {
        unimplemented!()
    }
137
138 fn set_index_text(
139 &self,
140 path: RepoPath,
141 content: Option<String>,
142 _env: Arc<HashMap<String, String>>,
143 _is_executable: bool,
144 ) -> BoxFuture<'_, anyhow::Result<()>> {
145 self.with_state_async(true, move |state| {
146 if let Some(message) = &state.simulated_index_write_error_message {
147 anyhow::bail!("{message}");
148 } else if let Some(content) = content {
149 state.index_contents.insert(path, content);
150 } else {
151 state.index_contents.remove(&path);
152 }
153 Ok(())
154 })
155 }
156
157 fn remote_url(&self, name: &str) -> BoxFuture<'_, Option<String>> {
158 let name = name.to_string();
159 let fut = self.with_state_async(false, move |state| {
160 state
161 .remotes
162 .get(&name)
163 .context("remote not found")
164 .cloned()
165 });
166 async move { fut.await.ok() }.boxed()
167 }
168
    /// Diffs HEAD against the merge base, ignoring `_request`.
    ///
    /// Paths present at HEAD but not in `merge_base_contents` are `Added`;
    /// paths whose merge-base blob differs from HEAD are `Modified`; paths
    /// only in the merge base are `Deleted`.
    fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
        let mut entries = HashMap::default();
        self.with_state_async(false, |state| {
            for (path, content) in &state.head_contents {
                let status = if let Some((oid, original)) = state
                    .merge_base_contents
                    .get(path)
                    .map(|oid| (oid, &state.oids[oid]))
                {
                    // Unchanged files are omitted from the diff entirely.
                    if original == content {
                        continue;
                    }
                    TreeDiffStatus::Modified { old: *oid }
                } else {
                    TreeDiffStatus::Added
                };
                entries.insert(path.clone(), status);
            }
            // Anything in the merge base that never showed up above was
            // deleted at HEAD.
            for (path, oid) in &state.merge_base_contents {
                if !entries.contains_key(path) {
                    entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
                }
            }
            Ok(TreeDiff { entries })
        })
        .boxed()
    }
196
197 fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
198 self.with_state_async(false, |state| {
199 Ok(revs
200 .into_iter()
201 .map(|rev| state.refs.get(&rev).cloned())
202 .collect())
203 })
204 }
205
206 fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
207 async {
208 Ok(CommitDetails {
209 sha: commit.into(),
210 message: "initial commit".into(),
211 ..Default::default()
212 })
213 }
214 .boxed()
215 }
216
    // Not supported by the fake repository.
    fn reset(
        &self,
        _commit: String,
        _mode: ResetMode,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Not supported by the fake repository.
    fn checkout_files(
        &self,
        _commit: String,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// Path of this repository's git dir.
    fn path(&self) -> PathBuf {
        self.repository_dir_path.clone()
    }

    /// Path of the common (main repository) git dir.
    fn main_repository_path(&self) -> PathBuf {
        self.common_dir_path.clone()
    }

    /// The fake never has an in-progress merge, so there is no merge message.
    fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
        async move { None }.boxed()
    }
246
    /// Computes a fake `git status` by comparing HEAD contents, index
    /// contents, and the files present in the fake filesystem's working copy.
    ///
    /// Only paths that start with one of `path_prefixes` are reported, and
    /// fully-unmodified tracked entries are omitted.
    fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>> {
        let workdir_path = self.dot_git_path.parent().unwrap();

        // Load gitignores
        let ignores = workdir_path
            .ancestors()
            .filter_map(|dir| {
                let ignore_path = dir.join(".gitignore");
                let content = self.fs.read_file_sync(ignore_path).ok()?;
                let content = String::from_utf8(content).ok()?;
                let mut builder = GitignoreBuilder::new(dir);
                for line in content.lines() {
                    builder.add_line(Some(dir.into()), line).ok()?;
                }
                builder.build().ok()
            })
            .collect::<Vec<_>>();

        // Load working copy files.
        let git_files: HashMap<RepoPath, (String, bool)> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                // TODO better simulate git status output in the case of submodules and worktrees
                let repo_path = path.strip_prefix(workdir_path).ok()?;
                // Anything under `.git` is treated as ignored, as is anything
                // matched by a gitignore (unless a later whitelist wins).
                let mut is_ignored = repo_path.starts_with(".git");
                for ignore in &ignores {
                    match ignore.matched_path_or_any_parents(path, false) {
                        ignore::Match::None => {}
                        ignore::Match::Ignore(_) => is_ignored = true,
                        ignore::Match::Whitelist(_) => break,
                    }
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .map(|content| String::from_utf8(content).unwrap())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored)))
            })
            .collect();

        let result = self.fs.with_git_state(&self.dot_git_path, false, |state| {
            let mut entries = Vec::new();
            // Consider every path known to HEAD, the index, or the worktree.
            let paths = state
                .head_contents
                .keys()
                .chain(state.index_contents.keys())
                .chain(git_files.keys())
                .collect::<HashSet<_>>();
            for path in paths {
                if !path_prefixes.iter().any(|prefix| path.starts_with(prefix)) {
                    continue;
                }

                let head = state.head_contents.get(path);
                let index = state.index_contents.get(path);
                let unmerged = state.unmerged_paths.get(path);
                let fs = git_files.get(path);
                // Derive the status from which of (HEAD, index, worktree)
                // contain the path; an unmerged entry takes priority over all.
                let status = match (unmerged, head, index, fs) {
                    (Some(unmerged), _, _, _) => FileStatus::Unmerged(*unmerged),
                    (_, Some(head), Some(index), Some((fs, _))) => {
                        FileStatus::Tracked(TrackedStatus {
                            index_status: if head == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                            worktree_status: if fs == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                        })
                    }
                    (_, Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: if head == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Added,
                    }),
                    (_, Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, None, Some(index), Some((fs, _))) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: if fs == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                    }),
                    (_, None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, None, None, Some((_, is_ignored))) => {
                        if *is_ignored {
                            continue;
                        }
                        FileStatus::Untracked
                    }
                    (_, None, None, None) => {
                        // `paths` was built from these three sources, so at
                        // least one lookup must succeed.
                        unreachable!();
                    }
                };
                // Entries unmodified in both the index and worktree are
                // omitted, matching real `git status` output.
                if status
                    != FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Unmodified,
                        worktree_status: StatusCode::Unmodified,
                    })
                {
                    entries.push((path.clone(), status));
                }
            }
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            anyhow::Ok(GitStatus {
                entries: entries.into(),
            })
        });
        Task::ready(match result {
            Ok(result) => result,
            Err(e) => Err(e),
        })
    }
381
    /// The fake models no stash; always returns an empty stash list.
    fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
        async { Ok(git::stash::GitStash::default()) }.boxed()
    }
385
386 fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
387 self.with_state_async(false, move |state| {
388 let current_branch = &state.current_branch_name;
389 Ok(state
390 .branches
391 .iter()
392 .map(|branch_name| {
393 let ref_name = if branch_name.starts_with("refs/") {
394 branch_name.into()
395 } else if branch_name.contains('/') {
396 format!("refs/remotes/{branch_name}").into()
397 } else {
398 format!("refs/heads/{branch_name}").into()
399 };
400 Branch {
401 is_head: Some(branch_name) == current_branch.as_ref(),
402 ref_name,
403 most_recent_commit: None,
404 upstream: None,
405 }
406 })
407 .collect())
408 })
409 }
410
    /// Returns a synthesized main worktree (rooted at the `.git` parent
    /// directory) followed by any linked worktrees recorded in state.
    fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
        let dot_git_path = self.dot_git_path.clone();
        self.with_state_async(false, move |state| {
            let work_dir = dot_git_path
                .parent()
                .map(PathBuf::from)
                .unwrap_or(dot_git_path);
            // Fall back to a placeholder sha / "main" branch when the state
            // doesn't specify them.
            let head_sha = state
                .refs
                .get("HEAD")
                .cloned()
                .unwrap_or_else(|| "0000000".to_string());
            let branch_ref = state
                .current_branch_name
                .as_ref()
                .map(|name| format!("refs/heads/{name}"))
                .unwrap_or_else(|| "refs/heads/main".to_string());
            let main_worktree = Worktree {
                path: work_dir,
                ref_name: Some(branch_ref.into()),
                sha: head_sha.into(),
                is_main: true,
            };
            let mut all = vec![main_worktree];
            all.extend(state.worktrees.iter().cloned());
            Ok(all)
        })
    }
439
    /// Creates a linked worktree at `path` on a new branch `branch_name`,
    /// optionally pinned to `from_commit` (otherwise a fake sha is used).
    ///
    /// Fails if a simulated create-worktree error is configured, or if the
    /// branch already exists.
    fn create_worktree(
        &self,
        branch_name: String,
        path: PathBuf,
        from_commit: Option<String>,
    ) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // Check for simulated error before any side effects
            fs.with_git_state(&dot_git_path, false, |state| {
                if let Some(message) = &state.simulated_create_worktree_error {
                    anyhow::bail!("{message}");
                }
                Ok(())
            })??;
            // Create directory before updating state so state is never
            // inconsistent with the filesystem
            fs.create_dir(&path).await?;
            fs.with_git_state(&dot_git_path, true, {
                let path = path.clone();
                move |state| {
                    if state.branches.contains(&branch_name) {
                        bail!("a branch named '{}' already exists", branch_name);
                    }
                    let ref_name = format!("refs/heads/{branch_name}");
                    let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string());
                    state.refs.insert(ref_name.clone(), sha.clone());
                    state.worktrees.push(Worktree {
                        path,
                        ref_name: Some(ref_name.into()),
                        sha: sha.into(),
                        is_main: false,
                    });
                    state.branches.insert(branch_name);
                    Ok::<(), anyhow::Error>(())
                }
            })??;
            Ok(())
        }
        .boxed()
    }
484
    /// Removes the linked worktree at `path`: validates it exists, deletes
    /// its directory, then drops it from state. `_force` is ignored.
    fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // Validate the worktree exists in state before touching the filesystem
            fs.with_git_state(&dot_git_path, false, {
                let path = path.clone();
                move |state| {
                    if !state.worktrees.iter().any(|w| w.path == path) {
                        bail!("no worktree found at path: {}", path.display());
                    }
                    Ok(())
                }
            })??;
            // Now remove the directory
            fs.remove_dir(
                &path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: false,
                },
            )
            .await?;
            // Update state
            fs.with_git_state(&dot_git_path, true, move |state| {
                state.worktrees.retain(|worktree| worktree.path != path);
                Ok::<(), anyhow::Error>(())
            })??;
            Ok(())
        }
        .boxed()
    }
519
    /// Moves the linked worktree at `old_path` to `new_path`: validates it
    /// exists, renames the directory, then updates its path in state.
    fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // Validate the worktree exists in state before touching the filesystem
            fs.with_git_state(&dot_git_path, false, {
                let old_path = old_path.clone();
                move |state| {
                    if !state.worktrees.iter().any(|w| w.path == old_path) {
                        bail!("no worktree found at path: {}", old_path.display());
                    }
                    Ok(())
                }
            })??;
            // Now move the directory
            fs.rename(
                &old_path,
                &new_path,
                RenameOptions {
                    overwrite: false,
                    ignore_if_exists: false,
                    create_parents: true,
                },
            )
            .await?;
            // Update state
            fs.with_git_state(&dot_git_path, true, move |state| {
                let worktree = state
                    .worktrees
                    .iter_mut()
                    .find(|worktree| worktree.path == old_path)
                    .expect("worktree was validated above");
                worktree.path = new_path;
                Ok::<(), anyhow::Error>(())
            })??;
            Ok(())
        }
        .boxed()
    }
561
    /// Sets the current branch name. Note: does not validate that the branch
    /// exists, nor add it to `branches`.
    fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, |state| {
            state.current_branch_name = Some(name);
            Ok(())
        })
    }
568
569 fn create_branch(
570 &self,
571 name: String,
572 _base_branch: Option<String>,
573 ) -> BoxFuture<'_, Result<()>> {
574 self.with_state_async(true, move |state| {
575 if let Some((remote, _)) = name.split_once('/')
576 && !state.remotes.contains_key(remote)
577 {
578 state.remotes.insert(remote.to_owned(), "".to_owned());
579 }
580 state.branches.insert(name);
581 Ok(())
582 })
583 }
584
585 fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> {
586 self.with_state_async(true, move |state| {
587 if !state.branches.remove(&branch) {
588 bail!("no such branch: {branch}");
589 }
590 state.branches.insert(new_name.clone());
591 if state.current_branch_name == Some(branch) {
592 state.current_branch_name = Some(new_name);
593 }
594 Ok(())
595 })
596 }
597
598 fn delete_branch(&self, _is_remote: bool, name: String) -> BoxFuture<'_, Result<()>> {
599 self.with_state_async(true, move |state| {
600 if !state.branches.remove(&name) {
601 bail!("no such branch: {name}");
602 }
603 Ok(())
604 })
605 }
606
607 fn blame(
608 &self,
609 path: RepoPath,
610 _content: Rope,
611 _line_ending: LineEnding,
612 ) -> BoxFuture<'_, Result<git::blame::Blame>> {
613 self.with_state_async(false, move |state| {
614 state
615 .blames
616 .get(&path)
617 .with_context(|| format!("failed to get blame for {:?}", path))
618 .cloned()
619 })
620 }
621
    /// Delegates to `file_history_paginated` with no offset or limit.
    fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }

    /// The fake models no commit history; always returns an empty history
    /// for `path`, ignoring pagination.
    fn file_history_paginated(
        &self,
        path: RepoPath,
        _skip: usize,
        _limit: Option<usize>,
    ) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
        async move {
            Ok(git::repository::FileHistory {
                entries: Vec::new(),
                path,
            })
        }
        .boxed()
    }
640
    /// Stages `paths` by copying each file's working-copy contents into the
    /// index; paths that cannot be loaded are removed from the index
    /// (mirroring staging a deletion).
    fn stage_paths(
        &self,
        paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        Box::pin(async move {
            // Load all file contents concurrently before locking the state.
            let contents = paths
                .into_iter()
                .map(|path| {
                    let abs_path = self
                        .dot_git_path
                        .parent()
                        .unwrap()
                        .join(&path.as_std_path());
                    Box::pin(async move { (path.clone(), self.fs.load(&abs_path).await.ok()) })
                })
                .collect::<Vec<_>>();
            let contents = join_all(contents).await;
            self.with_state_async(true, move |state| {
                for (path, content) in contents {
                    if let Some(content) = content {
                        state.index_contents.insert(path, content);
                    } else {
                        state.index_contents.remove(&path);
                    }
                }
                Ok(())
            })
            .await
        })
    }
672
673 fn unstage_paths(
674 &self,
675 paths: Vec<RepoPath>,
676 _env: Arc<HashMap<String, String>>,
677 ) -> BoxFuture<'_, Result<()>> {
678 self.with_state_async(true, move |state| {
679 for path in paths {
680 match state.head_contents.get(&path) {
681 Some(content) => state.index_contents.insert(path, content.clone()),
682 None => state.index_contents.remove(&path),
683 };
684 }
685 Ok(())
686 })
687 }
688
    // Stash operations are not supported by the fake repository.
    fn stash_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Not supported by the fake repository.
    fn stash_pop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Not supported by the fake repository.
    fn stash_apply(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Not supported by the fake repository.
    fn stash_drop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
720
    /// No-op: the fake accepts any commit without changing state.
    fn commit(
        &self,
        _message: gpui::SharedString,
        _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
        _options: CommitOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        async { Ok(()) }.boxed()
    }

    /// No-op: hooks always succeed in the fake.
    fn run_hook(
        &self,
        _hook: RunHook,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        async { Ok(()) }.boxed()
    }
739
    // Network operations are not supported by the fake repository.
    fn push(
        &self,
        _branch: String,
        _remote_branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    // Not supported by the fake repository.
    fn pull(
        &self,
        _branch: Option<String>,
        _remote: String,
        _rebase: bool,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    // Not supported by the fake repository.
    fn fetch(
        &self,
        _fetch_options: FetchOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
774
775 fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
776 self.with_state_async(false, move |state| {
777 let remotes = state
778 .remotes
779 .keys()
780 .map(|r| Remote {
781 name: r.clone().into(),
782 })
783 .collect::<Vec<_>>();
784 Ok(remotes)
785 })
786 }
787
    // Not supported by the fake repository.
    fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }

    // Not supported by the fake repository.
    fn get_branch_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }

    /// The fake never reports pushed commits.
    fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
        future::ready(Ok(Vec::new())).boxed()
    }

    /// The fake produces no textual diff output.
    fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result<String>> {
        future::ready(Ok(String::new())).boxed()
    }
803
804 fn diff_stat(
805 &self,
806 path_prefixes: &[RepoPath],
807 ) -> BoxFuture<'_, Result<git::status::GitDiffStat>> {
808 fn count_lines(s: &str) -> u32 {
809 if s.is_empty() {
810 0
811 } else {
812 s.lines().count() as u32
813 }
814 }
815
816 fn matches_prefixes(path: &RepoPath, prefixes: &[RepoPath]) -> bool {
817 if prefixes.is_empty() {
818 return true;
819 }
820 prefixes.iter().any(|prefix| {
821 let prefix_str = prefix.as_unix_str();
822 if prefix_str == "." {
823 return true;
824 }
825 path == prefix || path.starts_with(&prefix)
826 })
827 }
828
829 let path_prefixes = path_prefixes.to_vec();
830
831 let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf();
832 let worktree_files: HashMap<RepoPath, String> = self
833 .fs
834 .files()
835 .iter()
836 .filter_map(|path| {
837 let repo_path = path.strip_prefix(&workdir_path).ok()?;
838 if repo_path.starts_with(".git") {
839 return None;
840 }
841 let content = self
842 .fs
843 .read_file_sync(path)
844 .ok()
845 .and_then(|bytes| String::from_utf8(bytes).ok())?;
846 let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
847 Some((RepoPath::from_rel_path(&repo_path), content))
848 })
849 .collect();
850
851 self.with_state_async(false, move |state| {
852 let mut entries = Vec::new();
853 let all_paths: HashSet<&RepoPath> = state
854 .head_contents
855 .keys()
856 .chain(
857 worktree_files
858 .keys()
859 .filter(|p| state.index_contents.contains_key(*p)),
860 )
861 .collect();
862 for path in all_paths {
863 if !matches_prefixes(path, &path_prefixes) {
864 continue;
865 }
866 let head = state.head_contents.get(path);
867 let worktree = worktree_files.get(path);
868 match (head, worktree) {
869 (Some(old), Some(new)) if old != new => {
870 entries.push((
871 path.clone(),
872 git::status::DiffStat {
873 added: count_lines(new),
874 deleted: count_lines(old),
875 },
876 ));
877 }
878 (Some(old), None) => {
879 entries.push((
880 path.clone(),
881 git::status::DiffStat {
882 added: 0,
883 deleted: count_lines(old),
884 },
885 ));
886 }
887 (None, Some(new)) => {
888 entries.push((
889 path.clone(),
890 git::status::DiffStat {
891 added: count_lines(new),
892 deleted: 0,
893 },
894 ));
895 }
896 _ => {}
897 }
898 }
899 entries.sort_by(|(a, _), (b, _)| a.cmp(b));
900 Ok(git::status::GitDiffStat {
901 entries: entries.into(),
902 })
903 })
904 .boxed()
905 }
906
907 fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
908 let executor = self.executor.clone();
909 let fs = self.fs.clone();
910 let checkpoints = self.checkpoints.clone();
911 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
912 async move {
913 executor.simulate_random_delay().await;
914 let oid = git::Oid::random(&mut *executor.rng().lock());
915 let entry = fs.entry(&repository_dir_path)?;
916 checkpoints.lock().insert(oid, entry);
917 Ok(GitRepositoryCheckpoint { commit_sha: oid })
918 }
919 .boxed()
920 }
921
    /// Restores the working directory from a previously taken checkpoint
    /// snapshot; fails when the checkpoint oid is unknown.
    fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
        let executor = self.executor.clone();
        let fs = self.fs.clone();
        let checkpoints = self.checkpoints.clone();
        let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
        async move {
            executor.simulate_random_delay().await;
            let checkpoints = checkpoints.lock();
            let entry = checkpoints
                .get(&checkpoint.commit_sha)
                .context(format!("invalid checkpoint: {}", checkpoint.commit_sha))?;
            fs.insert_entry(&repository_dir_path, entry.clone())?;
            Ok(())
        }
        .boxed()
    }
938
    /// Returns whether two checkpoint snapshots are identical; fails when
    /// either oid is unknown.
    fn compare_checkpoints(
        &self,
        left: GitRepositoryCheckpoint,
        right: GitRepositoryCheckpoint,
    ) -> BoxFuture<'_, Result<bool>> {
        let executor = self.executor.clone();
        let checkpoints = self.checkpoints.clone();
        async move {
            executor.simulate_random_delay().await;
            let checkpoints = checkpoints.lock();
            let left = checkpoints
                .get(&left.commit_sha)
                .context(format!("invalid left checkpoint: {}", left.commit_sha))?;
            let right = checkpoints
                .get(&right.commit_sha)
                .context(format!("invalid right checkpoint: {}", right.commit_sha))?;

            Ok(left == right)
        }
        .boxed()
    }
960
    // Not supported by the fake repository.
    fn diff_checkpoints(
        &self,
        _base_checkpoint: GitRepositoryCheckpoint,
        _target_checkpoint: GitRepositoryCheckpoint,
    ) -> BoxFuture<'_, Result<String>> {
        unimplemented!()
    }

    /// Always reports "main" as the default branch (qualified with "origin/"
    /// when `include_remote_name` is set).
    fn default_branch(
        &self,
        include_remote_name: bool,
    ) -> BoxFuture<'_, Result<Option<SharedString>>> {
        async move {
            Ok(Some(if include_remote_name {
                "origin/main".into()
            } else {
                "main".into()
            }))
        }
        .boxed()
    }
982
    /// Registers a remote, overwriting any existing URL for `name`.
    fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            state.remotes.insert(name, url);
            Ok(())
        })
    }
989
990 fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
991 self.with_state_async(true, move |state| {
992 state.branches.retain(|branch| {
993 branch
994 .split_once('/')
995 .is_none_or(|(remote, _)| remote != name)
996 });
997 state.remotes.remove(&name);
998 Ok(())
999 })
1000 }
1001
    /// Streams the pre-seeded commit-graph data over `request_tx` in chunks
    /// of `GRAPH_CHUNK_SIZE`; `_log_source` and `_log_order` are ignored.
    fn initial_graph_data(
        &self,
        _log_source: LogSource,
        _log_order: LogOrder,
        request_tx: Sender<Vec<Arc<InitialGraphCommitData>>>,
    ) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            let graph_commits =
                fs.with_git_state(&dot_git_path, false, |state| state.graph_commits.clone())?;

            for chunk in graph_commits.chunks(GRAPH_CHUNK_SIZE) {
                // A closed receiver just stops the stream; it is not an error.
                request_tx.send(chunk.to_vec()).await.ok();
            }
            Ok(())
        }
        .boxed()
    }
1021
    // Not supported by the fake repository; fails with an explicit error.
    fn search_commits(
        &self,
        _log_source: LogSource,
        _search_args: SearchCommitArgs,
        _request_tx: Sender<Oid>,
    ) -> BoxFuture<'_, Result<()>> {
        async { bail!("search_commits not supported for FakeGitRepository") }.boxed()
    }

    // Not supported by the fake repository; fails with an explicit error.
    fn commit_data_reader(&self) -> Result<CommitDataReader> {
        anyhow::bail!("commit_data_reader not supported for FakeGitRepository")
    }
1034
    /// Records the trusted flag with release ordering, paired with the
    /// acquire load in `is_trusted`.
    fn set_trusted(&self, trusted: bool) {
        self.is_trusted
            .store(trusted, std::sync::atomic::Ordering::Release);
    }

    /// Reads the trusted flag set by `set_trusted`.
    fn is_trusted(&self) -> bool {
        self.is_trusted.load(std::sync::atomic::Ordering::Acquire)
    }
1043}