1use std::path::Path;
2
3use crate::{FakeFs, FakeFsEntry, Fs, RemoveOptions, RenameOptions};
4use anyhow::{Context as _, Result, bail};
5use collections::{HashMap, HashSet};
6use futures::future::{self, BoxFuture, join_all};
7use git::repository::GitCommitTemplate;
8use git::{
9 Oid, RunHook,
10 blame::Blame,
11 repository::{
12 AskPassDelegate, Branch, CommitData, CommitDataReader, CommitDetails, CommitOptions,
13 CreateWorktreeTarget, FetchOptions, GRAPH_CHUNK_SIZE, GitRepository,
14 GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder, LogSource, PushOptions, RefEdit,
15 Remote, RepoPath, ResetMode, SearchCommitArgs, Worktree,
16 },
17 stash::GitStash,
18 status::{
19 DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
20 UnmergedStatus,
21 },
22};
23use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task};
24use ignore::gitignore::GitignoreBuilder;
25use parking_lot::Mutex;
26use rope::Rope;
27use smol::{channel::Sender, future::FutureExt as _};
28use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool};
29use text::LineEnding;
30use util::{paths::PathStyle, rel_path::RelPath};
31
/// An in-memory Git repository backed by a [`FakeFs`], used by tests in place
/// of a real on-disk repository.
#[derive(Clone)]
pub struct FakeGitRepository {
    pub(crate) fs: Arc<FakeFs>,
    // Checkpoint id -> captured filesystem entry; presumably snapshots for
    // checkpoint/restore — usage lives outside this file, verify there.
    pub(crate) checkpoints: Arc<Mutex<HashMap<Oid, FakeFsEntry>>>,
    pub(crate) executor: BackgroundExecutor,
    // Path of this checkout's `.git` (a directory, or a pointer file for
    // linked worktrees).
    pub(crate) dot_git_path: PathBuf,
    pub(crate) repository_dir_path: PathBuf,
    // The main repository's `.git` directory, shared by all linked worktrees.
    pub(crate) common_dir_path: PathBuf,
    pub(crate) is_trusted: Arc<AtomicBool>,
}
42
/// Snapshot of repository contents captured by `commit` (the *pre-commit*
/// state) and restored by `reset`.
#[derive(Debug, Clone)]
pub struct FakeCommitSnapshot {
    // HEAD tree contents at snapshot time, keyed by repo-relative path.
    pub head_contents: HashMap<RepoPath, String>,
    // Index (staging area) contents at snapshot time.
    pub index_contents: HashMap<RepoPath, String>,
    // The sha HEAD pointed at when the snapshot was taken.
    pub sha: String,
}
49
/// Scripted outcome for a commit-data lookup: either the data is returned
/// successfully or the lookup is simulated to fail. (Consumed outside this
/// chunk — confirm exact failure semantics at the call site.)
#[derive(Debug, Clone)]
pub enum FakeCommitDataEntry {
    Success(CommitData),
    Fail(CommitData),
}
55
/// The mutable state behind a [`FakeGitRepository`], stored inside the fake
/// filesystem and shared by every handle to the same `.git` path.
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    // Snapshots pushed by `commit` and consumed by `reset`.
    pub commit_history: Vec<FakeCommitSnapshot>,
    // Channel for git-change events; presumably driven by
    // `FakeFs::with_git_state` when `write` is true — confirm in FakeFs.
    pub event_emitter: smol::channel::Sender<PathBuf>,
    pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
    // Contents of the current HEAD tree, keyed by repo-relative path.
    pub head_contents: HashMap<RepoPath, String>,
    // Contents of the index (staging area), keyed by repo-relative path.
    pub index_contents: HashMap<RepoPath, String>,
    // everything in commit contents is in oids
    pub merge_base_contents: HashMap<RepoPath, Oid>,
    // Blob contents keyed by oid; `diff_tree` indexes into this map directly
    // and relies on the invariant stated above.
    pub oids: HashMap<Oid, String>,
    pub blames: HashMap<RepoPath, Blame>,
    pub current_branch_name: Option<String>,
    pub branches: HashSet<String>,
    /// List of remotes, keys are names and values are URLs
    pub remotes: HashMap<String, String>,
    // When set, `set_index_text` fails with this message.
    pub simulated_index_write_error_message: Option<String>,
    // When set, `create_worktree` fails with this message.
    pub simulated_create_worktree_error: Option<String>,
    pub simulated_graph_error: Option<String>,
    // Ref name (e.g. "HEAD", "refs/heads/main") -> commit sha.
    pub refs: HashMap<String, String>,
    pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
    // Scripted per-commit lookup results, see `FakeCommitDataEntry`.
    pub commit_data: HashMap<Oid, FakeCommitDataEntry>,
    pub stash_entries: GitStash,
}
79
80impl FakeGitRepositoryState {
81 pub fn new(event_emitter: smol::channel::Sender<PathBuf>) -> Self {
82 FakeGitRepositoryState {
83 event_emitter,
84 head_contents: Default::default(),
85 index_contents: Default::default(),
86 unmerged_paths: Default::default(),
87 blames: Default::default(),
88 current_branch_name: Default::default(),
89 branches: Default::default(),
90 simulated_index_write_error_message: Default::default(),
91 simulated_create_worktree_error: Default::default(),
92 simulated_graph_error: None,
93 refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
94 merge_base_contents: Default::default(),
95 oids: Default::default(),
96 remotes: HashMap::default(),
97 graph_commits: Vec::new(),
98 commit_data: Default::default(),
99 commit_history: Vec::new(),
100 stash_entries: Default::default(),
101 }
102 }
103}
104
impl FakeGitRepository {
    /// Runs `f` against this repository's shared state after a simulated
    /// random delay, returning a `'static` future. `write` marks the access
    /// as a mutation of the git state (presumably triggering an event via
    /// `FakeFs::with_git_state` — confirm there).
    fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<'static, Result<T>>
    where
        F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> Result<T>,
        T: Send,
    {
        // Clone the handles up front so the future doesn't borrow `self`.
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // `with_git_state` returns Result<Result<T>>; `?` flattens the
            // outer layer, leaving the closure's own result.
            fs.with_git_state(&dot_git_path, write, f)?
        }
        .boxed()
    }

    /// Applies a single ref update or deletion to the fake ref table.
    fn edit_ref(&self, edit: RefEdit) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            match edit {
                RefEdit::Update { ref_name, commit } => {
                    state.refs.insert(ref_name, commit);
                }
                RefEdit::Delete { ref_name } => {
                    state.refs.remove(&ref_name);
                }
            }
            Ok(())
        })
    }

    /// Scans `.git/worktrees/*/gitdir` to find the admin entry directory for a
    /// worktree at the given checkout path. Used when the working tree directory
    /// has already been deleted and we can't read its `.git` pointer file.
    async fn find_worktree_entry_dir_by_path(&self, path: &Path) -> Option<PathBuf> {
        use futures::StreamExt;

        let worktrees_dir = self.common_dir_path.join("worktrees");
        let mut entries = self.fs.read_dir(&worktrees_dir).await.ok()?;
        while let Some(Ok(entry_path)) = entries.next().await {
            if let Ok(gitdir_content) = self.fs.load(&entry_path.join("gitdir")).await {
                // The `gitdir` file stores the path of the checkout's `.git`
                // pointer file; its parent is the checkout directory itself.
                let worktree_path = PathBuf::from(gitdir_content.trim())
                    .parent()
                    .map(PathBuf::from)
                    .unwrap_or_default();
                if worktree_path == path {
                    return Some(entry_path);
                }
            }
        }
        None
    }
}
157
158impl GitRepository for FakeGitRepository {
    // No-op: the fake repository has no on-disk index to reload.
    fn reload_index(&self) {}
160
161 fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
162 let fut = self.with_state_async(false, move |state| {
163 state
164 .index_contents
165 .get(&path)
166 .context("not present in index")
167 .cloned()
168 });
169 self.executor.spawn(async move { fut.await.ok() }).boxed()
170 }
171
172 fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
173 let fut = self.with_state_async(false, move |state| {
174 state
175 .head_contents
176 .get(&path)
177 .context("not present in HEAD")
178 .cloned()
179 });
180 self.executor.spawn(async move { fut.await.ok() }).boxed()
181 }
182
183 fn load_commit_template(&self) -> BoxFuture<'_, Result<Option<GitCommitTemplate>>> {
184 async { Ok(None) }.boxed()
185 }
186
187 fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
188 self.with_state_async(false, move |state| {
189 state.oids.get(&oid).cloned().context("oid does not exist")
190 })
191 .boxed()
192 }
193
    /// Not supported by the fake repository; panics if called.
    fn load_commit(
        &self,
        _commit: String,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::CommitDiff>> {
        unimplemented!()
    }
201
202 fn set_index_text(
203 &self,
204 path: RepoPath,
205 content: Option<String>,
206 _env: Arc<HashMap<String, String>>,
207 _is_executable: bool,
208 ) -> BoxFuture<'_, anyhow::Result<()>> {
209 self.with_state_async(true, move |state| {
210 if let Some(message) = &state.simulated_index_write_error_message {
211 anyhow::bail!("{message}");
212 } else if let Some(content) = content {
213 state.index_contents.insert(path, content);
214 } else {
215 state.index_contents.remove(&path);
216 }
217 Ok(())
218 })
219 }
220
221 fn remote_url(&self, name: &str) -> BoxFuture<'_, Option<String>> {
222 let name = name.to_string();
223 let fut = self.with_state_async(false, move |state| {
224 state
225 .remotes
226 .get(&name)
227 .context("remote not found")
228 .cloned()
229 });
230 async move { fut.await.ok() }.boxed()
231 }
232
233 fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
234 let mut entries = HashMap::default();
235 self.with_state_async(false, |state| {
236 for (path, content) in &state.head_contents {
237 let status = if let Some((oid, original)) = state
238 .merge_base_contents
239 .get(path)
240 .map(|oid| (oid, &state.oids[oid]))
241 {
242 if original == content {
243 continue;
244 }
245 TreeDiffStatus::Modified { old: *oid }
246 } else {
247 TreeDiffStatus::Added
248 };
249 entries.insert(path.clone(), status);
250 }
251 for (path, oid) in &state.merge_base_contents {
252 if !entries.contains_key(path) {
253 entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
254 }
255 }
256 Ok(TreeDiff { entries })
257 })
258 .boxed()
259 }
260
261 fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
262 self.with_state_async(false, |state| {
263 Ok(revs
264 .into_iter()
265 .map(|rev| state.refs.get(&rev).cloned())
266 .collect())
267 })
268 }
269
270 fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
271 async {
272 Ok(CommitDetails {
273 sha: commit.into(),
274 message: "initial commit".into(),
275 ..Default::default()
276 })
277 }
278 .boxed()
279 }
280
    /// Resets HEAD to an earlier commit. Accepts `HEAD~`, `HEAD^`, `HEAD~N`,
    /// or a sha previously recorded in `commit_history`.
    fn reset(
        &self,
        commit: String,
        mode: ResetMode,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            // Translate the commit-ish into "how many commits to pop".
            let pop_count = if commit == "HEAD~" || commit == "HEAD^" {
                1
            } else if let Some(suffix) = commit.strip_prefix("HEAD~") {
                suffix
                    .parse::<usize>()
                    .with_context(|| format!("Invalid HEAD~ offset: {commit}"))?
            } else {
                // `rposition` finds the most recent snapshot with this sha.
                match state
                    .commit_history
                    .iter()
                    .rposition(|entry| entry.sha == commit)
                {
                    Some(index) => state.commit_history.len() - index,
                    None => anyhow::bail!("Unknown commit ref: {commit}"),
                }
            };

            if pop_count == 0 || pop_count > state.commit_history.len() {
                anyhow::bail!(
                    "Cannot reset {pop_count} commit(s): only {} in history",
                    state.commit_history.len()
                );
            }

            // Restore the snapshot taken just before the target commit and
            // discard all later snapshots.
            let target_index = state.commit_history.len() - pop_count;
            let snapshot = state.commit_history[target_index].clone();
            state.commit_history.truncate(target_index);

            match mode {
                ResetMode::Soft => {
                    // Soft reset: move HEAD only, leave the index untouched.
                    state.head_contents = snapshot.head_contents;
                }
                ResetMode::Mixed => {
                    // Mixed reset: also reset the index to match HEAD.
                    state.head_contents = snapshot.head_contents;
                    state.index_contents = state.head_contents.clone();
                }
            }

            state.refs.insert("HEAD".into(), snapshot.sha);
            Ok(())
        })
    }
330
    /// Not supported by the fake repository; panics if called.
    fn checkout_files(
        &self,
        _commit: String,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
339
340 fn path(&self) -> PathBuf {
341 self.repository_dir_path.clone()
342 }
343
344 fn main_repository_path(&self) -> PathBuf {
345 self.common_dir_path.clone()
346 }
347
348 fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
349 async move { None }.boxed()
350 }
351
    /// Computes `git status` by diffing HEAD, the index, and the working copy
    /// (read from the fake filesystem), restricted to `path_prefixes`.
    ///
    /// NOTE(review): unlike `diff_stat`, an empty `path_prefixes` slice
    /// matches *nothing* here — callers appear to always pass at least one
    /// prefix. Confirm before relying on this.
    fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>> {
        let workdir_path = self.dot_git_path.parent().unwrap();

        // Load gitignores from the working directory and its ancestors.
        // A gitignore is silently dropped if any of its lines fails to parse.
        let ignores = workdir_path
            .ancestors()
            .filter_map(|dir| {
                let ignore_path = dir.join(".gitignore");
                let content = self.fs.read_file_sync(ignore_path).ok()?;
                let content = String::from_utf8(content).ok()?;
                let mut builder = GitignoreBuilder::new(dir);
                for line in content.lines() {
                    builder.add_line(Some(dir.into()), line).ok()?;
                }
                builder.build().ok()
            })
            .collect::<Vec<_>>();

        // Load working copy files.
        let git_files: HashMap<RepoPath, (String, bool)> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                // TODO better simulate git status output in the case of submodules and worktrees
                let repo_path = path.strip_prefix(workdir_path).ok()?;
                let mut is_ignored = repo_path.starts_with(".git");
                // `ignores` holds the deepest gitignore first (workdir, then
                // ancestors); a whitelist match stops the scan so shallower
                // files cannot override it.
                for ignore in &ignores {
                    match ignore.matched_path_or_any_parents(path, false) {
                        ignore::Match::None => {}
                        ignore::Match::Ignore(_) => is_ignored = true,
                        ignore::Match::Whitelist(_) => break,
                    }
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .map(|content| String::from_utf8(content).unwrap())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored)))
            })
            .collect();

        let result = self.fs.with_git_state(&self.dot_git_path, false, |state| {
            let mut entries = Vec::new();
            // Consider every path known to HEAD, the index, or the worktree.
            let paths = state
                .head_contents
                .keys()
                .chain(state.index_contents.keys())
                .chain(git_files.keys())
                .collect::<HashSet<_>>();
            for path in paths {
                if !path_prefixes.iter().any(|prefix| path.starts_with(prefix)) {
                    continue;
                }

                let head = state.head_contents.get(path);
                let index = state.index_contents.get(path);
                let unmerged = state.unmerged_paths.get(path);
                let fs = git_files.get(path);
                // Derive the status from which of HEAD / index / worktree
                // contain the path and whether their contents agree.
                let status = match (unmerged, head, index, fs) {
                    (Some(unmerged), _, _, _) => FileStatus::Unmerged(*unmerged),
                    (_, Some(head), Some(index), Some((fs, _))) => {
                        FileStatus::Tracked(TrackedStatus {
                            index_status: if head == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                            worktree_status: if fs == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                        })
                    }
                    (_, Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: if head == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Added,
                    }),
                    (_, Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, None, Some(index), Some((fs, _))) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: if fs == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                    }),
                    (_, None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, None, None, Some((_, is_ignored))) => {
                        if *is_ignored {
                            continue;
                        }
                        FileStatus::Untracked
                    }
                    (_, None, None, None) => {
                        // `paths` is the union of the three key sets, so at
                        // least one source must contain the path.
                        unreachable!();
                    }
                };
                // Fully unmodified entries are omitted, as in real git status.
                if status
                    != FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Unmodified,
                        worktree_status: StatusCode::Unmodified,
                    })
                {
                    entries.push((path.clone(), status));
                }
            }
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            anyhow::Ok(GitStatus {
                entries: entries.into(),
            })
        });
        Task::ready(match result {
            Ok(result) => result,
            Err(e) => Err(e),
        })
    }
486
487 fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
488 self.with_state_async(false, |state| Ok(state.stash_entries.clone()))
489 }
490
491 fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
492 self.with_state_async(false, move |state| {
493 let current_branch = &state.current_branch_name;
494 let mut branches = state
495 .branches
496 .iter()
497 .map(|branch_name| {
498 let ref_name = if branch_name.starts_with("refs/") {
499 branch_name.into()
500 } else if branch_name.contains('/') {
501 format!("refs/remotes/{branch_name}").into()
502 } else {
503 format!("refs/heads/{branch_name}").into()
504 };
505 Branch {
506 is_head: Some(branch_name) == current_branch.as_ref(),
507 ref_name,
508 most_recent_commit: None,
509 upstream: None,
510 }
511 })
512 .collect::<Vec<_>>();
513 // compute snapshot expects these to be sorted by ref_name
514 // because that's what git itself does
515 branches.sort_by(|a, b| a.ref_name.cmp(&b.ref_name));
516 Ok(branches)
517 })
518 }
519
    /// Lists the main worktree plus any linked worktrees recorded under
    /// `<common dir>/worktrees/`.
    fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
        let fs = self.fs.clone();
        let common_dir_path = self.common_dir_path.clone();
        let executor = self.executor.clone();

        async move {
            executor.simulate_random_delay().await;

            // The main worktree is derived from the repository state itself.
            let (main_worktree, refs) = fs.with_git_state(&common_dir_path, false, |state| {
                let work_dir = common_dir_path
                    .parent()
                    .map(PathBuf::from)
                    .unwrap_or_else(|| common_dir_path.clone());
                let head_sha = state
                    .refs
                    .get("HEAD")
                    .cloned()
                    .unwrap_or_else(|| "0000000".to_string());
                let branch_ref = state
                    .current_branch_name
                    .as_ref()
                    .map(|name| format!("refs/heads/{name}"))
                    .unwrap_or_else(|| "refs/heads/main".to_string());
                let main_wt = Worktree {
                    path: work_dir,
                    ref_name: Some(branch_ref.into()),
                    sha: head_sha.into(),
                    is_main: true,
                    is_bare: false,
                };
                (main_wt, state.refs.clone())
            })?;

            let mut all = vec![main_worktree];

            // Each linked worktree has an admin directory containing `HEAD`
            // and `gitdir` files (written by `create_worktree`).
            let worktrees_dir = common_dir_path.join("worktrees");
            if let Ok(mut entries) = fs.read_dir(&worktrees_dir).await {
                use futures::StreamExt;
                while let Some(Ok(entry_path)) = entries.next().await {
                    let head_content = match fs.load(&entry_path.join("HEAD")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };
                    let gitdir_content = match fs.load(&entry_path.join("gitdir")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };

                    // A symbolic HEAD ("ref: refs/heads/x") resolves through
                    // the ref table; a detached HEAD stores the sha directly.
                    let ref_name = head_content
                        .strip_prefix("ref: ")
                        .map(|s| s.trim().to_string());
                    let sha = ref_name
                        .as_ref()
                        .and_then(|r| refs.get(r))
                        .cloned()
                        .unwrap_or_else(|| head_content.trim().to_string());

                    // `gitdir` stores the checkout's `.git` pointer file; its
                    // parent is the checkout directory.
                    let worktree_path = PathBuf::from(gitdir_content.trim())
                        .parent()
                        .map(PathBuf::from)
                        .unwrap_or_default();

                    all.push(Worktree {
                        path: worktree_path,
                        ref_name: ref_name.map(Into::into),
                        sha: sha.into(),
                        is_main: false,
                        is_bare: false,
                    });
                }
            }

            Ok(all)
        }
        .boxed()
    }
596
    /// Creates a linked worktree at `path`, mirroring git's on-disk layout:
    /// a `.git` pointer file in the checkout plus an admin directory under
    /// `<common dir>/worktrees/<name>/` containing `HEAD`, `commondir`, and
    /// `gitdir`.
    fn create_worktree(
        &self,
        target: CreateWorktreeTarget,
        path: PathBuf,
    ) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            let branch_name = target.branch_name().map(ToOwned::to_owned);
            let create_branch_ref = matches!(target, CreateWorktreeTarget::NewBranch { .. });

            // Check for simulated error and validate branch state before any side effects.
            fs.with_git_state(&dot_git_path, false, {
                let branch_name = branch_name.clone();
                move |state| {
                    if let Some(message) = &state.simulated_create_worktree_error {
                        anyhow::bail!("{message}");
                    }

                    // A new branch must not exist yet; an existing branch must.
                    match (create_branch_ref, branch_name.as_ref()) {
                        (true, Some(branch_name)) => {
                            if state.branches.contains(branch_name) {
                                bail!("a branch named '{}' already exists", branch_name);
                            }
                        }
                        (false, Some(branch_name)) => {
                            if !state.branches.contains(branch_name) {
                                bail!("no branch named '{}' exists", branch_name);
                            }
                        }
                        (false, None) => {}
                        (true, None) => bail!("branch name is required to create a branch"),
                    }

                    Ok(())
                }
            })??;

            // Resolve the branch name and starting sha for each target kind.
            let (branch_name, sha, create_branch_ref) = match target {
                CreateWorktreeTarget::ExistingBranch { branch_name } => {
                    let ref_name = format!("refs/heads/{branch_name}");
                    let sha = fs.with_git_state(&dot_git_path, false, {
                        move |state| {
                            Ok::<_, anyhow::Error>(
                                state
                                    .refs
                                    .get(&ref_name)
                                    .cloned()
                                    .unwrap_or_else(|| "fake-sha".to_string()),
                            )
                        }
                    })??;
                    (Some(branch_name), sha, false)
                }
                CreateWorktreeTarget::NewBranch {
                    branch_name,
                    base_sha: start_point,
                } => (
                    Some(branch_name),
                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
                    true,
                ),
                CreateWorktreeTarget::Detached {
                    base_sha: start_point,
                } => (
                    None,
                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
                    false,
                ),
            };

            // Create the worktree checkout directory.
            fs.create_dir(&path).await?;

            // Create .git/worktrees/<name>/ directory with HEAD, commondir, gitdir.
            let worktree_entry_name = branch_name.as_deref().unwrap_or_else(|| {
                path.file_name()
                    .and_then(|name| name.to_str())
                    .unwrap_or("detached")
            });
            let worktrees_entry_dir = common_dir_path.join("worktrees").join(worktree_entry_name);
            fs.create_dir(&worktrees_entry_dir).await?;

            // HEAD is symbolic for branch checkouts, a bare sha when detached.
            let head_content = if let Some(ref branch_name) = branch_name {
                let ref_name = format!("refs/heads/{branch_name}");
                format!("ref: {ref_name}")
            } else {
                sha.clone()
            };
            fs.write_file_internal(
                worktrees_entry_dir.join("HEAD"),
                head_content.into_bytes(),
                false,
            )?;
            fs.write_file_internal(
                worktrees_entry_dir.join("commondir"),
                common_dir_path.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;
            let worktree_dot_git = path.join(".git");
            fs.write_file_internal(
                worktrees_entry_dir.join("gitdir"),
                worktree_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Create .git file in the worktree checkout.
            fs.write_file_internal(
                &worktree_dot_git,
                format!("gitdir: {}", worktrees_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Update git state for newly created branches.
            if create_branch_ref {
                fs.with_git_state(&dot_git_path, true, {
                    let branch_name = branch_name.clone();
                    let sha = sha.clone();
                    move |state| {
                        if let Some(branch_name) = branch_name {
                            let ref_name = format!("refs/heads/{branch_name}");
                            state.refs.insert(ref_name, sha);
                            state.branches.insert(branch_name);
                        }
                        Ok::<(), anyhow::Error>(())
                    }
                })??;
            }

            Ok(())
        }
        .boxed()
    }
734
    /// Removes the worktree checked out at `path` along with its admin entry
    /// under `<common dir>/worktrees/`.
    fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Try to read the worktree's .git file to find its entry
            // directory. If the working tree is already gone (e.g. the
            // caller deleted it before asking git to clean up), fall back
            // to scanning `.git/worktrees/*/gitdir` for a matching path,
            // mirroring real git's behavior with `--force`.
            let dot_git_file = path.join(".git");
            let worktree_entry_dir = if let Ok(content) = fs.load(&dot_git_file).await {
                let gitdir = content
                    .strip_prefix("gitdir:")
                    .context("invalid .git file in worktree")?
                    .trim();
                PathBuf::from(gitdir)
            } else {
                self.find_worktree_entry_dir_by_path(&path)
                    .await
                    .with_context(|| format!("no worktree found at path: {}", path.display()))?
            };

            // Remove the worktree checkout directory if it still exists.
            fs.remove_dir(
                &path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await?;

            // Remove the .git/worktrees/<name>/ directory.
            fs.remove_dir(
                &worktree_entry_dir,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: false,
                },
            )
            .await?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
788
    /// Moves a worktree checkout from `old_path` to `new_path`, updating the
    /// `gitdir` pointer on both sides so the link stays intact.
    fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Read the worktree's .git file to find its entry directory.
            let dot_git_file = old_path.join(".git");
            let content = fs
                .load(&dot_git_file)
                .await
                .with_context(|| format!("no worktree found at path: {}", old_path.display()))?;
            let gitdir = content
                .strip_prefix("gitdir:")
                .context("invalid .git file in worktree")?
                .trim();
            let worktree_entry_dir = PathBuf::from(gitdir);

            // Move the worktree checkout directory.
            fs.rename(
                &old_path,
                &new_path,
                RenameOptions {
                    overwrite: false,
                    ignore_if_exists: false,
                    create_parents: true,
                },
            )
            .await?;

            // Update the gitdir file in .git/worktrees/<name>/ to point to the
            // new location.
            let new_dot_git = new_path.join(".git");
            fs.write_file_internal(
                worktree_entry_dir.join("gitdir"),
                new_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Update the .git file in the moved worktree checkout.
            fs.write_file_internal(
                &new_dot_git,
                format!("gitdir: {}", worktree_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
844
845 fn checkout_branch_in_worktree(
846 &self,
847 _branch_name: String,
848 _worktree_path: PathBuf,
849 _create: bool,
850 ) -> BoxFuture<'_, Result<()>> {
851 async { Ok(()) }.boxed()
852 }
853
854 fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
855 self.with_state_async(true, |state| {
856 state.current_branch_name = Some(name);
857 Ok(())
858 })
859 }
860
861 fn create_branch(
862 &self,
863 name: String,
864 _base_branch: Option<String>,
865 ) -> BoxFuture<'_, Result<()>> {
866 self.with_state_async(true, move |state| {
867 if let Some((remote, _)) = name.split_once('/')
868 && !state.remotes.contains_key(remote)
869 {
870 state.remotes.insert(remote.to_owned(), "".to_owned());
871 }
872 state.branches.insert(name);
873 Ok(())
874 })
875 }
876
877 fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> {
878 self.with_state_async(true, move |state| {
879 if !state.branches.remove(&branch) {
880 bail!("no such branch: {branch}");
881 }
882 state.branches.insert(new_name.clone());
883 if state.current_branch_name == Some(branch) {
884 state.current_branch_name = Some(new_name);
885 }
886 Ok(())
887 })
888 }
889
890 fn delete_branch(&self, _is_remote: bool, name: String) -> BoxFuture<'_, Result<()>> {
891 self.with_state_async(true, move |state| {
892 if !state.branches.remove(&name) {
893 bail!("no such branch: {name}");
894 }
895 Ok(())
896 })
897 }
898
899 fn blame(
900 &self,
901 path: RepoPath,
902 _content: Rope,
903 _line_ending: LineEnding,
904 ) -> BoxFuture<'_, Result<git::blame::Blame>> {
905 self.with_state_async(false, move |state| {
906 state
907 .blames
908 .get(&path)
909 .with_context(|| format!("failed to get blame for {:?}", path))
910 .cloned()
911 })
912 }
913
    /// Delegates to `file_history_paginated` with no skip and no limit.
    fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
917
918 fn file_history_paginated(
919 &self,
920 path: RepoPath,
921 _skip: usize,
922 _limit: Option<usize>,
923 ) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
924 async move {
925 Ok(git::repository::FileHistory {
926 entries: Vec::new(),
927 path,
928 })
929 }
930 .boxed()
931 }
932
    /// Stages `paths` by copying their current working-copy contents into the
    /// index; a path missing from the filesystem is removed from the index
    /// (i.e. its deletion is staged).
    fn stage_paths(
        &self,
        paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        Box::pin(async move {
            // Read all file contents concurrently before touching git state.
            let contents = paths
                .into_iter()
                .map(|path| {
                    let abs_path = self
                        .dot_git_path
                        .parent()
                        .unwrap()
                        .join(&path.as_std_path());
                    Box::pin(async move { (path.clone(), self.fs.load(&abs_path).await.ok()) })
                })
                .collect::<Vec<_>>();
            let contents = join_all(contents).await;
            self.with_state_async(true, move |state| {
                for (path, content) in contents {
                    if let Some(content) = content {
                        state.index_contents.insert(path, content);
                    } else {
                        state.index_contents.remove(&path);
                    }
                }
                Ok(())
            })
            .await
        })
    }
964
965 fn unstage_paths(
966 &self,
967 paths: Vec<RepoPath>,
968 _env: Arc<HashMap<String, String>>,
969 ) -> BoxFuture<'_, Result<()>> {
970 self.with_state_async(true, move |state| {
971 for path in paths {
972 match state.head_contents.get(&path) {
973 Some(content) => state.index_contents.insert(path, content.clone()),
974 None => state.index_contents.remove(&path),
975 };
976 }
977 Ok(())
978 })
979 }
980
    /// Not supported by the fake repository; panics if called.
    fn stash_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
988
    /// Not supported by the fake repository; panics if called.
    fn stash_pop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
996
    /// Not supported by the fake repository; panics if called.
    fn stash_apply(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
1004
    /// Not supported by the fake repository; panics if called.
    fn stash_drop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
1012
    /// Records a commit: snapshots the current state onto `commit_history`,
    /// promotes the index to HEAD, and advances the `HEAD` ref to a fresh
    /// fake sha. The message and author are ignored by the fake.
    fn commit(
        &self,
        _message: gpui::SharedString,
        _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
        options: CommitOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            // `amend` only bypasses the emptiness check here; it still records
            // a new snapshot rather than rewriting the previous one.
            if !options.allow_empty && !options.amend && state.index_contents == state.head_contents
            {
                anyhow::bail!("nothing to commit (use allow_empty to create an empty commit)");
            }

            // The snapshot stores the *pre-commit* state, tagged with the sha
            // HEAD pointed at before this commit (see `reset`).
            let old_sha = state.refs.get("HEAD").cloned().unwrap_or_default();
            state.commit_history.push(FakeCommitSnapshot {
                head_contents: state.head_contents.clone(),
                index_contents: state.index_contents.clone(),
                sha: old_sha,
            });

            state.head_contents = state.index_contents.clone();

            let new_sha = format!("fake-commit-{}", state.commit_history.len());
            state.refs.insert("HEAD".into(), new_sha);

            Ok(())
        })
    }
1042
1043 fn run_hook(
1044 &self,
1045 _hook: RunHook,
1046 _env: Arc<HashMap<String, String>>,
1047 ) -> BoxFuture<'_, Result<()>> {
1048 async { Ok(()) }.boxed()
1049 }
1050
    /// Not supported by the fake repository; panics if called.
    fn push(
        &self,
        _branch: String,
        _remote_branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
1063
    /// Not supported by the fake repository; panics if called.
    fn pull(
        &self,
        _branch: Option<String>,
        _remote: String,
        _rebase: bool,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
1075
    /// Not supported by the fake repository; panics if called.
    fn fetch(
        &self,
        _fetch_options: FetchOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
1085
1086 fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
1087 self.with_state_async(false, move |state| {
1088 let remotes = state
1089 .remotes
1090 .keys()
1091 .map(|r| Remote {
1092 name: r.clone().into(),
1093 })
1094 .collect::<Vec<_>>();
1095 Ok(remotes)
1096 })
1097 }
1098
    /// Not supported by the fake repository; panics if called.
    fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }
1102
    /// Not supported by the fake repository; panics if called.
    fn get_branch_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }
1106
1107 fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
1108 future::ready(Ok(Vec::new())).boxed()
1109 }
1110
1111 fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result<String>> {
1112 future::ready(Ok(String::new())).boxed()
1113 }
1114
1115 fn diff_stat(
1116 &self,
1117 path_prefixes: &[RepoPath],
1118 ) -> BoxFuture<'_, Result<git::status::GitDiffStat>> {
1119 fn count_lines(s: &str) -> u32 {
1120 if s.is_empty() {
1121 0
1122 } else {
1123 s.lines().count() as u32
1124 }
1125 }
1126
1127 fn matches_prefixes(path: &RepoPath, prefixes: &[RepoPath]) -> bool {
1128 if prefixes.is_empty() {
1129 return true;
1130 }
1131 prefixes.iter().any(|prefix| {
1132 let prefix_str = prefix.as_unix_str();
1133 if prefix_str == "." {
1134 return true;
1135 }
1136 path == prefix || path.starts_with(&prefix)
1137 })
1138 }
1139
1140 let path_prefixes = path_prefixes.to_vec();
1141
1142 let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf();
1143 let worktree_files: HashMap<RepoPath, String> = self
1144 .fs
1145 .files()
1146 .iter()
1147 .filter_map(|path| {
1148 let repo_path = path.strip_prefix(&workdir_path).ok()?;
1149 if repo_path.starts_with(".git") {
1150 return None;
1151 }
1152 let content = self
1153 .fs
1154 .read_file_sync(path)
1155 .ok()
1156 .and_then(|bytes| String::from_utf8(bytes).ok())?;
1157 let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
1158 Some((RepoPath::from_rel_path(&repo_path), content))
1159 })
1160 .collect();
1161
1162 self.with_state_async(false, move |state| {
1163 let mut entries = Vec::new();
1164 let all_paths: HashSet<&RepoPath> = state
1165 .head_contents
1166 .keys()
1167 .chain(
1168 worktree_files
1169 .keys()
1170 .filter(|p| state.index_contents.contains_key(*p)),
1171 )
1172 .collect();
1173 for path in all_paths {
1174 if !matches_prefixes(path, &path_prefixes) {
1175 continue;
1176 }
1177 let head = state.head_contents.get(path);
1178 let worktree = worktree_files.get(path);
1179 match (head, worktree) {
1180 (Some(old), Some(new)) if old != new => {
1181 entries.push((
1182 path.clone(),
1183 git::status::DiffStat {
1184 added: count_lines(new),
1185 deleted: count_lines(old),
1186 },
1187 ));
1188 }
1189 (Some(old), None) => {
1190 entries.push((
1191 path.clone(),
1192 git::status::DiffStat {
1193 added: 0,
1194 deleted: count_lines(old),
1195 },
1196 ));
1197 }
1198 (None, Some(new)) => {
1199 entries.push((
1200 path.clone(),
1201 git::status::DiffStat {
1202 added: count_lines(new),
1203 deleted: 0,
1204 },
1205 ));
1206 }
1207 _ => {}
1208 }
1209 }
1210 entries.sort_by(|(a, _), (b, _)| a.cmp(b));
1211 Ok(git::status::GitDiffStat {
1212 entries: entries.into(),
1213 })
1214 })
1215 .boxed()
1216 }
1217
1218 fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
1219 let executor = self.executor.clone();
1220 let fs = self.fs.clone();
1221 let checkpoints = self.checkpoints.clone();
1222 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1223 async move {
1224 executor.simulate_random_delay().await;
1225 let oid = git::Oid::random(&mut *executor.rng().lock());
1226 let entry = fs.entry(&repository_dir_path)?;
1227 checkpoints.lock().insert(oid, entry);
1228 Ok(GitRepositoryCheckpoint { commit_sha: oid })
1229 }
1230 .boxed()
1231 }
1232
1233 fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
1234 let executor = self.executor.clone();
1235 let fs = self.fs.clone();
1236 let checkpoints = self.checkpoints.clone();
1237 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1238 async move {
1239 executor.simulate_random_delay().await;
1240 let checkpoints = checkpoints.lock();
1241 let entry = checkpoints
1242 .get(&checkpoint.commit_sha)
1243 .context(format!("invalid checkpoint: {}", checkpoint.commit_sha))?;
1244 fs.insert_entry(&repository_dir_path, entry.clone())?;
1245 Ok(())
1246 }
1247 .boxed()
1248 }
1249
1250 fn create_archive_checkpoint(&self) -> BoxFuture<'_, Result<(String, String)>> {
1251 let executor = self.executor.clone();
1252 let fs = self.fs.clone();
1253 let checkpoints = self.checkpoints.clone();
1254 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1255 async move {
1256 executor.simulate_random_delay().await;
1257 let staged_oid = git::Oid::random(&mut *executor.rng().lock());
1258 let unstaged_oid = git::Oid::random(&mut *executor.rng().lock());
1259 let entry = fs.entry(&repository_dir_path)?;
1260 checkpoints.lock().insert(staged_oid, entry.clone());
1261 checkpoints.lock().insert(unstaged_oid, entry);
1262 Ok((staged_oid.to_string(), unstaged_oid.to_string()))
1263 }
1264 .boxed()
1265 }
1266
1267 fn restore_archive_checkpoint(
1268 &self,
1269 // The fake filesystem doesn't model a separate index, so only the
1270 // unstaged (full working directory) snapshot is restored.
1271 _staged_sha: String,
1272 unstaged_sha: String,
1273 ) -> BoxFuture<'_, Result<()>> {
1274 match unstaged_sha.parse() {
1275 Ok(commit_sha) => self.restore_checkpoint(GitRepositoryCheckpoint { commit_sha }),
1276 Err(error) => async move {
1277 Err(anyhow::anyhow!(error).context("failed to parse unstaged SHA as Oid"))
1278 }
1279 .boxed(),
1280 }
1281 }
1282
1283 fn compare_checkpoints(
1284 &self,
1285 left: GitRepositoryCheckpoint,
1286 right: GitRepositoryCheckpoint,
1287 ) -> BoxFuture<'_, Result<bool>> {
1288 let executor = self.executor.clone();
1289 let checkpoints = self.checkpoints.clone();
1290 async move {
1291 executor.simulate_random_delay().await;
1292 let checkpoints = checkpoints.lock();
1293 let left = checkpoints
1294 .get(&left.commit_sha)
1295 .context(format!("invalid left checkpoint: {}", left.commit_sha))?;
1296 let right = checkpoints
1297 .get(&right.commit_sha)
1298 .context(format!("invalid right checkpoint: {}", right.commit_sha))?;
1299
1300 Ok(left == right)
1301 }
1302 .boxed()
1303 }
1304
1305 fn diff_checkpoints(
1306 &self,
1307 base_checkpoint: GitRepositoryCheckpoint,
1308 target_checkpoint: GitRepositoryCheckpoint,
1309 ) -> BoxFuture<'_, Result<String>> {
1310 let executor = self.executor.clone();
1311 let checkpoints = self.checkpoints.clone();
1312 async move {
1313 executor.simulate_random_delay().await;
1314 let checkpoints = checkpoints.lock();
1315 let base = checkpoints
1316 .get(&base_checkpoint.commit_sha)
1317 .context(format!(
1318 "invalid base checkpoint: {}",
1319 base_checkpoint.commit_sha
1320 ))?;
1321 let target = checkpoints
1322 .get(&target_checkpoint.commit_sha)
1323 .context(format!(
1324 "invalid target checkpoint: {}",
1325 target_checkpoint.commit_sha
1326 ))?;
1327
1328 fn collect_files(
1329 entry: &FakeFsEntry,
1330 prefix: String,
1331 out: &mut std::collections::BTreeMap<String, String>,
1332 ) {
1333 match entry {
1334 FakeFsEntry::File { content, .. } => {
1335 out.insert(prefix, String::from_utf8_lossy(content).into_owned());
1336 }
1337 FakeFsEntry::Dir { entries, .. } => {
1338 for (name, child) in entries {
1339 let path = if prefix.is_empty() {
1340 name.clone()
1341 } else {
1342 format!("{prefix}/{name}")
1343 };
1344 collect_files(child, path, out);
1345 }
1346 }
1347 FakeFsEntry::Symlink { .. } => {}
1348 }
1349 }
1350
1351 let mut base_files = std::collections::BTreeMap::new();
1352 let mut target_files = std::collections::BTreeMap::new();
1353 collect_files(base, String::new(), &mut base_files);
1354 collect_files(target, String::new(), &mut target_files);
1355
1356 let all_paths: std::collections::BTreeSet<&String> =
1357 base_files.keys().chain(target_files.keys()).collect();
1358
1359 let mut diff = String::new();
1360 for path in all_paths {
1361 match (base_files.get(path), target_files.get(path)) {
1362 (Some(base_content), Some(target_content))
1363 if base_content != target_content =>
1364 {
1365 diff.push_str(&format!("diff --git a/{path} b/{path}\n"));
1366 diff.push_str(&format!("--- a/{path}\n"));
1367 diff.push_str(&format!("+++ b/{path}\n"));
1368 for line in base_content.lines() {
1369 diff.push_str(&format!("-{line}\n"));
1370 }
1371 for line in target_content.lines() {
1372 diff.push_str(&format!("+{line}\n"));
1373 }
1374 }
1375 (Some(_), None) => {
1376 diff.push_str(&format!("diff --git a/{path} /dev/null\n"));
1377 diff.push_str("deleted file\n");
1378 }
1379 (None, Some(_)) => {
1380 diff.push_str(&format!("diff --git /dev/null b/{path}\n"));
1381 diff.push_str("new file\n");
1382 }
1383 _ => {}
1384 }
1385 }
1386 Ok(diff)
1387 }
1388 .boxed()
1389 }
1390
1391 fn default_branch(
1392 &self,
1393 include_remote_name: bool,
1394 ) -> BoxFuture<'_, Result<Option<SharedString>>> {
1395 async move {
1396 Ok(Some(if include_remote_name {
1397 "origin/main".into()
1398 } else {
1399 "main".into()
1400 }))
1401 }
1402 .boxed()
1403 }
1404
    fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
        // Registers (or overwrites) a remote in the fake state.
        // NOTE(review): the `true` flag presumably marks the state as mutated
        // so watchers are notified — confirm against `with_state_async`.
        self.with_state_async(true, move |state| {
            state.remotes.insert(name, url);
            Ok(())
        })
    }
1411
1412 fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
1413 self.with_state_async(true, move |state| {
1414 state.branches.retain(|branch| {
1415 branch
1416 .split_once('/')
1417 .is_none_or(|(remote, _)| remote != name)
1418 });
1419 state.remotes.remove(&name);
1420 Ok(())
1421 })
1422 }
1423
1424 fn initial_graph_data(
1425 &self,
1426 _log_source: LogSource,
1427 _log_order: LogOrder,
1428 request_tx: Sender<Vec<Arc<InitialGraphCommitData>>>,
1429 ) -> BoxFuture<'_, Result<()>> {
1430 let fs = self.fs.clone();
1431 let dot_git_path = self.dot_git_path.clone();
1432 async move {
1433 let (graph_commits, simulated_error) =
1434 fs.with_git_state(&dot_git_path, false, |state| {
1435 (
1436 state.graph_commits.clone(),
1437 state.simulated_graph_error.clone(),
1438 )
1439 })?;
1440
1441 if let Some(error) = simulated_error {
1442 anyhow::bail!("{}", error);
1443 }
1444
1445 for chunk in graph_commits.chunks(GRAPH_CHUNK_SIZE) {
1446 request_tx.send(chunk.to_vec()).await.ok();
1447 }
1448 Ok(())
1449 }
1450 .boxed()
1451 }
1452
    fn search_commits(
        &self,
        _log_source: LogSource,
        _search_args: SearchCommitArgs,
        _request_tx: Sender<Oid>,
    ) -> BoxFuture<'_, Result<()>> {
        // Commit search is not modeled; resolves immediately with an error.
        async { bail!("search_commits not supported for FakeGitRepository") }.boxed()
    }
1461
1462 fn commit_data_reader(&self) -> Result<CommitDataReader> {
1463 let fs = self.fs.clone();
1464 let dot_git_path = self.dot_git_path.clone();
1465 let executor = self.executor.clone();
1466 Ok(CommitDataReader::for_test(executor, move |sha| {
1467 fs.with_git_state(&dot_git_path, false, |state| {
1468 let commit = state
1469 .commit_data
1470 .get(&sha)
1471 .context(format!("graph commit data not found for {sha}"))?;
1472
1473 match commit {
1474 FakeCommitDataEntry::Success(data) => Ok(data.clone()),
1475 FakeCommitDataEntry::Fail(_) => {
1476 bail!("simulated commit data read failure for {sha}")
1477 }
1478 }
1479 })?
1480 }))
1481 }
1482
    fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> {
        // Thin wrapper over `edit_ref`: point `ref_name` at `commit`.
        self.edit_ref(RefEdit::Update { ref_name, commit })
    }
1486
    fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> {
        // Thin wrapper over `edit_ref`: remove `ref_name`.
        self.edit_ref(RefEdit::Delete { ref_name })
    }
1490
1491 fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>> {
1492 async { Ok(()) }.boxed()
1493 }
1494
    fn set_trusted(&self, trusted: bool) {
        // Release store pairs with the Acquire load in `is_trusted`.
        self.is_trusted
            .store(trusted, std::sync::atomic::Ordering::Release);
    }
1499
    fn is_trusted(&self) -> bool {
        // Acquire load pairs with the Release store in `set_trusted`.
        self.is_trusted.load(std::sync::atomic::Ordering::Acquire)
    }
1503}