1use std::path::Path;
2
3use crate::{FakeFs, FakeFsEntry, Fs, RemoveOptions, RenameOptions};
4use anyhow::{Context as _, Result, bail};
5use async_channel::Sender;
6use collections::{HashMap, HashSet};
7use futures::FutureExt as _;
8use futures::future::{self, BoxFuture, join_all};
9use git::repository::GitCommitTemplate;
10use git::{
11 Oid, RunHook,
12 blame::Blame,
13 repository::{
14 AskPassDelegate, Branch, CommitData, CommitDataReader, CommitDetails, CommitOptions,
15 CreateWorktreeTarget, FetchOptions, GRAPH_CHUNK_SIZE, GitRepository,
16 GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder, LogSource, PushOptions, RefEdit,
17 Remote, RepoPath, ResetMode, SearchCommitArgs, Worktree,
18 },
19 stash::GitStash,
20 status::{
21 DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
22 UnmergedStatus,
23 },
24};
25use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task};
26use ignore::gitignore::GitignoreBuilder;
27use parking_lot::Mutex;
28use rope::Rope;
29use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool};
30use text::LineEnding;
31use util::{paths::PathStyle, rel_path::RelPath};
32
/// In-memory stand-in for a real git repository, backed by [`FakeFs`].
/// All mutable git state lives inside the fake filesystem and is looked up
/// via `dot_git_path`.
#[derive(Clone)]
pub struct FakeGitRepository {
    pub(crate) fs: Arc<FakeFs>,
    // Filesystem snapshots saved for checkpointing, keyed by oid
    // (presumably generated by `checkpoint` — confirm in that method).
    pub(crate) checkpoints: Arc<Mutex<HashMap<Oid, FakeFsEntry>>>,
    pub(crate) executor: BackgroundExecutor,
    // Location of this repository's `.git` entry in the fake filesystem.
    pub(crate) dot_git_path: PathBuf,
    // Directory returned by `GitRepository::path`.
    pub(crate) repository_dir_path: PathBuf,
    // The main repository's git dir, shared by all linked worktrees;
    // returned by `GitRepository::main_repository_path`.
    pub(crate) common_dir_path: PathBuf,
    pub(crate) is_trusted: Arc<AtomicBool>,
}
43
/// Snapshot of repository contents taken just before a commit was made.
/// `commit` pushes one of these onto `commit_history` so that `reset` can
/// later restore the earlier HEAD/index state.
#[derive(Debug, Clone)]
pub struct FakeCommitSnapshot {
    pub head_contents: HashMap<RepoPath, String>,
    pub index_contents: HashMap<RepoPath, String>,
    // The sha HEAD pointed at when this snapshot was captured.
    pub sha: String,
}
50
/// Scripted outcome for a commit-data lookup: tests can make the lookup
/// succeed or fail while still carrying the associated [`CommitData`].
/// NOTE(review): the consumer is not visible in this chunk — confirm the
/// exact Success/Fail semantics at the call sites of `commit_data`.
#[derive(Debug, Clone)]
pub enum FakeCommitDataEntry {
    Success(CommitData),
    Fail(CommitData),
}
56
/// The complete mutable state of a fake repository, stored inside [`FakeFs`]
/// and accessed through `FakeFs::with_git_state`.
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    // Pre-commit snapshots, oldest first; consumed by `reset`.
    pub commit_history: Vec<FakeCommitSnapshot>,
    // Channel used to notify watchers about git-state changes.
    pub event_emitter: async_channel::Sender<PathBuf>,
    // Paths in a conflicted (unmerged) state; these dominate `status` output.
    pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
    // Contents of the current HEAD commit, by repo-relative path.
    pub head_contents: HashMap<RepoPath, String>,
    // Contents of the index (staging area), by repo-relative path.
    pub index_contents: HashMap<RepoPath, String>,
    // everything in commit contents is in oids
    pub merge_base_contents: HashMap<RepoPath, Oid>,
    // Blob store: oid -> file content.
    pub oids: HashMap<Oid, String>,
    // Pre-seeded blame results, served verbatim by `blame`.
    pub blames: HashMap<RepoPath, Blame>,
    pub current_branch_name: Option<String>,
    pub branches: HashSet<String>,
    /// List of remotes, keys are names and values are URLs
    pub remotes: HashMap<String, String>,
    // When set, `set_index_text` fails with this message.
    pub simulated_index_write_error_message: Option<String>,
    // When set, `create_worktree` fails with this message.
    pub simulated_create_worktree_error: Option<String>,
    pub simulated_graph_error: Option<String>,
    // Ref table: ref name (e.g. "HEAD", "refs/heads/main") -> sha.
    pub refs: HashMap<String, String>,
    pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
    // Scripted commit-data lookups, keyed by oid.
    pub commit_data: HashMap<Oid, FakeCommitDataEntry>,
    pub stash_entries: GitStash,
}
80
81impl FakeGitRepositoryState {
82 pub fn new(event_emitter: async_channel::Sender<PathBuf>) -> Self {
83 FakeGitRepositoryState {
84 event_emitter,
85 head_contents: Default::default(),
86 index_contents: Default::default(),
87 unmerged_paths: Default::default(),
88 blames: Default::default(),
89 current_branch_name: Default::default(),
90 branches: Default::default(),
91 simulated_index_write_error_message: Default::default(),
92 simulated_create_worktree_error: Default::default(),
93 simulated_graph_error: None,
94 refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
95 merge_base_contents: Default::default(),
96 oids: Default::default(),
97 remotes: HashMap::default(),
98 graph_commits: Vec::new(),
99 commit_data: Default::default(),
100 commit_history: Vec::new(),
101 stash_entries: Default::default(),
102 }
103 }
104}
105
impl FakeGitRepository {
    /// Runs `f` against this repository's shared [`FakeGitRepositoryState`],
    /// after simulating a scheduling delay. `write` is forwarded to
    /// `FakeFs::with_git_state`, which distinguishes read from write access
    /// (writes are what make watchers see a git event — see `remove_worktree`).
    fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<'static, Result<T>>
    where
        F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> Result<T>,
        T: Send,
    {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // `with_git_state` yields Result<Result<T>>: the outer layer is
            // the state lookup, the inner one comes from `f`. The `?` unwraps
            // the outer layer; the inner Result is the block's value.
            fs.with_git_state(&dot_git_path, write, f)?
        }
        .boxed()
    }

    /// Applies a single ref update or deletion to the fake ref table.
    fn edit_ref(&self, edit: RefEdit) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            match edit {
                RefEdit::Update { ref_name, commit } => {
                    state.refs.insert(ref_name, commit);
                }
                RefEdit::Delete { ref_name } => {
                    state.refs.remove(&ref_name);
                }
            }
            Ok(())
        })
    }

    /// Scans `.git/worktrees/*/gitdir` to find the admin entry directory for a
    /// worktree at the given checkout path. Used when the working tree directory
    /// has already been deleted and we can't read its `.git` pointer file.
    async fn find_worktree_entry_dir_by_path(&self, path: &Path) -> Option<PathBuf> {
        use futures::StreamExt;

        let worktrees_dir = self.common_dir_path.join("worktrees");
        let mut entries = self.fs.read_dir(&worktrees_dir).await.ok()?;
        while let Some(Ok(entry_path)) = entries.next().await {
            // Each admin dir's `gitdir` file holds "<checkout>/.git"; the
            // checkout path is its parent directory.
            if let Ok(gitdir_content) = self.fs.load(&entry_path.join("gitdir")).await {
                let worktree_path = PathBuf::from(gitdir_content.trim())
                    .parent()
                    .map(PathBuf::from)
                    .unwrap_or_default();
                if worktree_path == path {
                    return Some(entry_path);
                }
            }
        }
        None
    }
}
158
159impl GitRepository for FakeGitRepository {
    // No-op: the fake keeps its index entirely in memory, so there is
    // nothing stale to reload.
    fn reload_index(&self) {}
161
162 fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
163 let fut = self.with_state_async(false, move |state| {
164 state
165 .index_contents
166 .get(&path)
167 .context("not present in index")
168 .cloned()
169 });
170 self.executor.spawn(async move { fut.await.ok() }).boxed()
171 }
172
173 fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
174 let fut = self.with_state_async(false, move |state| {
175 state
176 .head_contents
177 .get(&path)
178 .context("not present in HEAD")
179 .cloned()
180 });
181 self.executor.spawn(async move { fut.await.ok() }).boxed()
182 }
183
184 fn load_commit_template(&self) -> BoxFuture<'_, Result<Option<GitCommitTemplate>>> {
185 async { Ok(None) }.boxed()
186 }
187
188 fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
189 self.with_state_async(false, move |state| {
190 state.oids.get(&oid).cloned().context("oid does not exist")
191 })
192 .boxed()
193 }
194
195 fn load_commit(
196 &self,
197 _commit: String,
198 _cx: AsyncApp,
199 ) -> BoxFuture<'_, Result<git::repository::CommitDiff>> {
200 async { Ok(git::repository::CommitDiff { files: Vec::new() }) }.boxed()
201 }
202
203 fn set_index_text(
204 &self,
205 path: RepoPath,
206 content: Option<String>,
207 _env: Arc<HashMap<String, String>>,
208 _is_executable: bool,
209 ) -> BoxFuture<'_, anyhow::Result<()>> {
210 self.with_state_async(true, move |state| {
211 if let Some(message) = &state.simulated_index_write_error_message {
212 anyhow::bail!("{message}");
213 } else if let Some(content) = content {
214 state.index_contents.insert(path, content);
215 } else {
216 state.index_contents.remove(&path);
217 }
218 Ok(())
219 })
220 }
221
222 fn remote_url(&self, name: &str) -> BoxFuture<'_, Option<String>> {
223 let name = name.to_string();
224 let fut = self.with_state_async(false, move |state| {
225 state
226 .remotes
227 .get(&name)
228 .context("remote not found")
229 .cloned()
230 });
231 async move { fut.await.ok() }.boxed()
232 }
233
234 fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
235 let mut entries = HashMap::default();
236 self.with_state_async(false, |state| {
237 for (path, content) in &state.head_contents {
238 let status = if let Some((oid, original)) = state
239 .merge_base_contents
240 .get(path)
241 .map(|oid| (oid, &state.oids[oid]))
242 {
243 if original == content {
244 continue;
245 }
246 TreeDiffStatus::Modified { old: *oid }
247 } else {
248 TreeDiffStatus::Added
249 };
250 entries.insert(path.clone(), status);
251 }
252 for (path, oid) in &state.merge_base_contents {
253 if !entries.contains_key(path) {
254 entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
255 }
256 }
257 Ok(TreeDiff { entries })
258 })
259 .boxed()
260 }
261
262 fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
263 self.with_state_async(false, |state| {
264 Ok(revs
265 .into_iter()
266 .map(|rev| state.refs.get(&rev).cloned())
267 .collect())
268 })
269 }
270
271 fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
272 async {
273 Ok(CommitDetails {
274 sha: commit.into(),
275 message: "initial commit".into(),
276 ..Default::default()
277 })
278 }
279 .boxed()
280 }
281
    /// Simulates `git reset` by popping entries off `commit_history`.
    /// Accepts `HEAD~`, `HEAD^`, `HEAD~N`, or an exact sha previously
    /// recorded by `commit`. `ResetMode::Mixed` also resets the index to the
    /// restored HEAD contents; `Soft` leaves the index untouched.
    fn reset(
        &self,
        commit: String,
        mode: ResetMode,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            // Translate the commit spec into "how many commits to pop".
            let pop_count = if commit == "HEAD~" || commit == "HEAD^" {
                1
            } else if let Some(suffix) = commit.strip_prefix("HEAD~") {
                suffix
                    .parse::<usize>()
                    .with_context(|| format!("Invalid HEAD~ offset: {commit}"))?
            } else {
                // An explicit sha: search history from the most recent entry.
                match state
                    .commit_history
                    .iter()
                    .rposition(|entry| entry.sha == commit)
                {
                    Some(index) => state.commit_history.len() - index,
                    None => anyhow::bail!("Unknown commit ref: {commit}"),
                }
            };

            if pop_count == 0 || pop_count > state.commit_history.len() {
                anyhow::bail!(
                    "Cannot reset {pop_count} commit(s): only {} in history",
                    state.commit_history.len()
                );
            }

            // Restore the snapshot taken just before the target commit and
            // drop every newer history entry.
            let target_index = state.commit_history.len() - pop_count;
            let snapshot = state.commit_history[target_index].clone();
            state.commit_history.truncate(target_index);

            match mode {
                ResetMode::Soft => {
                    state.head_contents = snapshot.head_contents;
                }
                ResetMode::Mixed => {
                    state.head_contents = snapshot.head_contents;
                    state.index_contents = state.head_contents.clone();
                }
            }

            state.refs.insert("HEAD".into(), snapshot.sha);
            Ok(())
        })
    }
331
    fn checkout_files(
        &self,
        _commit: String,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        // Restoring files from a commit is not simulated; tests that need it
        // must not call this.
        unimplemented!()
    }
340
    /// Returns this repository's own git directory.
    // NOTE(review): for a linked worktree this is presumably the
    // `.git/worktrees/<name>` admin dir — confirm where the field is set.
    fn path(&self) -> PathBuf {
        self.repository_dir_path.clone()
    }
344
    /// Returns the common (main repository) git directory shared by all
    /// linked worktrees.
    fn main_repository_path(&self) -> PathBuf {
        self.common_dir_path.clone()
    }
348
349 fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
350 async move { None }.boxed()
351 }
352
    /// Computes a synchronous `git status` by comparing three snapshots:
    /// HEAD contents, index contents, and the files currently present on the
    /// fake filesystem under the working directory. Unmerged paths override
    /// all other classifications. The result only contains paths matching
    /// one of `path_prefixes`, and unmodified entries are omitted.
    fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>> {
        let workdir_path = self.dot_git_path.parent().unwrap();

        // Load gitignores
        // One Gitignore per ancestor directory that has a .gitignore file.
        let ignores = workdir_path
            .ancestors()
            .filter_map(|dir| {
                let ignore_path = dir.join(".gitignore");
                let content = self.fs.read_file_sync(ignore_path).ok()?;
                let content = String::from_utf8(content).ok()?;
                let mut builder = GitignoreBuilder::new(dir);
                for line in content.lines() {
                    builder.add_line(Some(dir.into()), line).ok()?;
                }
                builder.build().ok()
            })
            .collect::<Vec<_>>();

        // Load working copy files.
        // Maps repo-relative path -> (content, is_ignored).
        let git_files: HashMap<RepoPath, (String, bool)> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                // TODO better simulate git status output in the case of submodules and worktrees
                let repo_path = path.strip_prefix(workdir_path).ok()?;
                // Everything under .git/ starts out treated as ignored.
                let mut is_ignored = repo_path.starts_with(".git");
                for ignore in &ignores {
                    match ignore.matched_path_or_any_parents(path, false) {
                        ignore::Match::None => {}
                        ignore::Match::Ignore(_) => is_ignored = true,
                        // A whitelist rule wins; stop consulting outer ignores.
                        ignore::Match::Whitelist(_) => break,
                    }
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .map(|content| String::from_utf8(content).unwrap())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored)))
            })
            .collect();

        let result = self.fs.with_git_state(&self.dot_git_path, false, |state| {
            let mut entries = Vec::new();
            // Candidate set: every path known to HEAD, the index, or the
            // working tree.
            let paths = state
                .head_contents
                .keys()
                .chain(state.index_contents.keys())
                .chain(git_files.keys())
                .collect::<HashSet<_>>();
            for path in paths {
                if !path_prefixes.iter().any(|prefix| path.starts_with(prefix)) {
                    continue;
                }

                let head = state.head_contents.get(path);
                let index = state.index_contents.get(path);
                let unmerged = state.unmerged_paths.get(path);
                let fs = git_files.get(path);
                // Classify by which snapshots contain the path and whether
                // their contents agree. Unmerged status short-circuits.
                let status = match (unmerged, head, index, fs) {
                    (Some(unmerged), _, _, _) => FileStatus::Unmerged(*unmerged),
                    (_, Some(head), Some(index), Some((fs, _))) => {
                        FileStatus::Tracked(TrackedStatus {
                            index_status: if head == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                            worktree_status: if fs == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                        })
                    }
                    // In HEAD and index but gone from disk: deleted in worktree.
                    (_, Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: if head == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Removed from the index but still on disk.
                    (_, Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Added,
                    }),
                    (_, Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Newly staged file.
                    (_, None, Some(index), Some((fs, _))) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: if fs == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                    }),
                    (_, None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Only on disk: untracked, unless ignored.
                    (_, None, None, Some((_, is_ignored))) => {
                        if *is_ignored {
                            continue;
                        }
                        FileStatus::Untracked
                    }
                    // Path came from one of the three key sets, so at least
                    // one lookup must succeed.
                    (_, None, None, None) => {
                        unreachable!();
                    }
                };
                // Fully unmodified entries are omitted, like real git.
                if status
                    != FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Unmodified,
                        worktree_status: StatusCode::Unmodified,
                    })
                {
                    entries.push((path.clone(), status));
                }
            }
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            anyhow::Ok(GitStatus {
                entries: entries.into(),
            })
        });
        Task::ready(match result {
            Ok(result) => result,
            Err(e) => Err(e),
        })
    }
487
488 fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
489 self.with_state_async(false, |state| Ok(state.stash_entries.clone()))
490 }
491
492 fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
493 self.with_state_async(false, move |state| {
494 let current_branch = &state.current_branch_name;
495 let mut branches = state
496 .branches
497 .iter()
498 .map(|branch_name| {
499 let ref_name = if branch_name.starts_with("refs/") {
500 branch_name.into()
501 } else if branch_name.contains('/') {
502 format!("refs/remotes/{branch_name}").into()
503 } else {
504 format!("refs/heads/{branch_name}").into()
505 };
506 Branch {
507 is_head: Some(branch_name) == current_branch.as_ref(),
508 ref_name,
509 most_recent_commit: None,
510 upstream: None,
511 }
512 })
513 .collect::<Vec<_>>();
514 // compute snapshot expects these to be sorted by ref_name
515 // because that's what git itself does
516 branches.sort_by(|a, b| a.ref_name.cmp(&b.ref_name));
517 Ok(branches)
518 })
519 }
520
    /// Lists the main worktree plus every linked worktree recorded under
    /// `.git/worktrees/`, mirroring `git worktree list`.
    fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
        let fs = self.fs.clone();
        let common_dir_path = self.common_dir_path.clone();
        let executor = self.executor.clone();

        async move {
            executor.simulate_random_delay().await;

            // The main worktree is derived from shared git state: its path is
            // the parent of the common dir, its sha comes from HEAD.
            let (main_worktree, refs) = fs.with_git_state(&common_dir_path, false, |state| {
                let work_dir = common_dir_path
                    .parent()
                    .map(PathBuf::from)
                    .unwrap_or_else(|| common_dir_path.clone());
                let head_sha = state
                    .refs
                    .get("HEAD")
                    .cloned()
                    .unwrap_or_else(|| "0000000".to_string());
                let branch_ref = state
                    .current_branch_name
                    .as_ref()
                    .map(|name| format!("refs/heads/{name}"))
                    .unwrap_or_else(|| "refs/heads/main".to_string());
                let main_wt = Worktree {
                    path: work_dir,
                    ref_name: Some(branch_ref.into()),
                    sha: head_sha.into(),
                    is_main: true,
                    is_bare: false,
                };
                (main_wt, state.refs.clone())
            })?;

            let mut all = vec![main_worktree];

            // Linked worktrees are reconstructed from the admin files written
            // by `create_worktree`: HEAD (ref or sha) and gitdir (checkout path).
            let worktrees_dir = common_dir_path.join("worktrees");
            if let Ok(mut entries) = fs.read_dir(&worktrees_dir).await {
                use futures::StreamExt;
                while let Some(Ok(entry_path)) = entries.next().await {
                    // Entries missing either admin file are skipped silently.
                    let head_content = match fs.load(&entry_path.join("HEAD")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };
                    let gitdir_content = match fs.load(&entry_path.join("gitdir")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };

                    // A symbolic HEAD ("ref: refs/heads/x") resolves through
                    // the ref table; a detached HEAD stores the sha directly.
                    let ref_name = head_content
                        .strip_prefix("ref: ")
                        .map(|s| s.trim().to_string());
                    let sha = ref_name
                        .as_ref()
                        .and_then(|r| refs.get(r))
                        .cloned()
                        .unwrap_or_else(|| head_content.trim().to_string());

                    // gitdir holds "<checkout>/.git"; the worktree path is its
                    // parent directory.
                    let worktree_path = PathBuf::from(gitdir_content.trim())
                        .parent()
                        .map(PathBuf::from)
                        .unwrap_or_default();

                    all.push(Worktree {
                        path: worktree_path,
                        ref_name: ref_name.map(Into::into),
                        sha: sha.into(),
                        is_main: false,
                        is_bare: false,
                    });
                }
            }

            Ok(all)
        }
        .boxed()
    }
597
    /// Simulates `git worktree add`: validates the target, creates the
    /// checkout directory, writes the `.git/worktrees/<name>/` admin files
    /// (HEAD, commondir, gitdir) plus the checkout's `.git` pointer file, and
    /// registers a new branch ref when the target is `NewBranch`.
    fn create_worktree(
        &self,
        target: CreateWorktreeTarget,
        path: PathBuf,
    ) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            let branch_name = target.branch_name().map(ToOwned::to_owned);
            let create_branch_ref = matches!(target, CreateWorktreeTarget::NewBranch { .. });

            // Check for simulated error and validate branch state before any side effects.
            fs.with_git_state(&dot_git_path, false, {
                let branch_name = branch_name.clone();
                move |state| {
                    if let Some(message) = &state.simulated_create_worktree_error {
                        anyhow::bail!("{message}");
                    }

                    // NewBranch requires the branch not to exist yet;
                    // ExistingBranch requires it to exist already.
                    match (create_branch_ref, branch_name.as_ref()) {
                        (true, Some(branch_name)) => {
                            if state.branches.contains(branch_name) {
                                bail!("a branch named '{}' already exists", branch_name);
                            }
                        }
                        (false, Some(branch_name)) => {
                            if !state.branches.contains(branch_name) {
                                bail!("no branch named '{}' exists", branch_name);
                            }
                        }
                        (false, None) => {}
                        (true, None) => bail!("branch name is required to create a branch"),
                    }

                    Ok(())
                }
            })??;

            // Resolve the branch name and starting sha for each target kind.
            let (branch_name, sha, create_branch_ref) = match target {
                CreateWorktreeTarget::ExistingBranch { branch_name } => {
                    let ref_name = format!("refs/heads/{branch_name}");
                    let sha = fs.with_git_state(&dot_git_path, false, {
                        move |state| {
                            Ok::<_, anyhow::Error>(
                                state
                                    .refs
                                    .get(&ref_name)
                                    .cloned()
                                    .unwrap_or_else(|| "fake-sha".to_string()),
                            )
                        }
                    })??;
                    (Some(branch_name), sha, false)
                }
                CreateWorktreeTarget::NewBranch {
                    branch_name,
                    base_sha: start_point,
                } => (
                    Some(branch_name),
                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
                    true,
                ),
                CreateWorktreeTarget::Detached {
                    base_sha: start_point,
                } => (
                    None,
                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
                    false,
                ),
            };

            // Create the worktree checkout directory.
            fs.create_dir(&path).await?;

            // Create .git/worktrees/<name>/ directory with HEAD, commondir, gitdir.
            // A detached worktree is named after its directory (or "detached").
            let worktree_entry_name = branch_name.as_deref().unwrap_or_else(|| {
                path.file_name()
                    .and_then(|name| name.to_str())
                    .unwrap_or("detached")
            });
            let worktrees_entry_dir = common_dir_path.join("worktrees").join(worktree_entry_name);
            fs.create_dir(&worktrees_entry_dir).await?;

            // Symbolic HEAD for branches, bare sha for detached worktrees.
            let head_content = if let Some(ref branch_name) = branch_name {
                let ref_name = format!("refs/heads/{branch_name}");
                format!("ref: {ref_name}")
            } else {
                sha.clone()
            };
            fs.write_file_internal(
                worktrees_entry_dir.join("HEAD"),
                head_content.into_bytes(),
                false,
            )?;
            fs.write_file_internal(
                worktrees_entry_dir.join("commondir"),
                common_dir_path.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;
            let worktree_dot_git = path.join(".git");
            fs.write_file_internal(
                worktrees_entry_dir.join("gitdir"),
                worktree_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Create .git file in the worktree checkout.
            fs.write_file_internal(
                &worktree_dot_git,
                format!("gitdir: {}", worktrees_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Update git state for newly created branches.
            if create_branch_ref {
                fs.with_git_state(&dot_git_path, true, {
                    let branch_name = branch_name.clone();
                    let sha = sha.clone();
                    move |state| {
                        if let Some(branch_name) = branch_name {
                            let ref_name = format!("refs/heads/{branch_name}");
                            state.refs.insert(ref_name, sha);
                            state.branches.insert(branch_name);
                        }
                        Ok::<(), anyhow::Error>(())
                    }
                })??;
            }

            Ok(())
        }
        .boxed()
    }
735
    /// Simulates `git worktree remove`: deletes the checkout directory and
    /// its `.git/worktrees/<name>/` admin entry, then pokes the git state so
    /// watchers notice the change.
    fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Try to read the worktree's .git file to find its entry
            // directory. If the working tree is already gone (e.g. the
            // caller deleted it before asking git to clean up), fall back
            // to scanning `.git/worktrees/*/gitdir` for a matching path,
            // mirroring real git's behavior with `--force`.
            let dot_git_file = path.join(".git");
            let worktree_entry_dir = if let Ok(content) = fs.load(&dot_git_file).await {
                let gitdir = content
                    .strip_prefix("gitdir:")
                    .context("invalid .git file in worktree")?
                    .trim();
                PathBuf::from(gitdir)
            } else {
                self.find_worktree_entry_dir_by_path(&path)
                    .await
                    .with_context(|| format!("no worktree found at path: {}", path.display()))?
            };

            // Remove the worktree checkout directory if it still exists.
            fs.remove_dir(
                &path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await?;

            // Remove the .git/worktrees/<name>/ directory.
            fs.remove_dir(
                &worktree_entry_dir,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: false,
                },
            )
            .await?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
789
    /// Simulates `git worktree move`: relocates the checkout directory and
    /// rewrites the two pointer files (`gitdir` in the admin entry, `.git` in
    /// the checkout) so they reference each other at the new location.
    fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Read the worktree's .git file to find its entry directory.
            let dot_git_file = old_path.join(".git");
            let content = fs
                .load(&dot_git_file)
                .await
                .with_context(|| format!("no worktree found at path: {}", old_path.display()))?;
            let gitdir = content
                .strip_prefix("gitdir:")
                .context("invalid .git file in worktree")?
                .trim();
            let worktree_entry_dir = PathBuf::from(gitdir);

            // Move the worktree checkout directory.
            fs.rename(
                &old_path,
                &new_path,
                RenameOptions {
                    overwrite: false,
                    ignore_if_exists: false,
                    create_parents: true,
                },
            )
            .await?;

            // Update the gitdir file in .git/worktrees/<name>/ to point to the
            // new location.
            let new_dot_git = new_path.join(".git");
            fs.write_file_internal(
                worktree_entry_dir.join("gitdir"),
                new_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Update the .git file in the moved worktree checkout.
            fs.write_file_internal(
                &new_dot_git,
                format!("gitdir: {}", worktree_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
845
846 fn checkout_branch_in_worktree(
847 &self,
848 _branch_name: String,
849 _worktree_path: PathBuf,
850 _create: bool,
851 ) -> BoxFuture<'_, Result<()>> {
852 async { Ok(()) }.boxed()
853 }
854
855 fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
856 self.with_state_async(true, |state| {
857 state.current_branch_name = Some(name);
858 Ok(())
859 })
860 }
861
862 fn create_branch(
863 &self,
864 name: String,
865 _base_branch: Option<String>,
866 ) -> BoxFuture<'_, Result<()>> {
867 self.with_state_async(true, move |state| {
868 if let Some((remote, _)) = name.split_once('/')
869 && !state.remotes.contains_key(remote)
870 {
871 state.remotes.insert(remote.to_owned(), "".to_owned());
872 }
873 state.branches.insert(name);
874 Ok(())
875 })
876 }
877
878 fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> {
879 self.with_state_async(true, move |state| {
880 if !state.branches.remove(&branch) {
881 bail!("no such branch: {branch}");
882 }
883 state.branches.insert(new_name.clone());
884 if state.current_branch_name == Some(branch) {
885 state.current_branch_name = Some(new_name);
886 }
887 Ok(())
888 })
889 }
890
891 fn delete_branch(&self, _is_remote: bool, name: String) -> BoxFuture<'_, Result<()>> {
892 self.with_state_async(true, move |state| {
893 if !state.branches.remove(&name) {
894 bail!("no such branch: {name}");
895 }
896 Ok(())
897 })
898 }
899
900 fn blame(
901 &self,
902 path: RepoPath,
903 _content: Rope,
904 _line_ending: LineEnding,
905 ) -> BoxFuture<'_, Result<git::blame::Blame>> {
906 self.with_state_async(false, move |state| {
907 state
908 .blames
909 .get(&path)
910 .with_context(|| format!("failed to get blame for {:?}", path))
911 .cloned()
912 })
913 }
914
    /// Simulates `git add`: loads each path's current content from the fake
    /// filesystem and copies it into the index. Paths that fail to load
    /// (e.g. deleted files) are removed from the index instead.
    fn stage_paths(
        &self,
        paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        Box::pin(async move {
            // Kick off one load per path; all loads run before the state is
            // locked for writing.
            let contents = paths
                .into_iter()
                .map(|path| {
                    let abs_path = self
                        .dot_git_path
                        .parent()
                        .unwrap()
                        .join(&path.as_std_path());
                    Box::pin(async move { (path.clone(), self.fs.load(&abs_path).await.ok()) })
                })
                .collect::<Vec<_>>();
            let contents = join_all(contents).await;
            self.with_state_async(true, move |state| {
                for (path, content) in contents {
                    if let Some(content) = content {
                        state.index_contents.insert(path, content);
                    } else {
                        state.index_contents.remove(&path);
                    }
                }
                Ok(())
            })
            .await
        })
    }
946
947 fn unstage_paths(
948 &self,
949 paths: Vec<RepoPath>,
950 _env: Arc<HashMap<String, String>>,
951 ) -> BoxFuture<'_, Result<()>> {
952 self.with_state_async(true, move |state| {
953 for path in paths {
954 match state.head_contents.get(&path) {
955 Some(content) => state.index_contents.insert(path, content.clone()),
956 None => state.index_contents.remove(&path),
957 };
958 }
959 Ok(())
960 })
961 }
962
    fn stash_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        // Stashing is not simulated; tests that need it must not call this.
        unimplemented!()
    }
970
    fn stash_pop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        // Stash popping is not simulated; tests that need it must not call this.
        unimplemented!()
    }
978
    fn stash_apply(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        // Stash application is not simulated; tests that need it must not call this.
        unimplemented!()
    }
986
    fn stash_drop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        // Stash dropping is not simulated; tests that need it must not call this.
        unimplemented!()
    }
994
    /// Simulates `git commit`: snapshots the pre-commit state onto
    /// `commit_history` (so `reset` can restore it) and promotes the index to
    /// HEAD. Commit message and author are ignored by the fake.
    fn commit(
        &self,
        _message: gpui::SharedString,
        _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
        options: CommitOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            // Refuse empty commits unless explicitly allowed or amending.
            if !options.allow_empty && !options.amend && state.index_contents == state.head_contents
            {
                anyhow::bail!("nothing to commit (use allow_empty to create an empty commit)");
            }

            // The history entry records the sha HEAD had *before* this commit.
            let old_sha = state.refs.get("HEAD").cloned().unwrap_or_default();
            state.commit_history.push(FakeCommitSnapshot {
                head_contents: state.head_contents.clone(),
                index_contents: state.index_contents.clone(),
                sha: old_sha,
            });

            state.head_contents = state.index_contents.clone();

            // NOTE(review): amending still pushes a new history entry and
            // mints a new sha — history rewriting is not actually simulated;
            // confirm callers don't rely on real amend semantics.
            let new_sha = format!("fake-commit-{}", state.commit_history.len());
            state.refs.insert("HEAD".into(), new_sha);

            Ok(())
        })
    }
1024
1025 fn run_hook(
1026 &self,
1027 _hook: RunHook,
1028 _env: Arc<HashMap<String, String>>,
1029 ) -> BoxFuture<'_, Result<()>> {
1030 async { Ok(()) }.boxed()
1031 }
1032
    fn push(
        &self,
        _branch: String,
        _remote_branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        // Network operations are not simulated by the fake repository.
        unimplemented!()
    }
1045
    fn pull(
        &self,
        _branch: Option<String>,
        _remote: String,
        _rebase: bool,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        // Network operations are not simulated by the fake repository.
        unimplemented!()
    }
1057
    fn fetch(
        &self,
        _fetch_options: FetchOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        // Network operations are not simulated by the fake repository.
        unimplemented!()
    }
1067
1068 fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
1069 self.with_state_async(false, move |state| {
1070 let remotes = state
1071 .remotes
1072 .keys()
1073 .map(|r| Remote {
1074 name: r.clone().into(),
1075 })
1076 .collect::<Vec<_>>();
1077 Ok(remotes)
1078 })
1079 }
1080
    fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        // Per-branch push remotes are not simulated by the fake.
        unimplemented!()
    }
1084
    fn get_branch_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        // Per-branch tracking remotes are not simulated by the fake.
        unimplemented!()
    }
1088
1089 fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
1090 future::ready(Ok(Vec::new())).boxed()
1091 }
1092
1093 fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result<String>> {
1094 future::ready(Ok(String::new())).boxed()
1095 }
1096
    /// Computes per-file added/deleted line counts between HEAD and the
    /// working tree, roughly like `git diff --numstat`. Worktree files only
    /// participate if they are tracked (present in the index); results are
    /// limited to paths matching `path_prefixes` and sorted by path.
    fn diff_stat(
        &self,
        path_prefixes: &[RepoPath],
    ) -> BoxFuture<'_, Result<git::status::GitDiffStat>> {
        // Line count used for the stat. Note: "".lines().count() is already
        // 0, so the empty-string guard is redundant but harmless.
        fn count_lines(s: &str) -> u32 {
            if s.is_empty() {
                0
            } else {
                s.lines().count() as u32
            }
        }

        // An empty prefix list — or the "." prefix — matches every path.
        fn matches_prefixes(path: &RepoPath, prefixes: &[RepoPath]) -> bool {
            if prefixes.is_empty() {
                return true;
            }
            prefixes.iter().any(|prefix| {
                let prefix_str = prefix.as_unix_str();
                if prefix_str == "." {
                    return true;
                }
                path == prefix || path.starts_with(&prefix)
            })
        }

        let path_prefixes = path_prefixes.to_vec();

        // Snapshot the working tree (excluding anything under .git) before
        // touching the git state.
        let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf();
        let worktree_files: HashMap<RepoPath, String> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                let repo_path = path.strip_prefix(&workdir_path).ok()?;
                if repo_path.starts_with(".git") {
                    return None;
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .and_then(|bytes| String::from_utf8(bytes).ok())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), content))
            })
            .collect();

        self.with_state_async(false, move |state| {
            let mut entries = Vec::new();
            // Candidate paths: everything in HEAD, plus worktree files that
            // are tracked in the index (untracked files contribute nothing).
            let all_paths: HashSet<&RepoPath> = state
                .head_contents
                .keys()
                .chain(
                    worktree_files
                        .keys()
                        .filter(|p| state.index_contents.contains_key(*p)),
                )
                .collect();
            for path in all_paths {
                if !matches_prefixes(path, &path_prefixes) {
                    continue;
                }
                let head = state.head_contents.get(path);
                let worktree = worktree_files.get(path);
                // The fake doesn't compute real hunks: a changed file counts
                // all of its old lines as deleted and all new lines as added.
                match (head, worktree) {
                    (Some(old), Some(new)) if old != new => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: count_lines(new),
                                deleted: count_lines(old),
                            },
                        ));
                    }
                    (Some(old), None) => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: 0,
                                deleted: count_lines(old),
                            },
                        ));
                    }
                    (None, Some(new)) => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: count_lines(new),
                                deleted: 0,
                            },
                        ));
                    }
                    // Identical or absent on both sides: no entry.
                    _ => {}
                }
            }
            entries.sort_by(|(a, _), (b, _)| a.cmp(b));
            Ok(git::status::GitDiffStat {
                entries: entries.into(),
            })
        })
        .boxed()
    }
1199
1200 fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
1201 let executor = self.executor.clone();
1202 let fs = self.fs.clone();
1203 let checkpoints = self.checkpoints.clone();
1204 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1205 async move {
1206 executor.simulate_random_delay().await;
1207 let oid = git::Oid::random(&mut *executor.rng().lock());
1208 let entry = fs.entry(&repository_dir_path)?;
1209 checkpoints.lock().insert(oid, entry);
1210 Ok(GitRepositoryCheckpoint { commit_sha: oid })
1211 }
1212 .boxed()
1213 }
1214
1215 fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
1216 let executor = self.executor.clone();
1217 let fs = self.fs.clone();
1218 let checkpoints = self.checkpoints.clone();
1219 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1220 async move {
1221 executor.simulate_random_delay().await;
1222 let checkpoints = checkpoints.lock();
1223 let entry = checkpoints
1224 .get(&checkpoint.commit_sha)
1225 .context(format!("invalid checkpoint: {}", checkpoint.commit_sha))?;
1226 fs.insert_entry(&repository_dir_path, entry.clone())?;
1227 Ok(())
1228 }
1229 .boxed()
1230 }
1231
1232 fn create_archive_checkpoint(&self) -> BoxFuture<'_, Result<(String, String)>> {
1233 let executor = self.executor.clone();
1234 let fs = self.fs.clone();
1235 let checkpoints = self.checkpoints.clone();
1236 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1237 async move {
1238 executor.simulate_random_delay().await;
1239 let staged_oid = git::Oid::random(&mut *executor.rng().lock());
1240 let unstaged_oid = git::Oid::random(&mut *executor.rng().lock());
1241 let entry = fs.entry(&repository_dir_path)?;
1242 checkpoints.lock().insert(staged_oid, entry.clone());
1243 checkpoints.lock().insert(unstaged_oid, entry);
1244 Ok((staged_oid.to_string(), unstaged_oid.to_string()))
1245 }
1246 .boxed()
1247 }
1248
1249 fn restore_archive_checkpoint(
1250 &self,
1251 // The fake filesystem doesn't model a separate index, so only the
1252 // unstaged (full working directory) snapshot is restored.
1253 _staged_sha: String,
1254 unstaged_sha: String,
1255 ) -> BoxFuture<'_, Result<()>> {
1256 match unstaged_sha.parse() {
1257 Ok(commit_sha) => self.restore_checkpoint(GitRepositoryCheckpoint { commit_sha }),
1258 Err(error) => async move {
1259 Err(anyhow::anyhow!(error).context("failed to parse unstaged SHA as Oid"))
1260 }
1261 .boxed(),
1262 }
1263 }
1264
1265 fn compare_checkpoints(
1266 &self,
1267 left: GitRepositoryCheckpoint,
1268 right: GitRepositoryCheckpoint,
1269 ) -> BoxFuture<'_, Result<bool>> {
1270 let executor = self.executor.clone();
1271 let checkpoints = self.checkpoints.clone();
1272 async move {
1273 executor.simulate_random_delay().await;
1274 let checkpoints = checkpoints.lock();
1275 let left = checkpoints
1276 .get(&left.commit_sha)
1277 .context(format!("invalid left checkpoint: {}", left.commit_sha))?;
1278 let right = checkpoints
1279 .get(&right.commit_sha)
1280 .context(format!("invalid right checkpoint: {}", right.commit_sha))?;
1281
1282 Ok(left == right)
1283 }
1284 .boxed()
1285 }
1286
1287 fn diff_checkpoints(
1288 &self,
1289 base_checkpoint: GitRepositoryCheckpoint,
1290 target_checkpoint: GitRepositoryCheckpoint,
1291 ) -> BoxFuture<'_, Result<String>> {
1292 let executor = self.executor.clone();
1293 let checkpoints = self.checkpoints.clone();
1294 async move {
1295 executor.simulate_random_delay().await;
1296 let checkpoints = checkpoints.lock();
1297 let base = checkpoints
1298 .get(&base_checkpoint.commit_sha)
1299 .context(format!(
1300 "invalid base checkpoint: {}",
1301 base_checkpoint.commit_sha
1302 ))?;
1303 let target = checkpoints
1304 .get(&target_checkpoint.commit_sha)
1305 .context(format!(
1306 "invalid target checkpoint: {}",
1307 target_checkpoint.commit_sha
1308 ))?;
1309
1310 fn collect_files(
1311 entry: &FakeFsEntry,
1312 prefix: String,
1313 out: &mut std::collections::BTreeMap<String, String>,
1314 ) {
1315 match entry {
1316 FakeFsEntry::File { content, .. } => {
1317 out.insert(prefix, String::from_utf8_lossy(content).into_owned());
1318 }
1319 FakeFsEntry::Dir { entries, .. } => {
1320 for (name, child) in entries {
1321 let path = if prefix.is_empty() {
1322 name.clone()
1323 } else {
1324 format!("{prefix}/{name}")
1325 };
1326 collect_files(child, path, out);
1327 }
1328 }
1329 FakeFsEntry::Symlink { .. } => {}
1330 }
1331 }
1332
1333 let mut base_files = std::collections::BTreeMap::new();
1334 let mut target_files = std::collections::BTreeMap::new();
1335 collect_files(base, String::new(), &mut base_files);
1336 collect_files(target, String::new(), &mut target_files);
1337
1338 let all_paths: std::collections::BTreeSet<&String> =
1339 base_files.keys().chain(target_files.keys()).collect();
1340
1341 let mut diff = String::new();
1342 for path in all_paths {
1343 match (base_files.get(path), target_files.get(path)) {
1344 (Some(base_content), Some(target_content))
1345 if base_content != target_content =>
1346 {
1347 diff.push_str(&format!("diff --git a/{path} b/{path}\n"));
1348 diff.push_str(&format!("--- a/{path}\n"));
1349 diff.push_str(&format!("+++ b/{path}\n"));
1350 for line in base_content.lines() {
1351 diff.push_str(&format!("-{line}\n"));
1352 }
1353 for line in target_content.lines() {
1354 diff.push_str(&format!("+{line}\n"));
1355 }
1356 }
1357 (Some(_), None) => {
1358 diff.push_str(&format!("diff --git a/{path} /dev/null\n"));
1359 diff.push_str("deleted file\n");
1360 }
1361 (None, Some(_)) => {
1362 diff.push_str(&format!("diff --git /dev/null b/{path}\n"));
1363 diff.push_str("new file\n");
1364 }
1365 _ => {}
1366 }
1367 }
1368 Ok(diff)
1369 }
1370 .boxed()
1371 }
1372
1373 fn default_branch(
1374 &self,
1375 include_remote_name: bool,
1376 ) -> BoxFuture<'_, Result<Option<SharedString>>> {
1377 async move {
1378 Ok(Some(if include_remote_name {
1379 "origin/main".into()
1380 } else {
1381 "main".into()
1382 }))
1383 }
1384 .boxed()
1385 }
1386
1387 fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
1388 self.with_state_async(true, move |state| {
1389 state.remotes.insert(name, url);
1390 Ok(())
1391 })
1392 }
1393
1394 fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
1395 self.with_state_async(true, move |state| {
1396 state.branches.retain(|branch| {
1397 branch
1398 .split_once('/')
1399 .is_none_or(|(remote, _)| remote != name)
1400 });
1401 state.remotes.remove(&name);
1402 Ok(())
1403 })
1404 }
1405
1406 fn initial_graph_data(
1407 &self,
1408 _log_source: LogSource,
1409 _log_order: LogOrder,
1410 request_tx: Sender<Vec<Arc<InitialGraphCommitData>>>,
1411 ) -> BoxFuture<'_, Result<()>> {
1412 let fs = self.fs.clone();
1413 let dot_git_path = self.dot_git_path.clone();
1414 async move {
1415 let (graph_commits, simulated_error) =
1416 fs.with_git_state(&dot_git_path, false, |state| {
1417 (
1418 state.graph_commits.clone(),
1419 state.simulated_graph_error.clone(),
1420 )
1421 })?;
1422
1423 if let Some(error) = simulated_error {
1424 anyhow::bail!("{}", error);
1425 }
1426
1427 for chunk in graph_commits.chunks(GRAPH_CHUNK_SIZE) {
1428 request_tx.send(chunk.to_vec()).await.ok();
1429 }
1430 Ok(())
1431 }
1432 .boxed()
1433 }
1434
1435 fn search_commits(
1436 &self,
1437 _log_source: LogSource,
1438 _search_args: SearchCommitArgs,
1439 _request_tx: Sender<Oid>,
1440 ) -> BoxFuture<'_, Result<()>> {
1441 async { bail!("search_commits not supported for FakeGitRepository") }.boxed()
1442 }
1443
1444 fn commit_data_reader(&self) -> Result<CommitDataReader> {
1445 let fs = self.fs.clone();
1446 let dot_git_path = self.dot_git_path.clone();
1447 let executor = self.executor.clone();
1448 Ok(CommitDataReader::for_test(executor, move |sha| {
1449 fs.with_git_state(&dot_git_path, false, |state| {
1450 let commit = state
1451 .commit_data
1452 .get(&sha)
1453 .context(format!("graph commit data not found for {sha}"))?;
1454
1455 match commit {
1456 FakeCommitDataEntry::Success(data) => Ok(data.clone()),
1457 FakeCommitDataEntry::Fail(_) => {
1458 bail!("simulated commit data read failure for {sha}")
1459 }
1460 }
1461 })?
1462 }))
1463 }
1464
    /// Points `ref_name` at `commit` by delegating to `edit_ref` with an
    /// update edit.
    fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> {
        self.edit_ref(RefEdit::Update { ref_name, commit })
    }
1468
    /// Removes `ref_name` by delegating to `edit_ref` with a delete edit.
    fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> {
        self.edit_ref(RefEdit::Delete { ref_name })
    }
1472
1473 fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>> {
1474 async { Ok(()) }.boxed()
1475 }
1476
    /// Marks the repository as trusted (or not). The `Release` store pairs
    /// with the `Acquire` load in `is_trusted`.
    fn set_trusted(&self, trusted: bool) {
        self.is_trusted
            .store(trusted, std::sync::atomic::Ordering::Release);
    }
1481
    /// Reports whether the repository has been marked trusted via
    /// `set_trusted`.
    fn is_trusted(&self) -> bool {
        self.is_trusted.load(std::sync::atomic::Ordering::Acquire)
    }
1485}