1use std::path::Path;
2
3use crate::{FakeFs, FakeFsEntry, Fs, RemoveOptions, RenameOptions};
4use anyhow::{Context as _, Result, bail};
5use collections::{HashMap, HashSet};
6use futures::future::{self, BoxFuture, join_all};
7use git::repository::GitCommitTemplate;
8use git::{
9 Oid, RunHook,
10 blame::Blame,
11 repository::{
12 AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions,
13 CreateWorktreeTarget, FetchOptions, GRAPH_CHUNK_SIZE, GitRepository,
14 GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder, LogSource, PushOptions, RefEdit,
15 Remote, RepoPath, ResetMode, SearchCommitArgs, Worktree,
16 },
17 stash::GitStash,
18 status::{
19 DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
20 UnmergedStatus,
21 },
22};
23use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task};
24use ignore::gitignore::GitignoreBuilder;
25use parking_lot::Mutex;
26use rope::Rope;
27use smol::{channel::Sender, future::FutureExt as _};
28use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool};
29use text::LineEnding;
30use util::{paths::PathStyle, rel_path::RelPath};
31
/// An in-memory stand-in for a real git repository, backed by [`FakeFs`].
///
/// Cloning is cheap: all fields are `Arc`s or small owned paths, and clones
/// share the same underlying fake filesystem and checkpoint table.
#[derive(Clone)]
pub struct FakeGitRepository {
    // Shared fake filesystem that owns the actual `FakeGitRepositoryState`.
    pub(crate) fs: Arc<FakeFs>,
    // Snapshots of fs entries keyed by checkpoint Oid (shared across clones).
    pub(crate) checkpoints: Arc<Mutex<HashMap<Oid, FakeFsEntry>>>,
    // Used to simulate random delays before touching git state.
    pub(crate) executor: BackgroundExecutor,
    // Path to this checkout's `.git` (a directory, or a file for worktrees).
    pub(crate) dot_git_path: PathBuf,
    // The git dir for this repository (equals `common_dir_path` for the main
    // checkout; a `.git/worktrees/<name>` entry for linked worktrees).
    pub(crate) repository_dir_path: PathBuf,
    // The main repository's `.git` directory, shared by all worktrees.
    pub(crate) common_dir_path: PathBuf,
    pub(crate) is_trusted: Arc<AtomicBool>,
}
42
/// A snapshot of repository contents taken when a commit is created,
/// used by `reset` to restore an earlier state.
#[derive(Debug, Clone)]
pub struct FakeCommitSnapshot {
    // Path -> file contents at HEAD when the snapshot was taken.
    pub head_contents: HashMap<RepoPath, String>,
    // Path -> staged contents at the time of the snapshot.
    pub index_contents: HashMap<RepoPath, String>,
    // The value of HEAD *before* the commit that produced this snapshot.
    pub sha: String,
}
49
/// The mutable, in-memory state of a fake repository. Owned by [`FakeFs`]
/// and accessed through `FakeFs::with_git_state`.
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    // Snapshots pushed on every `commit`, popped/truncated by `reset`.
    pub commit_history: Vec<FakeCommitSnapshot>,
    // Channel used to notify watchers that git state changed.
    pub event_emitter: smol::channel::Sender<PathBuf>,
    // Paths currently in a conflicted (unmerged) state.
    pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
    // Path -> contents as committed at HEAD.
    pub head_contents: HashMap<RepoPath, String>,
    // Path -> contents currently staged in the index.
    pub index_contents: HashMap<RepoPath, String>,
    // Path -> blob Oid at the merge base; every Oid here must have its
    // contents present in `oids`.
    pub merge_base_contents: HashMap<RepoPath, Oid>,
    // Blob Oid -> blob contents.
    pub oids: HashMap<Oid, String>,
    // Canned blame results, keyed by path.
    pub blames: HashMap<RepoPath, Blame>,
    // The checked-out branch, if any (detached HEAD when `None`).
    pub current_branch_name: Option<String>,
    // All known branch names (local and remote-style `remote/name`).
    pub branches: HashSet<String>,
    /// List of remotes, keys are names and values are URLs
    pub remotes: HashMap<String, String>,
    // When set, `set_index_text` fails with this message (for tests).
    pub simulated_index_write_error_message: Option<String>,
    // When set, `create_worktree` fails with this message (for tests).
    pub simulated_create_worktree_error: Option<String>,
    pub simulated_graph_error: Option<String>,
    // Ref name -> sha, e.g. "HEAD" or "refs/heads/main".
    pub refs: HashMap<String, String>,
    pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
    pub stash_entries: GitStash,
}
72
73impl FakeGitRepositoryState {
74 pub fn new(event_emitter: smol::channel::Sender<PathBuf>) -> Self {
75 FakeGitRepositoryState {
76 event_emitter,
77 head_contents: Default::default(),
78 index_contents: Default::default(),
79 unmerged_paths: Default::default(),
80 blames: Default::default(),
81 current_branch_name: Default::default(),
82 branches: Default::default(),
83 simulated_index_write_error_message: Default::default(),
84 simulated_create_worktree_error: Default::default(),
85 simulated_graph_error: None,
86 refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
87 merge_base_contents: Default::default(),
88 oids: Default::default(),
89 remotes: HashMap::default(),
90 graph_commits: Vec::new(),
91 commit_history: Vec::new(),
92 stash_entries: Default::default(),
93 }
94 }
95}
96
impl FakeGitRepository {
    /// Runs `f` against this repository's shared [`FakeGitRepositoryState`]
    /// after a simulated random delay.
    ///
    /// When `write` is true the fake fs treats the access as a mutation, so
    /// watchers observe a git event. The returned future is `'static`
    /// because it captures clones of the fs, executor, and `.git` path
    /// rather than borrowing `self`.
    fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<'static, Result<T>>
    where
        F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> Result<T>,
        T: Send,
    {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // `with_git_state` wraps `f`'s own `Result` in an outer fs-level
            // `Result`; the trailing `?` unwraps the outer layer.
            fs.with_git_state(&dot_git_path, write, f)?
        }
        .boxed()
    }

    /// Applies a single ref update or deletion to the fake ref table.
    fn edit_ref(&self, edit: RefEdit) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            match edit {
                RefEdit::Update { ref_name, commit } => {
                    state.refs.insert(ref_name, commit);
                }
                RefEdit::Delete { ref_name } => {
                    state.refs.remove(&ref_name);
                }
            }
            Ok(())
        })
    }

    /// Scans `.git/worktrees/*/gitdir` to find the admin entry directory for a
    /// worktree at the given checkout path. Used when the working tree directory
    /// has already been deleted and we can't read its `.git` pointer file.
    async fn find_worktree_entry_dir_by_path(&self, path: &Path) -> Option<PathBuf> {
        use futures::StreamExt;

        let worktrees_dir = self.common_dir_path.join("worktrees");
        let mut entries = self.fs.read_dir(&worktrees_dir).await.ok()?;
        while let Some(Ok(entry_path)) = entries.next().await {
            // Each entry's `gitdir` file contains the path of the checkout's
            // `.git` pointer file; its parent is the checkout directory.
            if let Ok(gitdir_content) = self.fs.load(&entry_path.join("gitdir")).await {
                let worktree_path = PathBuf::from(gitdir_content.trim())
                    .parent()
                    .map(PathBuf::from)
                    .unwrap_or_default();
                if worktree_path == path {
                    return Some(entry_path);
                }
            }
        }
        None
    }
}
149
150impl GitRepository for FakeGitRepository {
    // No-op: the fake keeps its index purely in memory.
    fn reload_index(&self) {}

    /// Returns the staged contents for `path`, or `None` if the path is not
    /// in the index (state-access errors are also collapsed to `None`).
    fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
        let fut = self.with_state_async(false, move |state| {
            state
                .index_contents
                .get(&path)
                .context("not present in index")
                .cloned()
        });
        self.executor.spawn(async move { fut.await.ok() }).boxed()
    }

    /// Returns the committed (HEAD) contents for `path`, or `None` if the
    /// path is not tracked at HEAD.
    fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
        let fut = self.with_state_async(false, move |state| {
            state
                .head_contents
                .get(&path)
                .context("not present in HEAD")
                .cloned()
        });
        self.executor.spawn(async move { fut.await.ok() }).boxed()
    }

    // The fake has no commit template configured.
    fn load_commit_template(&self) -> BoxFuture<'_, Result<Option<GitCommitTemplate>>> {
        async { Ok(None) }.boxed()
    }

    /// Looks up blob contents by Oid in the in-memory object table.
    fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
        self.with_state_async(false, move |state| {
            state.oids.get(&oid).cloned().context("oid does not exist")
        })
        .boxed()
    }

    // Not supported by the fake.
    fn load_commit(
        &self,
        _commit: String,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::CommitDiff>> {
        unimplemented!()
    }
193
194 fn set_index_text(
195 &self,
196 path: RepoPath,
197 content: Option<String>,
198 _env: Arc<HashMap<String, String>>,
199 _is_executable: bool,
200 ) -> BoxFuture<'_, anyhow::Result<()>> {
201 self.with_state_async(true, move |state| {
202 if let Some(message) = &state.simulated_index_write_error_message {
203 anyhow::bail!("{message}");
204 } else if let Some(content) = content {
205 state.index_contents.insert(path, content);
206 } else {
207 state.index_contents.remove(&path);
208 }
209 Ok(())
210 })
211 }
212
213 fn remote_url(&self, name: &str) -> BoxFuture<'_, Option<String>> {
214 let name = name.to_string();
215 let fut = self.with_state_async(false, move |state| {
216 state
217 .remotes
218 .get(&name)
219 .context("remote not found")
220 .cloned()
221 });
222 async move { fut.await.ok() }.boxed()
223 }
224
    /// Diffs HEAD against the merge base: entries present only at HEAD are
    /// `Added`, entries whose contents changed are `Modified`, and merge-base
    /// entries missing from HEAD are `Deleted`. The `_request` is ignored by
    /// the fake.
    fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
        let mut entries = HashMap::default();
        self.with_state_async(false, |state| {
            for (path, content) in &state.head_contents {
                let status = if let Some((oid, original)) = state
                    .merge_base_contents
                    .get(path)
                    // Indexing `oids` is safe per the state invariant: every
                    // merge-base oid has its contents recorded in `oids`.
                    .map(|oid| (oid, &state.oids[oid]))
                {
                    if original == content {
                        // Unchanged since the merge base: not part of the diff.
                        continue;
                    }
                    TreeDiffStatus::Modified { old: *oid }
                } else {
                    TreeDiffStatus::Added
                };
                entries.insert(path.clone(), status);
            }
            // Anything at the merge base that never appeared above was
            // removed at HEAD.
            for (path, oid) in &state.merge_base_contents {
                if !entries.contains_key(path) {
                    entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
                }
            }
            Ok(TreeDiff { entries })
        })
        .boxed()
    }
252
253 fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
254 self.with_state_async(false, |state| {
255 Ok(revs
256 .into_iter()
257 .map(|rev| state.refs.get(&rev).cloned())
258 .collect())
259 })
260 }
261
262 fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
263 async {
264 Ok(CommitDetails {
265 sha: commit.into(),
266 message: "initial commit".into(),
267 ..Default::default()
268 })
269 }
270 .boxed()
271 }
272
    /// Resets HEAD by popping entries off `commit_history`.
    ///
    /// `commit` may be `HEAD~`/`HEAD^` (one commit), `HEAD~N`, or a sha
    /// recorded in the history. `Soft` restores only HEAD contents; `Mixed`
    /// additionally resets the index to match.
    fn reset(
        &self,
        commit: String,
        mode: ResetMode,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            // Translate the commit-ish into "how many history entries to pop".
            let pop_count = if commit == "HEAD~" || commit == "HEAD^" {
                1
            } else if let Some(suffix) = commit.strip_prefix("HEAD~") {
                suffix
                    .parse::<usize>()
                    .with_context(|| format!("Invalid HEAD~ offset: {commit}"))?
            } else {
                // Otherwise treat `commit` as a sha recorded in the history;
                // search from the end so the most recent match wins.
                match state
                    .commit_history
                    .iter()
                    .rposition(|entry| entry.sha == commit)
                {
                    Some(index) => state.commit_history.len() - index,
                    None => anyhow::bail!("Unknown commit ref: {commit}"),
                }
            };

            if pop_count == 0 || pop_count > state.commit_history.len() {
                anyhow::bail!(
                    "Cannot reset {pop_count} commit(s): only {} in history",
                    state.commit_history.len()
                );
            }

            let target_index = state.commit_history.len() - pop_count;
            let snapshot = state.commit_history[target_index].clone();
            // Discard the popped commits from the history.
            state.commit_history.truncate(target_index);

            match mode {
                ResetMode::Soft => {
                    // Soft reset: move HEAD only, leave the index untouched.
                    state.head_contents = snapshot.head_contents;
                }
                ResetMode::Mixed => {
                    // Mixed reset: move HEAD and make the index match it.
                    state.head_contents = snapshot.head_contents;
                    state.index_contents = state.head_contents.clone();
                }
            }

            state.refs.insert("HEAD".into(), snapshot.sha);
            Ok(())
        })
    }
322
    // Not supported by the fake.
    fn checkout_files(
        &self,
        _commit: String,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// The git dir for this checkout (a `.git/worktrees/<name>` entry for
    /// linked worktrees).
    fn path(&self) -> PathBuf {
        self.repository_dir_path.clone()
    }

    /// The main repository's `.git` directory, shared by all worktrees.
    fn main_repository_path(&self) -> PathBuf {
        self.common_dir_path.clone()
    }

    // The fake never has a merge in progress.
    fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
        async move { None }.boxed()
    }
343
344 fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>> {
345 let workdir_path = self.dot_git_path.parent().unwrap();
346
347 // Load gitignores
348 let ignores = workdir_path
349 .ancestors()
350 .filter_map(|dir| {
351 let ignore_path = dir.join(".gitignore");
352 let content = self.fs.read_file_sync(ignore_path).ok()?;
353 let content = String::from_utf8(content).ok()?;
354 let mut builder = GitignoreBuilder::new(dir);
355 for line in content.lines() {
356 builder.add_line(Some(dir.into()), line).ok()?;
357 }
358 builder.build().ok()
359 })
360 .collect::<Vec<_>>();
361
362 // Load working copy files.
363 let git_files: HashMap<RepoPath, (String, bool)> = self
364 .fs
365 .files()
366 .iter()
367 .filter_map(|path| {
368 // TODO better simulate git status output in the case of submodules and worktrees
369 let repo_path = path.strip_prefix(workdir_path).ok()?;
370 let mut is_ignored = repo_path.starts_with(".git");
371 for ignore in &ignores {
372 match ignore.matched_path_or_any_parents(path, false) {
373 ignore::Match::None => {}
374 ignore::Match::Ignore(_) => is_ignored = true,
375 ignore::Match::Whitelist(_) => break,
376 }
377 }
378 let content = self
379 .fs
380 .read_file_sync(path)
381 .ok()
382 .map(|content| String::from_utf8(content).unwrap())?;
383 let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
384 Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored)))
385 })
386 .collect();
387
388 let result = self.fs.with_git_state(&self.dot_git_path, false, |state| {
389 let mut entries = Vec::new();
390 let paths = state
391 .head_contents
392 .keys()
393 .chain(state.index_contents.keys())
394 .chain(git_files.keys())
395 .collect::<HashSet<_>>();
396 for path in paths {
397 if !path_prefixes.iter().any(|prefix| path.starts_with(prefix)) {
398 continue;
399 }
400
401 let head = state.head_contents.get(path);
402 let index = state.index_contents.get(path);
403 let unmerged = state.unmerged_paths.get(path);
404 let fs = git_files.get(path);
405 let status = match (unmerged, head, index, fs) {
406 (Some(unmerged), _, _, _) => FileStatus::Unmerged(*unmerged),
407 (_, Some(head), Some(index), Some((fs, _))) => {
408 FileStatus::Tracked(TrackedStatus {
409 index_status: if head == index {
410 StatusCode::Unmodified
411 } else {
412 StatusCode::Modified
413 },
414 worktree_status: if fs == index {
415 StatusCode::Unmodified
416 } else {
417 StatusCode::Modified
418 },
419 })
420 }
421 (_, Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
422 index_status: if head == index {
423 StatusCode::Unmodified
424 } else {
425 StatusCode::Modified
426 },
427 worktree_status: StatusCode::Deleted,
428 }),
429 (_, Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
430 index_status: StatusCode::Deleted,
431 worktree_status: StatusCode::Added,
432 }),
433 (_, Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
434 index_status: StatusCode::Deleted,
435 worktree_status: StatusCode::Deleted,
436 }),
437 (_, None, Some(index), Some((fs, _))) => FileStatus::Tracked(TrackedStatus {
438 index_status: StatusCode::Added,
439 worktree_status: if fs == index {
440 StatusCode::Unmodified
441 } else {
442 StatusCode::Modified
443 },
444 }),
445 (_, None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
446 index_status: StatusCode::Added,
447 worktree_status: StatusCode::Deleted,
448 }),
449 (_, None, None, Some((_, is_ignored))) => {
450 if *is_ignored {
451 continue;
452 }
453 FileStatus::Untracked
454 }
455 (_, None, None, None) => {
456 unreachable!();
457 }
458 };
459 if status
460 != FileStatus::Tracked(TrackedStatus {
461 index_status: StatusCode::Unmodified,
462 worktree_status: StatusCode::Unmodified,
463 })
464 {
465 entries.push((path.clone(), status));
466 }
467 }
468 entries.sort_by(|a, b| a.0.cmp(&b.0));
469 anyhow::Ok(GitStatus {
470 entries: entries.into(),
471 })
472 });
473 Task::ready(match result {
474 Ok(result) => result,
475 Err(e) => Err(e),
476 })
477 }
478
    /// Returns a clone of the canned stash list.
    fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
        self.with_state_async(false, |state| Ok(state.stash_entries.clone()))
    }

    /// Lists all branches, expanding plain names to full ref names:
    /// names containing `/` are treated as remote branches
    /// (`refs/remotes/...`), everything else as local (`refs/heads/...`).
    fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
        self.with_state_async(false, move |state| {
            let current_branch = &state.current_branch_name;
            let mut branches = state
                .branches
                .iter()
                .map(|branch_name| {
                    let ref_name = if branch_name.starts_with("refs/") {
                        branch_name.into()
                    } else if branch_name.contains('/') {
                        format!("refs/remotes/{branch_name}").into()
                    } else {
                        format!("refs/heads/{branch_name}").into()
                    };
                    Branch {
                        is_head: Some(branch_name) == current_branch.as_ref(),
                        ref_name,
                        most_recent_commit: None,
                        upstream: None,
                    }
                })
                .collect::<Vec<_>>();
            // compute snapshot expects these to be sorted by ref_name
            // because that's what git itself does
            branches.sort_by(|a, b| a.ref_name.cmp(&b.ref_name));
            Ok(branches)
        })
    }
511
    /// Lists the main worktree plus every linked worktree recorded under
    /// `.git/worktrees/`. Each linked entry's `HEAD` and `gitdir` files are
    /// read to recover its branch/sha and checkout path.
    fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
        let fs = self.fs.clone();
        let common_dir_path = self.common_dir_path.clone();
        let executor = self.executor.clone();

        async move {
            executor.simulate_random_delay().await;

            // Build the main worktree entry from the shared git state, and
            // grab a copy of the ref table for resolving linked worktrees.
            let (main_worktree, refs) = fs.with_git_state(&common_dir_path, false, |state| {
                let work_dir = common_dir_path
                    .parent()
                    .map(PathBuf::from)
                    .unwrap_or_else(|| common_dir_path.clone());
                let head_sha = state
                    .refs
                    .get("HEAD")
                    .cloned()
                    .unwrap_or_else(|| "0000000".to_string());
                let branch_ref = state
                    .current_branch_name
                    .as_ref()
                    .map(|name| format!("refs/heads/{name}"))
                    .unwrap_or_else(|| "refs/heads/main".to_string());
                let main_wt = Worktree {
                    path: work_dir,
                    ref_name: Some(branch_ref.into()),
                    sha: head_sha.into(),
                    is_main: true,
                    is_bare: false,
                };
                (main_wt, state.refs.clone())
            })?;

            let mut all = vec![main_worktree];

            let worktrees_dir = common_dir_path.join("worktrees");
            if let Ok(mut entries) = fs.read_dir(&worktrees_dir).await {
                use futures::StreamExt;
                while let Some(Ok(entry_path)) = entries.next().await {
                    // Entries missing either admin file are skipped silently.
                    let head_content = match fs.load(&entry_path.join("HEAD")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };
                    let gitdir_content = match fs.load(&entry_path.join("gitdir")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };

                    // "ref: refs/heads/x" means the worktree is on a branch;
                    // otherwise HEAD holds a bare sha (detached).
                    let ref_name = head_content
                        .strip_prefix("ref: ")
                        .map(|s| s.trim().to_string());
                    let sha = ref_name
                        .as_ref()
                        .and_then(|r| refs.get(r))
                        .cloned()
                        .unwrap_or_else(|| head_content.trim().to_string());

                    // `gitdir` points at the checkout's `.git` file; its
                    // parent is the checkout directory itself.
                    let worktree_path = PathBuf::from(gitdir_content.trim())
                        .parent()
                        .map(PathBuf::from)
                        .unwrap_or_default();

                    all.push(Worktree {
                        path: worktree_path,
                        ref_name: ref_name.map(Into::into),
                        sha: sha.into(),
                        is_main: false,
                        is_bare: false,
                    });
                }
            }

            Ok(all)
        }
        .boxed()
    }
588
    /// Creates a linked worktree at `path`, mimicking git's on-disk layout:
    /// a `.git/worktrees/<name>/` admin directory (with `HEAD`, `commondir`,
    /// `gitdir`) plus a `.git` pointer file inside the checkout.
    fn create_worktree(
        &self,
        target: CreateWorktreeTarget,
        path: PathBuf,
    ) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            let branch_name = target.branch_name().map(ToOwned::to_owned);
            let create_branch_ref = matches!(target, CreateWorktreeTarget::NewBranch { .. });

            // Check for simulated error and validate branch state before any side effects.
            fs.with_git_state(&dot_git_path, false, {
                let branch_name = branch_name.clone();
                move |state| {
                    if let Some(message) = &state.simulated_create_worktree_error {
                        anyhow::bail!("{message}");
                    }

                    match (create_branch_ref, branch_name.as_ref()) {
                        // Creating a new branch: the name must be free.
                        (true, Some(branch_name)) => {
                            if state.branches.contains(branch_name) {
                                bail!("a branch named '{}' already exists", branch_name);
                            }
                        }
                        // Checking out an existing branch: it must exist.
                        (false, Some(branch_name)) => {
                            if !state.branches.contains(branch_name) {
                                bail!("no branch named '{}' exists", branch_name);
                            }
                        }
                        // Detached checkout needs no branch validation.
                        (false, None) => {}
                        (true, None) => bail!("branch name is required to create a branch"),
                    }

                    Ok(())
                }
            })??;

            // Resolve the target into (optional branch, starting sha,
            // whether a new branch ref must be created afterwards).
            let (branch_name, sha, create_branch_ref) = match target {
                CreateWorktreeTarget::ExistingBranch { branch_name } => {
                    let ref_name = format!("refs/heads/{branch_name}");
                    let sha = fs.with_git_state(&dot_git_path, false, {
                        move |state| {
                            Ok::<_, anyhow::Error>(
                                state
                                    .refs
                                    .get(&ref_name)
                                    .cloned()
                                    .unwrap_or_else(|| "fake-sha".to_string()),
                            )
                        }
                    })??;
                    (Some(branch_name), sha, false)
                }
                CreateWorktreeTarget::NewBranch {
                    branch_name,
                    base_sha: start_point,
                } => (
                    Some(branch_name),
                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
                    true,
                ),
                CreateWorktreeTarget::Detached {
                    base_sha: start_point,
                } => (
                    None,
                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
                    false,
                ),
            };

            // Create the worktree checkout directory.
            fs.create_dir(&path).await?;

            // Create .git/worktrees/<name>/ directory with HEAD, commondir, gitdir.
            // The admin entry is named after the branch, falling back to the
            // checkout directory's name (or "detached").
            let worktree_entry_name = branch_name.as_deref().unwrap_or_else(|| {
                path.file_name()
                    .and_then(|name| name.to_str())
                    .unwrap_or("detached")
            });
            let worktrees_entry_dir = common_dir_path.join("worktrees").join(worktree_entry_name);
            fs.create_dir(&worktrees_entry_dir).await?;

            // HEAD is a symbolic ref for branch checkouts, a bare sha when detached.
            let head_content = if let Some(ref branch_name) = branch_name {
                let ref_name = format!("refs/heads/{branch_name}");
                format!("ref: {ref_name}")
            } else {
                sha.clone()
            };
            fs.write_file_internal(
                worktrees_entry_dir.join("HEAD"),
                head_content.into_bytes(),
                false,
            )?;
            fs.write_file_internal(
                worktrees_entry_dir.join("commondir"),
                common_dir_path.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;
            let worktree_dot_git = path.join(".git");
            fs.write_file_internal(
                worktrees_entry_dir.join("gitdir"),
                worktree_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Create .git file in the worktree checkout.
            fs.write_file_internal(
                &worktree_dot_git,
                format!("gitdir: {}", worktrees_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Update git state for newly created branches.
            if create_branch_ref {
                fs.with_git_state(&dot_git_path, true, {
                    let branch_name = branch_name.clone();
                    let sha = sha.clone();
                    move |state| {
                        if let Some(branch_name) = branch_name {
                            let ref_name = format!("refs/heads/{branch_name}");
                            state.refs.insert(ref_name, sha);
                            state.branches.insert(branch_name);
                        }
                        Ok::<(), anyhow::Error>(())
                    }
                })??;
            }

            Ok(())
        }
        .boxed()
    }
726
    /// Removes a linked worktree: deletes its checkout directory (if still
    /// present) and its `.git/worktrees/<name>/` admin entry, then emits a
    /// git event. The `_force` flag is ignored by the fake.
    fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Try to read the worktree's .git file to find its entry
            // directory. If the working tree is already gone (e.g. the
            // caller deleted it before asking git to clean up), fall back
            // to scanning `.git/worktrees/*/gitdir` for a matching path,
            // mirroring real git's behavior with `--force`.
            let dot_git_file = path.join(".git");
            let worktree_entry_dir = if let Ok(content) = fs.load(&dot_git_file).await {
                let gitdir = content
                    .strip_prefix("gitdir:")
                    .context("invalid .git file in worktree")?
                    .trim();
                PathBuf::from(gitdir)
            } else {
                self.find_worktree_entry_dir_by_path(&path)
                    .await
                    .with_context(|| format!("no worktree found at path: {}", path.display()))?
            };

            // Remove the worktree checkout directory if it still exists.
            fs.remove_dir(
                &path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await?;

            // Remove the .git/worktrees/<name>/ directory.
            fs.remove_dir(
                &worktree_entry_dir,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: false,
                },
            )
            .await?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
780
    /// Moves a linked worktree's checkout from `old_path` to `new_path` and
    /// rewrites the two pointer files (`gitdir` in the admin entry and the
    /// checkout's `.git` file) to keep them consistent.
    fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Read the worktree's .git file to find its entry directory.
            let dot_git_file = old_path.join(".git");
            let content = fs
                .load(&dot_git_file)
                .await
                .with_context(|| format!("no worktree found at path: {}", old_path.display()))?;
            let gitdir = content
                .strip_prefix("gitdir:")
                .context("invalid .git file in worktree")?
                .trim();
            let worktree_entry_dir = PathBuf::from(gitdir);

            // Move the worktree checkout directory.
            fs.rename(
                &old_path,
                &new_path,
                RenameOptions {
                    overwrite: false,
                    ignore_if_exists: false,
                    create_parents: true,
                },
            )
            .await?;

            // Update the gitdir file in .git/worktrees/<name>/ to point to the
            // new location.
            let new_dot_git = new_path.join(".git");
            fs.write_file_internal(
                worktree_entry_dir.join("gitdir"),
                new_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Update the .git file in the moved worktree checkout.
            fs.write_file_internal(
                &new_dot_git,
                format!("gitdir: {}", worktree_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
836
    // No-op in the fake: reported as success without changing any state.
    fn checkout_branch_in_worktree(
        &self,
        _branch_name: String,
        _worktree_path: PathBuf,
        _create: bool,
    ) -> BoxFuture<'_, Result<()>> {
        async { Ok(()) }.boxed()
    }
845
846 fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
847 self.with_state_async(true, |state| {
848 state.current_branch_name = Some(name);
849 Ok(())
850 })
851 }
852
853 fn create_branch(
854 &self,
855 name: String,
856 _base_branch: Option<String>,
857 ) -> BoxFuture<'_, Result<()>> {
858 self.with_state_async(true, move |state| {
859 if let Some((remote, _)) = name.split_once('/')
860 && !state.remotes.contains_key(remote)
861 {
862 state.remotes.insert(remote.to_owned(), "".to_owned());
863 }
864 state.branches.insert(name);
865 Ok(())
866 })
867 }
868
869 fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> {
870 self.with_state_async(true, move |state| {
871 if !state.branches.remove(&branch) {
872 bail!("no such branch: {branch}");
873 }
874 state.branches.insert(new_name.clone());
875 if state.current_branch_name == Some(branch) {
876 state.current_branch_name = Some(new_name);
877 }
878 Ok(())
879 })
880 }
881
882 fn delete_branch(&self, _is_remote: bool, name: String) -> BoxFuture<'_, Result<()>> {
883 self.with_state_async(true, move |state| {
884 if !state.branches.remove(&name) {
885 bail!("no such branch: {name}");
886 }
887 Ok(())
888 })
889 }
890
    /// Returns the canned blame registered for `path` via the state's
    /// `blames` map; the actual buffer contents and line endings are ignored.
    fn blame(
        &self,
        path: RepoPath,
        _content: Rope,
        _line_ending: LineEnding,
    ) -> BoxFuture<'_, Result<git::blame::Blame>> {
        self.with_state_async(false, move |state| {
            state
                .blames
                .get(&path)
                .with_context(|| format!("failed to get blame for {:?}", path))
                .cloned()
        })
    }
905
906 fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
907 self.file_history_paginated(path, 0, None)
908 }
909
910 fn file_history_paginated(
911 &self,
912 path: RepoPath,
913 _skip: usize,
914 _limit: Option<usize>,
915 ) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
916 async move {
917 Ok(git::repository::FileHistory {
918 entries: Vec::new(),
919 path,
920 })
921 }
922 .boxed()
923 }
924
    /// Stages the given paths by copying their on-disk contents into the
    /// index; a path missing from disk is removed from the index (mirroring
    /// `git add` of a deleted file).
    fn stage_paths(
        &self,
        paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        Box::pin(async move {
            // Read all files concurrently before taking the state lock.
            let contents = paths
                .into_iter()
                .map(|path| {
                    let abs_path = self
                        .dot_git_path
                        .parent()
                        .unwrap()
                        .join(&path.as_std_path());
                    Box::pin(async move { (path.clone(), self.fs.load(&abs_path).await.ok()) })
                })
                .collect::<Vec<_>>();
            let contents = join_all(contents).await;
            self.with_state_async(true, move |state| {
                for (path, content) in contents {
                    if let Some(content) = content {
                        state.index_contents.insert(path, content);
                    } else {
                        state.index_contents.remove(&path);
                    }
                }
                Ok(())
            })
            .await
        })
    }
956
957 fn unstage_paths(
958 &self,
959 paths: Vec<RepoPath>,
960 _env: Arc<HashMap<String, String>>,
961 ) -> BoxFuture<'_, Result<()>> {
962 self.with_state_async(true, move |state| {
963 for path in paths {
964 match state.head_contents.get(&path) {
965 Some(content) => state.index_contents.insert(path, content.clone()),
966 None => state.index_contents.remove(&path),
967 };
968 }
969 Ok(())
970 })
971 }
972
    // Stash mutation is not supported by the fake; only the canned entries
    // returned by `stash_entries` are available.
    fn stash_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Not supported by the fake.
    fn stash_pop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Not supported by the fake.
    fn stash_apply(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    // Not supported by the fake.
    fn stash_drop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
1004
    /// Commits the current index: snapshots the old state onto
    /// `commit_history`, promotes the index to HEAD, and advances the HEAD
    /// ref to a synthetic sha.
    ///
    /// Note: `options.amend` is only consulted to bypass the empty-commit
    /// check — the fake still records a new history entry rather than
    /// rewriting the previous one. Message, author, askpass, and env are
    /// ignored.
    fn commit(
        &self,
        _message: gpui::SharedString,
        _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
        options: CommitOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            if !options.allow_empty && !options.amend && state.index_contents == state.head_contents
            {
                anyhow::bail!("nothing to commit (use allow_empty to create an empty commit)");
            }

            // Record the pre-commit state so `reset` can restore it later.
            let old_sha = state.refs.get("HEAD").cloned().unwrap_or_default();
            state.commit_history.push(FakeCommitSnapshot {
                head_contents: state.head_contents.clone(),
                index_contents: state.index_contents.clone(),
                sha: old_sha,
            });

            state.head_contents = state.index_contents.clone();

            // Synthesize a deterministic sha from the history length.
            let new_sha = format!("fake-commit-{}", state.commit_history.len());
            state.refs.insert("HEAD".into(), new_sha);

            Ok(())
        })
    }
1034
    // Hooks always "succeed" in the fake.
    fn run_hook(
        &self,
        _hook: RunHook,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        async { Ok(()) }.boxed()
    }

    // Network operations are not supported by the fake.
    fn push(
        &self,
        _branch: String,
        _remote_branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    // Not supported by the fake.
    fn pull(
        &self,
        _branch: Option<String>,
        _remote: String,
        _rebase: bool,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    // Not supported by the fake.
    fn fetch(
        &self,
        _fetch_options: FetchOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
1077
1078 fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
1079 self.with_state_async(false, move |state| {
1080 let remotes = state
1081 .remotes
1082 .keys()
1083 .map(|r| Remote {
1084 name: r.clone().into(),
1085 })
1086 .collect::<Vec<_>>();
1087 Ok(remotes)
1088 })
1089 }
1090
    // Per-branch remote resolution is not supported by the fake.
    fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }

    // Not supported by the fake.
    fn get_branch_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }

    // The fake never reports any pushed commits.
    fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
        future::ready(Ok(Vec::new())).boxed()
    }

    // Textual diffs are always empty in the fake.
    fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result<String>> {
        future::ready(Ok(String::new())).boxed()
    }
1106
    /// Computes per-file added/deleted line counts between HEAD and the
    /// working tree, optionally restricted to `path_prefixes`.
    ///
    /// Only tracked files are considered: a worktree file contributes only if
    /// it is present in the index, so untracked files never appear. Because
    /// the fake store keeps whole-file contents rather than hunks, a modified
    /// file reports its full old line count as deleted and its full new line
    /// count as added.
    fn diff_stat(
        &self,
        path_prefixes: &[RepoPath],
    ) -> BoxFuture<'_, Result<git::status::GitDiffStat>> {
        // Number of lines in a file snapshot; empty content counts as zero.
        fn count_lines(s: &str) -> u32 {
            if s.is_empty() {
                0
            } else {
                s.lines().count() as u32
            }
        }

        // True when `path` falls under at least one prefix. An empty prefix
        // list, or a "." prefix, matches every path.
        fn matches_prefixes(path: &RepoPath, prefixes: &[RepoPath]) -> bool {
            if prefixes.is_empty() {
                return true;
            }
            prefixes.iter().any(|prefix| {
                let prefix_str = prefix.as_unix_str();
                if prefix_str == "." {
                    return true;
                }
                path == prefix || path.starts_with(&prefix)
            })
        }

        let path_prefixes = path_prefixes.to_vec();

        // Snapshot the working tree eagerly, before entering the async state
        // closure. Paths are made repo-relative to the parent of `.git`;
        // anything under `.git` itself, and any file whose bytes are not
        // valid UTF-8, is skipped.
        let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf();
        let worktree_files: HashMap<RepoPath, String> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                let repo_path = path.strip_prefix(&workdir_path).ok()?;
                if repo_path.starts_with(".git") {
                    return None;
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .and_then(|bytes| String::from_utf8(bytes).ok())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), content))
            })
            .collect();

        self.with_state_async(false, move |state| {
            let mut entries = Vec::new();
            // Union of paths known to HEAD and worktree paths that are
            // tracked (i.e. present in the index).
            let all_paths: HashSet<&RepoPath> = state
                .head_contents
                .keys()
                .chain(
                    worktree_files
                        .keys()
                        .filter(|p| state.index_contents.contains_key(*p)),
                )
                .collect();
            for path in all_paths {
                if !matches_prefixes(path, &path_prefixes) {
                    continue;
                }
                let head = state.head_contents.get(path);
                let worktree = worktree_files.get(path);
                match (head, worktree) {
                    // Modified: whole old content deleted, whole new content
                    // added (no hunk-level granularity in the fake).
                    (Some(old), Some(new)) if old != new => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: count_lines(new),
                                deleted: count_lines(old),
                            },
                        ));
                    }
                    // Present in HEAD but gone from the worktree: deletion.
                    (Some(old), None) => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: 0,
                                deleted: count_lines(old),
                            },
                        ));
                    }
                    // Tracked in the worktree but absent from HEAD: addition.
                    (None, Some(new)) => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: count_lines(new),
                                deleted: 0,
                            },
                        ));
                    }
                    // Unchanged files contribute nothing.
                    _ => {}
                }
            }
            // Sort by path for deterministic output order.
            entries.sort_by(|(a, _), (b, _)| a.cmp(b));
            Ok(git::status::GitDiffStat {
                entries: entries.into(),
            })
        })
        .boxed()
    }
1209
1210 fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
1211 let executor = self.executor.clone();
1212 let fs = self.fs.clone();
1213 let checkpoints = self.checkpoints.clone();
1214 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1215 async move {
1216 executor.simulate_random_delay().await;
1217 let oid = git::Oid::random(&mut *executor.rng().lock());
1218 let entry = fs.entry(&repository_dir_path)?;
1219 checkpoints.lock().insert(oid, entry);
1220 Ok(GitRepositoryCheckpoint { commit_sha: oid })
1221 }
1222 .boxed()
1223 }
1224
1225 fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
1226 let executor = self.executor.clone();
1227 let fs = self.fs.clone();
1228 let checkpoints = self.checkpoints.clone();
1229 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1230 async move {
1231 executor.simulate_random_delay().await;
1232 let checkpoints = checkpoints.lock();
1233 let entry = checkpoints
1234 .get(&checkpoint.commit_sha)
1235 .context(format!("invalid checkpoint: {}", checkpoint.commit_sha))?;
1236 fs.insert_entry(&repository_dir_path, entry.clone())?;
1237 Ok(())
1238 }
1239 .boxed()
1240 }
1241
1242 fn create_archive_checkpoint(&self) -> BoxFuture<'_, Result<(String, String)>> {
1243 let executor = self.executor.clone();
1244 let fs = self.fs.clone();
1245 let checkpoints = self.checkpoints.clone();
1246 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1247 async move {
1248 executor.simulate_random_delay().await;
1249 let staged_oid = git::Oid::random(&mut *executor.rng().lock());
1250 let unstaged_oid = git::Oid::random(&mut *executor.rng().lock());
1251 let entry = fs.entry(&repository_dir_path)?;
1252 checkpoints.lock().insert(staged_oid, entry.clone());
1253 checkpoints.lock().insert(unstaged_oid, entry);
1254 Ok((staged_oid.to_string(), unstaged_oid.to_string()))
1255 }
1256 .boxed()
1257 }
1258
1259 fn restore_archive_checkpoint(
1260 &self,
1261 // The fake filesystem doesn't model a separate index, so only the
1262 // unstaged (full working directory) snapshot is restored.
1263 _staged_sha: String,
1264 unstaged_sha: String,
1265 ) -> BoxFuture<'_, Result<()>> {
1266 match unstaged_sha.parse() {
1267 Ok(commit_sha) => self.restore_checkpoint(GitRepositoryCheckpoint { commit_sha }),
1268 Err(error) => async move {
1269 Err(anyhow::anyhow!(error).context("failed to parse unstaged SHA as Oid"))
1270 }
1271 .boxed(),
1272 }
1273 }
1274
1275 fn compare_checkpoints(
1276 &self,
1277 left: GitRepositoryCheckpoint,
1278 right: GitRepositoryCheckpoint,
1279 ) -> BoxFuture<'_, Result<bool>> {
1280 let executor = self.executor.clone();
1281 let checkpoints = self.checkpoints.clone();
1282 async move {
1283 executor.simulate_random_delay().await;
1284 let checkpoints = checkpoints.lock();
1285 let left = checkpoints
1286 .get(&left.commit_sha)
1287 .context(format!("invalid left checkpoint: {}", left.commit_sha))?;
1288 let right = checkpoints
1289 .get(&right.commit_sha)
1290 .context(format!("invalid right checkpoint: {}", right.commit_sha))?;
1291
1292 Ok(left == right)
1293 }
1294 .boxed()
1295 }
1296
1297 fn diff_checkpoints(
1298 &self,
1299 base_checkpoint: GitRepositoryCheckpoint,
1300 target_checkpoint: GitRepositoryCheckpoint,
1301 ) -> BoxFuture<'_, Result<String>> {
1302 let executor = self.executor.clone();
1303 let checkpoints = self.checkpoints.clone();
1304 async move {
1305 executor.simulate_random_delay().await;
1306 let checkpoints = checkpoints.lock();
1307 let base = checkpoints
1308 .get(&base_checkpoint.commit_sha)
1309 .context(format!(
1310 "invalid base checkpoint: {}",
1311 base_checkpoint.commit_sha
1312 ))?;
1313 let target = checkpoints
1314 .get(&target_checkpoint.commit_sha)
1315 .context(format!(
1316 "invalid target checkpoint: {}",
1317 target_checkpoint.commit_sha
1318 ))?;
1319
1320 fn collect_files(
1321 entry: &FakeFsEntry,
1322 prefix: String,
1323 out: &mut std::collections::BTreeMap<String, String>,
1324 ) {
1325 match entry {
1326 FakeFsEntry::File { content, .. } => {
1327 out.insert(prefix, String::from_utf8_lossy(content).into_owned());
1328 }
1329 FakeFsEntry::Dir { entries, .. } => {
1330 for (name, child) in entries {
1331 let path = if prefix.is_empty() {
1332 name.clone()
1333 } else {
1334 format!("{prefix}/{name}")
1335 };
1336 collect_files(child, path, out);
1337 }
1338 }
1339 FakeFsEntry::Symlink { .. } => {}
1340 }
1341 }
1342
1343 let mut base_files = std::collections::BTreeMap::new();
1344 let mut target_files = std::collections::BTreeMap::new();
1345 collect_files(base, String::new(), &mut base_files);
1346 collect_files(target, String::new(), &mut target_files);
1347
1348 let all_paths: std::collections::BTreeSet<&String> =
1349 base_files.keys().chain(target_files.keys()).collect();
1350
1351 let mut diff = String::new();
1352 for path in all_paths {
1353 match (base_files.get(path), target_files.get(path)) {
1354 (Some(base_content), Some(target_content))
1355 if base_content != target_content =>
1356 {
1357 diff.push_str(&format!("diff --git a/{path} b/{path}\n"));
1358 diff.push_str(&format!("--- a/{path}\n"));
1359 diff.push_str(&format!("+++ b/{path}\n"));
1360 for line in base_content.lines() {
1361 diff.push_str(&format!("-{line}\n"));
1362 }
1363 for line in target_content.lines() {
1364 diff.push_str(&format!("+{line}\n"));
1365 }
1366 }
1367 (Some(_), None) => {
1368 diff.push_str(&format!("diff --git a/{path} /dev/null\n"));
1369 diff.push_str("deleted file\n");
1370 }
1371 (None, Some(_)) => {
1372 diff.push_str(&format!("diff --git /dev/null b/{path}\n"));
1373 diff.push_str("new file\n");
1374 }
1375 _ => {}
1376 }
1377 }
1378 Ok(diff)
1379 }
1380 .boxed()
1381 }
1382
1383 fn default_branch(
1384 &self,
1385 include_remote_name: bool,
1386 ) -> BoxFuture<'_, Result<Option<SharedString>>> {
1387 async move {
1388 Ok(Some(if include_remote_name {
1389 "origin/main".into()
1390 } else {
1391 "main".into()
1392 }))
1393 }
1394 .boxed()
1395 }
1396
    /// Registers a remote named `name` pointing at `url` in the fake state,
    /// overwriting any existing remote with the same name.
    // NOTE(review): the `true` flag appears to mark this as a mutating state
    // access — confirm against `with_state_async`.
    fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            state.remotes.insert(name, url);
            Ok(())
        })
    }
1403
1404 fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
1405 self.with_state_async(true, move |state| {
1406 state.branches.retain(|branch| {
1407 branch
1408 .split_once('/')
1409 .is_none_or(|(remote, _)| remote != name)
1410 });
1411 state.remotes.remove(&name);
1412 Ok(())
1413 })
1414 }
1415
1416 fn initial_graph_data(
1417 &self,
1418 _log_source: LogSource,
1419 _log_order: LogOrder,
1420 request_tx: Sender<Vec<Arc<InitialGraphCommitData>>>,
1421 ) -> BoxFuture<'_, Result<()>> {
1422 let fs = self.fs.clone();
1423 let dot_git_path = self.dot_git_path.clone();
1424 async move {
1425 let (graph_commits, simulated_error) =
1426 fs.with_git_state(&dot_git_path, false, |state| {
1427 (
1428 state.graph_commits.clone(),
1429 state.simulated_graph_error.clone(),
1430 )
1431 })?;
1432
1433 if let Some(error) = simulated_error {
1434 anyhow::bail!("{}", error);
1435 }
1436
1437 for chunk in graph_commits.chunks(GRAPH_CHUNK_SIZE) {
1438 request_tx.send(chunk.to_vec()).await.ok();
1439 }
1440 Ok(())
1441 }
1442 .boxed()
1443 }
1444
    /// Commit search is not modeled by the fake repository; always resolves
    /// to an error.
    fn search_commits(
        &self,
        _log_source: LogSource,
        _search_args: SearchCommitArgs,
        _request_tx: Sender<Oid>,
    ) -> BoxFuture<'_, Result<()>> {
        async { bail!("search_commits not supported for FakeGitRepository") }.boxed()
    }
1453
1454 fn commit_data_reader(&self) -> Result<CommitDataReader> {
1455 anyhow::bail!("commit_data_reader not supported for FakeGitRepository")
1456 }
1457
1458 fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> {
1459 self.edit_ref(RefEdit::Update { ref_name, commit })
1460 }
1461
1462 fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> {
1463 self.edit_ref(RefEdit::Delete { ref_name })
1464 }
1465
1466 fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>> {
1467 async { Ok(()) }.boxed()
1468 }
1469
1470 fn set_trusted(&self, trusted: bool) {
1471 self.is_trusted
1472 .store(trusted, std::sync::atomic::Ordering::Release);
1473 }
1474
1475 fn is_trusted(&self) -> bool {
1476 self.is_trusted.load(std::sync::atomic::Ordering::Acquire)
1477 }
1478}