1use std::path::Path;
2
3use crate::{FakeFs, FakeFsEntry, Fs, RemoveOptions, RenameOptions};
4use anyhow::{Context as _, Result, bail};
5use collections::{HashMap, HashSet};
6use futures::future::{self, BoxFuture, join_all};
7use git::{
8 Oid, RunHook,
9 blame::Blame,
10 repository::{
11 AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions,
12 CreateWorktreeTarget, FetchOptions, GRAPH_CHUNK_SIZE, GitRepository,
13 GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder, LogSource, PushOptions, RefEdit,
14 Remote, RepoPath, ResetMode, SearchCommitArgs, Worktree,
15 },
16 stash::GitStash,
17 status::{
18 DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
19 UnmergedStatus,
20 },
21};
22use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task};
23use ignore::gitignore::GitignoreBuilder;
24use parking_lot::Mutex;
25use rope::Rope;
26use smol::{channel::Sender, future::FutureExt as _};
27use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool};
28use text::LineEnding;
29use util::{paths::PathStyle, rel_path::RelPath};
30
/// In-memory stand-in for a real git repository, backed by [`FakeFs`].
/// Cloning is cheap: all mutable state lives behind `Arc`s or in `FakeFs`.
#[derive(Clone)]
pub struct FakeGitRepository {
    pub(crate) fs: Arc<FakeFs>,
    // Checkpoint id -> filesystem snapshot; presumably used by `checkpoint`
    // (defined later in this file) — confirm against the rest of the impl.
    pub(crate) checkpoints: Arc<Mutex<HashMap<Oid, FakeFsEntry>>>,
    pub(crate) executor: BackgroundExecutor,
    // Path to this repository's `.git` (a pointer file for linked worktrees,
    // a directory for the main repository).
    pub(crate) dot_git_path: PathBuf,
    pub(crate) repository_dir_path: PathBuf,
    // The main repository's shared `.git` directory; for the main repository
    // this coincides with `dot_git_path`.
    pub(crate) common_dir_path: PathBuf,
    pub(crate) is_trusted: Arc<AtomicBool>,
}
41
/// Snapshot of repository contents recorded when a commit is made, so that
/// `reset` can later restore the HEAD/index state for a given sha.
#[derive(Debug, Clone)]
pub struct FakeCommitSnapshot {
    pub head_contents: HashMap<RepoPath, String>,
    pub index_contents: HashMap<RepoPath, String>,
    // The sha whose tree these contents correspond to (the value of HEAD at
    // the moment the snapshot was taken, before the next commit advanced it).
    pub sha: String,
}
48
/// Mutable git state for a fake repository, stored inside [`FakeFs`] and
/// shared between all [`FakeGitRepository`] handles for the same `.git` path.
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    /// Snapshots appended on each `commit`, popped by `reset`.
    pub commit_history: Vec<FakeCommitSnapshot>,
    /// Notifies watchers that something under the `.git` dir changed.
    pub event_emitter: smol::channel::Sender<PathBuf>,
    /// Paths currently in a merge-conflict state; takes precedence in `status`.
    pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
    /// Contents of each tracked path at HEAD.
    pub head_contents: HashMap<RepoPath, String>,
    /// Contents of each path in the index (staging area).
    pub index_contents: HashMap<RepoPath, String>,
    // everything in commit contents is in oids
    pub merge_base_contents: HashMap<RepoPath, Oid>,
    /// Blob contents keyed by object id.
    pub oids: HashMap<Oid, String>,
    /// Canned blame results returned by `blame`, keyed by path.
    pub blames: HashMap<RepoPath, Blame>,
    pub current_branch_name: Option<String>,
    pub branches: HashSet<String>,
    /// List of remotes, keys are names and values are URLs
    pub remotes: HashMap<String, String>,
    /// When set, index writes fail with this message (error-path testing).
    pub simulated_index_write_error_message: Option<String>,
    /// When set, `create_worktree` fails with this message.
    pub simulated_create_worktree_error: Option<String>,
    pub simulated_graph_error: Option<String>,
    /// Ref name (e.g. "HEAD", "refs/heads/main") -> commit sha.
    pub refs: HashMap<String, String>,
    pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
    pub stash_entries: GitStash,
}
71
72impl FakeGitRepositoryState {
73 pub fn new(event_emitter: smol::channel::Sender<PathBuf>) -> Self {
74 FakeGitRepositoryState {
75 event_emitter,
76 head_contents: Default::default(),
77 index_contents: Default::default(),
78 unmerged_paths: Default::default(),
79 blames: Default::default(),
80 current_branch_name: Default::default(),
81 branches: Default::default(),
82 simulated_index_write_error_message: Default::default(),
83 simulated_create_worktree_error: Default::default(),
84 simulated_graph_error: None,
85 refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
86 merge_base_contents: Default::default(),
87 oids: Default::default(),
88 remotes: HashMap::default(),
89 graph_commits: Vec::new(),
90 commit_history: Vec::new(),
91 stash_entries: Default::default(),
92 }
93 }
94}
95
impl FakeGitRepository {
    /// Runs `f` against this repository's shared [`FakeGitRepositoryState`]
    /// after a simulated scheduling delay. `write` indicates a mutating
    /// access; the semantics are whatever `FakeFs::with_git_state` gives it.
    fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<'static, Result<T>>
    where
        F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> Result<T>,
        T: Send,
    {
        // Clone everything up front so the returned future is 'static.
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // `with_git_state` wraps `f`'s Result in its own Result; the `?`
            // flattens the outer "state access" error, leaving `f`'s result.
            fs.with_git_state(&dot_git_path, write, f)?
        }
        .boxed()
    }

    /// Applies a single ref edit (insert/update or delete) to the ref table.
    fn edit_ref(&self, edit: RefEdit) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            match edit {
                RefEdit::Update { ref_name, commit } => {
                    state.refs.insert(ref_name, commit);
                }
                RefEdit::Delete { ref_name } => {
                    state.refs.remove(&ref_name);
                }
            }
            Ok(())
        })
    }

    /// Scans `.git/worktrees/*/gitdir` to find the admin entry directory for a
    /// worktree at the given checkout path. Used when the working tree directory
    /// has already been deleted and we can't read its `.git` pointer file.
    async fn find_worktree_entry_dir_by_path(&self, path: &Path) -> Option<PathBuf> {
        use futures::StreamExt;

        let worktrees_dir = self.common_dir_path.join("worktrees");
        let mut entries = self.fs.read_dir(&worktrees_dir).await.ok()?;
        while let Some(Ok(entry_path)) = entries.next().await {
            if let Ok(gitdir_content) = self.fs.load(&entry_path.join("gitdir")).await {
                // `gitdir` stores the path of the checkout's `.git` file; its
                // parent is the worktree's working directory.
                let worktree_path = PathBuf::from(gitdir_content.trim())
                    .parent()
                    .map(PathBuf::from)
                    .unwrap_or_default();
                if worktree_path == path {
                    return Some(entry_path);
                }
            }
        }
        None
    }
}
148
149impl GitRepository for FakeGitRepository {
    /// No-op: the fake keeps its index in [`FakeGitRepositoryState`], so
    /// there is nothing to reload from disk.
    fn reload_index(&self) {}
151
152 fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
153 let fut = self.with_state_async(false, move |state| {
154 state
155 .index_contents
156 .get(&path)
157 .context("not present in index")
158 .cloned()
159 });
160 self.executor.spawn(async move { fut.await.ok() }).boxed()
161 }
162
163 fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
164 let fut = self.with_state_async(false, move |state| {
165 state
166 .head_contents
167 .get(&path)
168 .context("not present in HEAD")
169 .cloned()
170 });
171 self.executor.spawn(async move { fut.await.ok() }).boxed()
172 }
173
174 fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
175 self.with_state_async(false, move |state| {
176 state.oids.get(&oid).cloned().context("oid does not exist")
177 })
178 .boxed()
179 }
180
    /// Not supported by the fake repository.
    fn load_commit(
        &self,
        _commit: String,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::CommitDiff>> {
        unimplemented!()
    }
188
189 fn set_index_text(
190 &self,
191 path: RepoPath,
192 content: Option<String>,
193 _env: Arc<HashMap<String, String>>,
194 _is_executable: bool,
195 ) -> BoxFuture<'_, anyhow::Result<()>> {
196 self.with_state_async(true, move |state| {
197 if let Some(message) = &state.simulated_index_write_error_message {
198 anyhow::bail!("{message}");
199 } else if let Some(content) = content {
200 state.index_contents.insert(path, content);
201 } else {
202 state.index_contents.remove(&path);
203 }
204 Ok(())
205 })
206 }
207
208 fn remote_url(&self, name: &str) -> BoxFuture<'_, Option<String>> {
209 let name = name.to_string();
210 let fut = self.with_state_async(false, move |state| {
211 state
212 .remotes
213 .get(&name)
214 .context("remote not found")
215 .cloned()
216 });
217 async move { fut.await.ok() }.boxed()
218 }
219
220 fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
221 let mut entries = HashMap::default();
222 self.with_state_async(false, |state| {
223 for (path, content) in &state.head_contents {
224 let status = if let Some((oid, original)) = state
225 .merge_base_contents
226 .get(path)
227 .map(|oid| (oid, &state.oids[oid]))
228 {
229 if original == content {
230 continue;
231 }
232 TreeDiffStatus::Modified { old: *oid }
233 } else {
234 TreeDiffStatus::Added
235 };
236 entries.insert(path.clone(), status);
237 }
238 for (path, oid) in &state.merge_base_contents {
239 if !entries.contains_key(path) {
240 entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
241 }
242 }
243 Ok(TreeDiff { entries })
244 })
245 .boxed()
246 }
247
248 fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
249 self.with_state_async(false, |state| {
250 Ok(revs
251 .into_iter()
252 .map(|rev| state.refs.get(&rev).cloned())
253 .collect())
254 })
255 }
256
257 fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
258 async {
259 Ok(CommitDetails {
260 sha: commit.into(),
261 message: "initial commit".into(),
262 ..Default::default()
263 })
264 }
265 .boxed()
266 }
267
    /// Resets HEAD to `commit`, popping snapshots off `commit_history`.
    ///
    /// `commit` may be `HEAD~`/`HEAD^` (one commit back), `HEAD~<n>`
    /// (n commits back), or a sha previously recorded in the history.
    /// `Soft` restores only HEAD contents; `Mixed` also resets the index
    /// to match the restored HEAD.
    fn reset(
        &self,
        commit: String,
        mode: ResetMode,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            // Translate the commit-ish into "how many history entries to pop".
            let pop_count = if commit == "HEAD~" || commit == "HEAD^" {
                1
            } else if let Some(suffix) = commit.strip_prefix("HEAD~") {
                suffix
                    .parse::<usize>()
                    .with_context(|| format!("Invalid HEAD~ offset: {commit}"))?
            } else {
                // Search newest-to-oldest for a snapshot recorded at this sha.
                match state
                    .commit_history
                    .iter()
                    .rposition(|entry| entry.sha == commit)
                {
                    Some(index) => state.commit_history.len() - index,
                    None => anyhow::bail!("Unknown commit ref: {commit}"),
                }
            };

            if pop_count == 0 || pop_count > state.commit_history.len() {
                anyhow::bail!(
                    "Cannot reset {pop_count} commit(s): only {} in history",
                    state.commit_history.len()
                );
            }

            // The snapshot at `target_index` is the state being restored;
            // everything after it is discarded from the history.
            let target_index = state.commit_history.len() - pop_count;
            let snapshot = state.commit_history[target_index].clone();
            state.commit_history.truncate(target_index);

            match mode {
                ResetMode::Soft => {
                    state.head_contents = snapshot.head_contents;
                }
                ResetMode::Mixed => {
                    state.head_contents = snapshot.head_contents;
                    state.index_contents = state.head_contents.clone();
                }
            }

            state.refs.insert("HEAD".into(), snapshot.sha);
            Ok(())
        })
    }
317
    /// Not supported by the fake repository.
    fn checkout_files(
        &self,
        _commit: String,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
326
    /// Returns this repository's own git directory.
    fn path(&self) -> PathBuf {
        self.repository_dir_path.clone()
    }
330
    /// Returns the main repository's common `.git` directory (shared by
    /// linked worktrees).
    fn main_repository_path(&self) -> PathBuf {
        self.common_dir_path.clone()
    }
334
335 fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
336 async move { None }.boxed()
337 }
338
    /// Computes git status by comparing HEAD, the index, and the working
    /// tree (read from [`FakeFs`]), honoring `.gitignore` files found in
    /// the workdir and its ancestors. Only paths under one of
    /// `path_prefixes` are reported.
    fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>> {
        let workdir_path = self.dot_git_path.parent().unwrap();

        // Load gitignores
        let ignores = workdir_path
            .ancestors()
            .filter_map(|dir| {
                let ignore_path = dir.join(".gitignore");
                let content = self.fs.read_file_sync(ignore_path).ok()?;
                let content = String::from_utf8(content).ok()?;
                let mut builder = GitignoreBuilder::new(dir);
                for line in content.lines() {
                    builder.add_line(Some(dir.into()), line).ok()?;
                }
                builder.build().ok()
            })
            .collect::<Vec<_>>();

        // Load working copy files.
        let git_files: HashMap<RepoPath, (String, bool)> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                // TODO better simulate git status output in the case of submodules and worktrees
                let repo_path = path.strip_prefix(workdir_path).ok()?;
                let mut is_ignored = repo_path.starts_with(".git");
                // NOTE(review): a Whitelist match stops the scan but does not
                // clear `is_ignored` set by an earlier ignore file — confirm
                // this matches the intended precedence.
                for ignore in &ignores {
                    match ignore.matched_path_or_any_parents(path, false) {
                        ignore::Match::None => {}
                        ignore::Match::Ignore(_) => is_ignored = true,
                        ignore::Match::Whitelist(_) => break,
                    }
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .map(|content| String::from_utf8(content).unwrap())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored)))
            })
            .collect();

        let result = self.fs.with_git_state(&self.dot_git_path, false, |state| {
            let mut entries = Vec::new();
            // Every path known to HEAD, the index, or the working tree is a
            // candidate status entry.
            let paths = state
                .head_contents
                .keys()
                .chain(state.index_contents.keys())
                .chain(git_files.keys())
                .collect::<HashSet<_>>();
            for path in paths {
                if !path_prefixes.iter().any(|prefix| path.starts_with(prefix)) {
                    continue;
                }

                let head = state.head_contents.get(path);
                let index = state.index_contents.get(path);
                let unmerged = state.unmerged_paths.get(path);
                let fs = git_files.get(path);
                // Derive the status from which of (HEAD, index, worktree)
                // contain the path and whether their contents agree.
                let status = match (unmerged, head, index, fs) {
                    // Merge conflicts take precedence over everything else.
                    (Some(unmerged), _, _, _) => FileStatus::Unmerged(*unmerged),
                    // Present everywhere: modified flags are set per pair.
                    (_, Some(head), Some(index), Some((fs, _))) => {
                        FileStatus::Tracked(TrackedStatus {
                            index_status: if head == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                            worktree_status: if fs == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                        })
                    }
                    // Missing from the working tree: deleted on disk.
                    (_, Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: if head == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                        worktree_status: StatusCode::Deleted,
                    }),
                    // In HEAD and on disk but unstaged: deletion staged,
                    // file re-added in the worktree.
                    (_, Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Added,
                    }),
                    // Only in HEAD: deleted everywhere.
                    (_, Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Newly staged file.
                    (_, None, Some(index), Some((fs, _))) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: if fs == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                    }),
                    // Staged then deleted from disk.
                    (_, None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Only on disk: untracked, unless ignored.
                    (_, None, None, Some((_, is_ignored))) => {
                        if *is_ignored {
                            continue;
                        }
                        FileStatus::Untracked
                    }
                    // `paths` only contains keys from the three maps above.
                    (_, None, None, None) => {
                        unreachable!();
                    }
                };
                // Fully unmodified entries are omitted, matching `git status`.
                if status
                    != FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Unmodified,
                        worktree_status: StatusCode::Unmodified,
                    })
                {
                    entries.push((path.clone(), status));
                }
            }
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            anyhow::Ok(GitStatus {
                entries: entries.into(),
            })
        });
        Task::ready(match result {
            Ok(result) => result,
            Err(e) => Err(e),
        })
    }
473
474 fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
475 self.with_state_async(false, |state| Ok(state.stash_entries.clone()))
476 }
477
478 fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
479 self.with_state_async(false, move |state| {
480 let current_branch = &state.current_branch_name;
481 let mut branches = state
482 .branches
483 .iter()
484 .map(|branch_name| {
485 let ref_name = if branch_name.starts_with("refs/") {
486 branch_name.into()
487 } else if branch_name.contains('/') {
488 format!("refs/remotes/{branch_name}").into()
489 } else {
490 format!("refs/heads/{branch_name}").into()
491 };
492 Branch {
493 is_head: Some(branch_name) == current_branch.as_ref(),
494 ref_name,
495 most_recent_commit: None,
496 upstream: None,
497 }
498 })
499 .collect::<Vec<_>>();
500 // compute snapshot expects these to be sorted by ref_name
501 // because that's what git itself does
502 branches.sort_by(|a, b| a.ref_name.cmp(&b.ref_name));
503 Ok(branches)
504 })
505 }
506
    /// Lists the main worktree plus every linked worktree recorded under
    /// `.git/worktrees/`, resolving each worktree's HEAD to a sha via the
    /// shared ref table.
    fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
        let fs = self.fs.clone();
        let common_dir_path = self.common_dir_path.clone();
        let executor = self.executor.clone();

        async move {
            executor.simulate_random_delay().await;

            let (main_worktree, refs) = fs.with_git_state(&common_dir_path, false, |state| {
                // The main working directory is the parent of the common
                // `.git` directory.
                let work_dir = common_dir_path
                    .parent()
                    .map(PathBuf::from)
                    .unwrap_or_else(|| common_dir_path.clone());
                let head_sha = state
                    .refs
                    .get("HEAD")
                    .cloned()
                    .unwrap_or_else(|| "0000000".to_string());
                let branch_ref = state
                    .current_branch_name
                    .as_ref()
                    .map(|name| format!("refs/heads/{name}"))
                    .unwrap_or_else(|| "refs/heads/main".to_string());
                let main_wt = Worktree {
                    path: work_dir,
                    ref_name: Some(branch_ref.into()),
                    sha: head_sha.into(),
                    is_main: true,
                    is_bare: false,
                };
                (main_wt, state.refs.clone())
            })?;

            let mut all = vec![main_worktree];

            // Each linked worktree has an admin dir containing HEAD and
            // gitdir files (written by `create_worktree`).
            let worktrees_dir = common_dir_path.join("worktrees");
            if let Ok(mut entries) = fs.read_dir(&worktrees_dir).await {
                use futures::StreamExt;
                while let Some(Ok(entry_path)) = entries.next().await {
                    let head_content = match fs.load(&entry_path.join("HEAD")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };
                    let gitdir_content = match fs.load(&entry_path.join("gitdir")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };

                    // A symbolic HEAD ("ref: refs/heads/x") resolves through
                    // the ref table; a detached HEAD stores the sha directly.
                    let ref_name = head_content
                        .strip_prefix("ref: ")
                        .map(|s| s.trim().to_string());
                    let sha = ref_name
                        .as_ref()
                        .and_then(|r| refs.get(r))
                        .cloned()
                        .unwrap_or_else(|| head_content.trim().to_string());

                    // `gitdir` points at the checkout's `.git` file; its
                    // parent is the worktree's working directory.
                    let worktree_path = PathBuf::from(gitdir_content.trim())
                        .parent()
                        .map(PathBuf::from)
                        .unwrap_or_default();

                    all.push(Worktree {
                        path: worktree_path,
                        ref_name: ref_name.map(Into::into),
                        sha: sha.into(),
                        is_main: false,
                        is_bare: false,
                    });
                }
            }

            Ok(all)
        }
        .boxed()
    }
583
    /// Creates a linked worktree at `path`, mimicking `git worktree add`:
    /// validates the target, writes the `.git/worktrees/<name>/` admin entry
    /// (HEAD, commondir, gitdir) plus a `.git` pointer file in the new
    /// checkout, and registers the branch/ref for `NewBranch` targets.
    fn create_worktree(
        &self,
        target: CreateWorktreeTarget,
        path: PathBuf,
    ) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            let branch_name = target.branch_name().map(ToOwned::to_owned);
            let create_branch_ref = matches!(target, CreateWorktreeTarget::NewBranch { .. });

            // Check for simulated error and validate branch state before any side effects.
            fs.with_git_state(&dot_git_path, false, {
                let branch_name = branch_name.clone();
                move |state| {
                    if let Some(message) = &state.simulated_create_worktree_error {
                        anyhow::bail!("{message}");
                    }

                    // New branches must not already exist; existing-branch
                    // checkouts must reference a known branch.
                    match (create_branch_ref, branch_name.as_ref()) {
                        (true, Some(branch_name)) => {
                            if state.branches.contains(branch_name) {
                                bail!("a branch named '{}' already exists", branch_name);
                            }
                        }
                        (false, Some(branch_name)) => {
                            if !state.branches.contains(branch_name) {
                                bail!("no branch named '{}' exists", branch_name);
                            }
                        }
                        (false, None) => {}
                        (true, None) => bail!("branch name is required to create a branch"),
                    }

                    Ok(())
                }
            })??;

            // Resolve (branch, sha, whether-to-create-the-ref) per target kind.
            let (branch_name, sha, create_branch_ref) = match target {
                CreateWorktreeTarget::ExistingBranch { branch_name } => {
                    let ref_name = format!("refs/heads/{branch_name}");
                    let sha = fs.with_git_state(&dot_git_path, false, {
                        move |state| {
                            Ok::<_, anyhow::Error>(
                                state
                                    .refs
                                    .get(&ref_name)
                                    .cloned()
                                    .unwrap_or_else(|| "fake-sha".to_string()),
                            )
                        }
                    })??;
                    (Some(branch_name), sha, false)
                }
                CreateWorktreeTarget::NewBranch {
                    branch_name,
                    base_sha: start_point,
                } => (
                    Some(branch_name),
                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
                    true,
                ),
                CreateWorktreeTarget::Detached {
                    base_sha: start_point,
                } => (
                    None,
                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
                    false,
                ),
            };

            // Create the worktree checkout directory.
            fs.create_dir(&path).await?;

            // Create .git/worktrees/<name>/ directory with HEAD, commondir, gitdir.
            let worktree_entry_name = branch_name.as_deref().unwrap_or_else(|| {
                path.file_name()
                    .and_then(|name| name.to_str())
                    .unwrap_or("detached")
            });
            let worktrees_entry_dir = common_dir_path.join("worktrees").join(worktree_entry_name);
            fs.create_dir(&worktrees_entry_dir).await?;

            // Branch checkouts get a symbolic HEAD; detached ones store the sha.
            let head_content = if let Some(ref branch_name) = branch_name {
                let ref_name = format!("refs/heads/{branch_name}");
                format!("ref: {ref_name}")
            } else {
                sha.clone()
            };
            fs.write_file_internal(
                worktrees_entry_dir.join("HEAD"),
                head_content.into_bytes(),
                false,
            )?;
            fs.write_file_internal(
                worktrees_entry_dir.join("commondir"),
                common_dir_path.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;
            let worktree_dot_git = path.join(".git");
            fs.write_file_internal(
                worktrees_entry_dir.join("gitdir"),
                worktree_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Create .git file in the worktree checkout.
            fs.write_file_internal(
                &worktree_dot_git,
                format!("gitdir: {}", worktrees_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Update git state for newly created branches.
            if create_branch_ref {
                fs.with_git_state(&dot_git_path, true, {
                    let branch_name = branch_name.clone();
                    let sha = sha.clone();
                    move |state| {
                        if let Some(branch_name) = branch_name {
                            let ref_name = format!("refs/heads/{branch_name}");
                            state.refs.insert(ref_name, sha);
                            state.branches.insert(branch_name);
                        }
                        Ok::<(), anyhow::Error>(())
                    }
                })??;
            }

            Ok(())
        }
        .boxed()
    }
721
    /// Removes the worktree checked out at `path`, deleting both the
    /// checkout directory and its `.git/worktrees/<name>/` admin entry,
    /// mimicking `git worktree remove`.
    fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Try to read the worktree's .git file to find its entry
            // directory. If the working tree is already gone (e.g. the
            // caller deleted it before asking git to clean up), fall back
            // to scanning `.git/worktrees/*/gitdir` for a matching path,
            // mirroring real git's behavior with `--force`.
            let dot_git_file = path.join(".git");
            let worktree_entry_dir = if let Ok(content) = fs.load(&dot_git_file).await {
                let gitdir = content
                    .strip_prefix("gitdir:")
                    .context("invalid .git file in worktree")?
                    .trim();
                PathBuf::from(gitdir)
            } else {
                self.find_worktree_entry_dir_by_path(&path)
                    .await
                    .with_context(|| format!("no worktree found at path: {}", path.display()))?
            };

            // Remove the worktree checkout directory if it still exists.
            fs.remove_dir(
                &path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await?;

            // Remove the .git/worktrees/<name>/ directory.
            fs.remove_dir(
                &worktree_entry_dir,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: false,
                },
            )
            .await?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
775
    /// Moves a worktree checkout from `old_path` to `new_path` and rewrites
    /// the two pointer files (`gitdir` in the admin entry and `.git` in the
    /// checkout) so they reference the new locations.
    fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Read the worktree's .git file to find its entry directory.
            let dot_git_file = old_path.join(".git");
            let content = fs
                .load(&dot_git_file)
                .await
                .with_context(|| format!("no worktree found at path: {}", old_path.display()))?;
            let gitdir = content
                .strip_prefix("gitdir:")
                .context("invalid .git file in worktree")?
                .trim();
            let worktree_entry_dir = PathBuf::from(gitdir);

            // Move the worktree checkout directory.
            fs.rename(
                &old_path,
                &new_path,
                RenameOptions {
                    overwrite: false,
                    ignore_if_exists: false,
                    create_parents: true,
                },
            )
            .await?;

            // Update the gitdir file in .git/worktrees/<name>/ to point to the
            // new location.
            let new_dot_git = new_path.join(".git");
            fs.write_file_internal(
                worktree_entry_dir.join("gitdir"),
                new_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Update the .git file in the moved worktree checkout.
            fs.write_file_internal(
                &new_dot_git,
                format!("gitdir: {}", worktree_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
831
832 fn checkout_branch_in_worktree(
833 &self,
834 _branch_name: String,
835 _worktree_path: PathBuf,
836 _create: bool,
837 ) -> BoxFuture<'_, Result<()>> {
838 async { Ok(()) }.boxed()
839 }
840
841 fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
842 self.with_state_async(true, |state| {
843 state.current_branch_name = Some(name);
844 Ok(())
845 })
846 }
847
848 fn create_branch(
849 &self,
850 name: String,
851 _base_branch: Option<String>,
852 ) -> BoxFuture<'_, Result<()>> {
853 self.with_state_async(true, move |state| {
854 if let Some((remote, _)) = name.split_once('/')
855 && !state.remotes.contains_key(remote)
856 {
857 state.remotes.insert(remote.to_owned(), "".to_owned());
858 }
859 state.branches.insert(name);
860 Ok(())
861 })
862 }
863
864 fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> {
865 self.with_state_async(true, move |state| {
866 if !state.branches.remove(&branch) {
867 bail!("no such branch: {branch}");
868 }
869 state.branches.insert(new_name.clone());
870 if state.current_branch_name == Some(branch) {
871 state.current_branch_name = Some(new_name);
872 }
873 Ok(())
874 })
875 }
876
877 fn delete_branch(&self, _is_remote: bool, name: String) -> BoxFuture<'_, Result<()>> {
878 self.with_state_async(true, move |state| {
879 if !state.branches.remove(&name) {
880 bail!("no such branch: {name}");
881 }
882 Ok(())
883 })
884 }
885
886 fn blame(
887 &self,
888 path: RepoPath,
889 _content: Rope,
890 _line_ending: LineEnding,
891 ) -> BoxFuture<'_, Result<git::blame::Blame>> {
892 self.with_state_async(false, move |state| {
893 state
894 .blames
895 .get(&path)
896 .with_context(|| format!("failed to get blame for {:?}", path))
897 .cloned()
898 })
899 }
900
    /// Delegates to [`Self::file_history_paginated`] with no offset or limit.
    fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
904
905 fn file_history_paginated(
906 &self,
907 path: RepoPath,
908 _skip: usize,
909 _limit: Option<usize>,
910 ) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
911 async move {
912 Ok(git::repository::FileHistory {
913 entries: Vec::new(),
914 path,
915 })
916 }
917 .boxed()
918 }
919
    /// Stages `paths` by copying their current working-tree contents into
    /// the index; paths missing from the working tree are removed from the
    /// index (staging a deletion).
    fn stage_paths(
        &self,
        paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        Box::pin(async move {
            // Load every path's on-disk contents concurrently; a failed load
            // means the file is absent.
            let contents = paths
                .into_iter()
                .map(|path| {
                    let abs_path = self
                        .dot_git_path
                        .parent()
                        .unwrap()
                        .join(&path.as_std_path());
                    Box::pin(async move { (path.clone(), self.fs.load(&abs_path).await.ok()) })
                })
                .collect::<Vec<_>>();
            let contents = join_all(contents).await;
            self.with_state_async(true, move |state| {
                for (path, content) in contents {
                    if let Some(content) = content {
                        state.index_contents.insert(path, content);
                    } else {
                        // File gone from disk: the index entry is dropped.
                        state.index_contents.remove(&path);
                    }
                }
                Ok(())
            })
            .await
        })
    }
951
952 fn unstage_paths(
953 &self,
954 paths: Vec<RepoPath>,
955 _env: Arc<HashMap<String, String>>,
956 ) -> BoxFuture<'_, Result<()>> {
957 self.with_state_async(true, move |state| {
958 for path in paths {
959 match state.head_contents.get(&path) {
960 Some(content) => state.index_contents.insert(path, content.clone()),
961 None => state.index_contents.remove(&path),
962 };
963 }
964 Ok(())
965 })
966 }
967
    /// Not supported by the fake repository.
    fn stash_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
975
    /// Not supported by the fake repository.
    fn stash_pop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
983
    /// Not supported by the fake repository.
    fn stash_apply(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
991
    /// Not supported by the fake repository.
    fn stash_drop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
999
    /// Records a commit: snapshots the current HEAD/index into
    /// `commit_history`, promotes the index to the new HEAD contents, and
    /// advances the `HEAD` ref to a synthetic sha. Message, author, askpass,
    /// and env are ignored by the fake.
    ///
    /// NOTE(review): `options.amend` only bypasses the empty-commit check;
    /// it still appends a new history entry rather than replacing the last
    /// one — confirm this is the intended fake behavior.
    fn commit(
        &self,
        _message: gpui::SharedString,
        _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
        options: CommitOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            if !options.allow_empty && !options.amend && state.index_contents == state.head_contents
            {
                anyhow::bail!("nothing to commit (use allow_empty to create an empty commit)");
            }

            // Snapshot the pre-commit state under the pre-commit sha so
            // `reset` can later restore it.
            let old_sha = state.refs.get("HEAD").cloned().unwrap_or_default();
            state.commit_history.push(FakeCommitSnapshot {
                head_contents: state.head_contents.clone(),
                index_contents: state.index_contents.clone(),
                sha: old_sha,
            });

            state.head_contents = state.index_contents.clone();

            // Synthetic, monotonically-numbered sha for the new commit.
            let new_sha = format!("fake-commit-{}", state.commit_history.len());
            state.refs.insert("HEAD".into(), new_sha);

            Ok(())
        })
    }
1029
1030 fn run_hook(
1031 &self,
1032 _hook: RunHook,
1033 _env: Arc<HashMap<String, String>>,
1034 ) -> BoxFuture<'_, Result<()>> {
1035 async { Ok(()) }.boxed()
1036 }
1037
    /// Remote operations are not supported by the fake repository.
    fn push(
        &self,
        _branch: String,
        _remote_branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
1050
    /// Remote operations are not supported by the fake repository.
    fn pull(
        &self,
        _branch: Option<String>,
        _remote: String,
        _rebase: bool,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
1062
    /// Remote operations are not supported by the fake repository.
    fn fetch(
        &self,
        _fetch_options: FetchOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
1072
1073 fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
1074 self.with_state_async(false, move |state| {
1075 let remotes = state
1076 .remotes
1077 .keys()
1078 .map(|r| Remote {
1079 name: r.clone().into(),
1080 })
1081 .collect::<Vec<_>>();
1082 Ok(remotes)
1083 })
1084 }
1085
    /// Not supported by the fake repository.
    fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }
1089
    /// Not supported by the fake repository.
    fn get_branch_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }
1093
1094 fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
1095 future::ready(Ok(Vec::new())).boxed()
1096 }
1097
1098 fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result<String>> {
1099 future::ready(Ok(String::new())).boxed()
1100 }
1101
    /// Computes per-file added/deleted line counts between HEAD and the
    /// working tree, restricted to `path_prefixes` (an empty list, or the
    /// "." prefix, matches everything). Counts are whole-file line totals,
    /// not a real line-level diff.
    fn diff_stat(
        &self,
        path_prefixes: &[RepoPath],
    ) -> BoxFuture<'_, Result<git::status::GitDiffStat>> {
        // Counts the lines in `s`; an empty file contributes zero.
        fn count_lines(s: &str) -> u32 {
            if s.is_empty() {
                0
            } else {
                s.lines().count() as u32
            }
        }

        // True when `path` falls under any prefix (or no filtering applies).
        fn matches_prefixes(path: &RepoPath, prefixes: &[RepoPath]) -> bool {
            if prefixes.is_empty() {
                return true;
            }
            prefixes.iter().any(|prefix| {
                let prefix_str = prefix.as_unix_str();
                if prefix_str == "." {
                    return true;
                }
                path == prefix || path.starts_with(&prefix)
            })
        }

        let path_prefixes = path_prefixes.to_vec();

        // Snapshot the working tree's file contents (excluding .git) before
        // entering the async state closure.
        let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf();
        let worktree_files: HashMap<RepoPath, String> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                let repo_path = path.strip_prefix(&workdir_path).ok()?;
                if repo_path.starts_with(".git") {
                    return None;
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .and_then(|bytes| String::from_utf8(bytes).ok())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), content))
            })
            .collect();

        self.with_state_async(false, move |state| {
            let mut entries = Vec::new();
            // Candidates: paths at HEAD plus worktree paths that are in the
            // index. Untracked files don't contribute to the diff stat.
            let all_paths: HashSet<&RepoPath> = state
                .head_contents
                .keys()
                .chain(
                    worktree_files
                        .keys()
                        .filter(|p| state.index_contents.contains_key(*p)),
                )
                .collect();
            for path in all_paths {
                if !matches_prefixes(path, &path_prefixes) {
                    continue;
                }
                let head = state.head_contents.get(path);
                let worktree = worktree_files.get(path);
                match (head, worktree) {
                    // Modified: every line of both versions is counted.
                    (Some(old), Some(new)) if old != new => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: count_lines(new),
                                deleted: count_lines(old),
                            },
                        ));
                    }
                    // Deleted from the working tree.
                    (Some(old), None) => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: 0,
                                deleted: count_lines(old),
                            },
                        ));
                    }
                    // Staged file not yet in HEAD.
                    (None, Some(new)) => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: count_lines(new),
                                deleted: 0,
                            },
                        ));
                    }
                    // Unchanged files produce no entry.
                    _ => {}
                }
            }
            entries.sort_by(|(a, _), (b, _)| a.cmp(b));
            Ok(git::status::GitDiffStat {
                entries: entries.into(),
            })
        })
        .boxed()
    }
1204
1205 fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
1206 let executor = self.executor.clone();
1207 let fs = self.fs.clone();
1208 let checkpoints = self.checkpoints.clone();
1209 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1210 async move {
1211 executor.simulate_random_delay().await;
1212 let oid = git::Oid::random(&mut *executor.rng().lock());
1213 let entry = fs.entry(&repository_dir_path)?;
1214 checkpoints.lock().insert(oid, entry);
1215 Ok(GitRepositoryCheckpoint { commit_sha: oid })
1216 }
1217 .boxed()
1218 }
1219
1220 fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
1221 let executor = self.executor.clone();
1222 let fs = self.fs.clone();
1223 let checkpoints = self.checkpoints.clone();
1224 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1225 async move {
1226 executor.simulate_random_delay().await;
1227 let checkpoints = checkpoints.lock();
1228 let entry = checkpoints
1229 .get(&checkpoint.commit_sha)
1230 .context(format!("invalid checkpoint: {}", checkpoint.commit_sha))?;
1231 fs.insert_entry(&repository_dir_path, entry.clone())?;
1232 Ok(())
1233 }
1234 .boxed()
1235 }
1236
1237 fn create_archive_checkpoint(&self) -> BoxFuture<'_, Result<(String, String)>> {
1238 let executor = self.executor.clone();
1239 let fs = self.fs.clone();
1240 let checkpoints = self.checkpoints.clone();
1241 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1242 async move {
1243 executor.simulate_random_delay().await;
1244 let staged_oid = git::Oid::random(&mut *executor.rng().lock());
1245 let unstaged_oid = git::Oid::random(&mut *executor.rng().lock());
1246 let entry = fs.entry(&repository_dir_path)?;
1247 checkpoints.lock().insert(staged_oid, entry.clone());
1248 checkpoints.lock().insert(unstaged_oid, entry);
1249 Ok((staged_oid.to_string(), unstaged_oid.to_string()))
1250 }
1251 .boxed()
1252 }
1253
1254 fn restore_archive_checkpoint(
1255 &self,
1256 // The fake filesystem doesn't model a separate index, so only the
1257 // unstaged (full working directory) snapshot is restored.
1258 _staged_sha: String,
1259 unstaged_sha: String,
1260 ) -> BoxFuture<'_, Result<()>> {
1261 match unstaged_sha.parse() {
1262 Ok(commit_sha) => self.restore_checkpoint(GitRepositoryCheckpoint { commit_sha }),
1263 Err(error) => async move {
1264 Err(anyhow::anyhow!(error).context("failed to parse unstaged SHA as Oid"))
1265 }
1266 .boxed(),
1267 }
1268 }
1269
1270 fn compare_checkpoints(
1271 &self,
1272 left: GitRepositoryCheckpoint,
1273 right: GitRepositoryCheckpoint,
1274 ) -> BoxFuture<'_, Result<bool>> {
1275 let executor = self.executor.clone();
1276 let checkpoints = self.checkpoints.clone();
1277 async move {
1278 executor.simulate_random_delay().await;
1279 let checkpoints = checkpoints.lock();
1280 let left = checkpoints
1281 .get(&left.commit_sha)
1282 .context(format!("invalid left checkpoint: {}", left.commit_sha))?;
1283 let right = checkpoints
1284 .get(&right.commit_sha)
1285 .context(format!("invalid right checkpoint: {}", right.commit_sha))?;
1286
1287 Ok(left == right)
1288 }
1289 .boxed()
1290 }
1291
1292 fn diff_checkpoints(
1293 &self,
1294 base_checkpoint: GitRepositoryCheckpoint,
1295 target_checkpoint: GitRepositoryCheckpoint,
1296 ) -> BoxFuture<'_, Result<String>> {
1297 let executor = self.executor.clone();
1298 let checkpoints = self.checkpoints.clone();
1299 async move {
1300 executor.simulate_random_delay().await;
1301 let checkpoints = checkpoints.lock();
1302 let base = checkpoints
1303 .get(&base_checkpoint.commit_sha)
1304 .context(format!(
1305 "invalid base checkpoint: {}",
1306 base_checkpoint.commit_sha
1307 ))?;
1308 let target = checkpoints
1309 .get(&target_checkpoint.commit_sha)
1310 .context(format!(
1311 "invalid target checkpoint: {}",
1312 target_checkpoint.commit_sha
1313 ))?;
1314
1315 fn collect_files(
1316 entry: &FakeFsEntry,
1317 prefix: String,
1318 out: &mut std::collections::BTreeMap<String, String>,
1319 ) {
1320 match entry {
1321 FakeFsEntry::File { content, .. } => {
1322 out.insert(prefix, String::from_utf8_lossy(content).into_owned());
1323 }
1324 FakeFsEntry::Dir { entries, .. } => {
1325 for (name, child) in entries {
1326 let path = if prefix.is_empty() {
1327 name.clone()
1328 } else {
1329 format!("{prefix}/{name}")
1330 };
1331 collect_files(child, path, out);
1332 }
1333 }
1334 FakeFsEntry::Symlink { .. } => {}
1335 }
1336 }
1337
1338 let mut base_files = std::collections::BTreeMap::new();
1339 let mut target_files = std::collections::BTreeMap::new();
1340 collect_files(base, String::new(), &mut base_files);
1341 collect_files(target, String::new(), &mut target_files);
1342
1343 let all_paths: std::collections::BTreeSet<&String> =
1344 base_files.keys().chain(target_files.keys()).collect();
1345
1346 let mut diff = String::new();
1347 for path in all_paths {
1348 match (base_files.get(path), target_files.get(path)) {
1349 (Some(base_content), Some(target_content))
1350 if base_content != target_content =>
1351 {
1352 diff.push_str(&format!("diff --git a/{path} b/{path}\n"));
1353 diff.push_str(&format!("--- a/{path}\n"));
1354 diff.push_str(&format!("+++ b/{path}\n"));
1355 for line in base_content.lines() {
1356 diff.push_str(&format!("-{line}\n"));
1357 }
1358 for line in target_content.lines() {
1359 diff.push_str(&format!("+{line}\n"));
1360 }
1361 }
1362 (Some(_), None) => {
1363 diff.push_str(&format!("diff --git a/{path} /dev/null\n"));
1364 diff.push_str("deleted file\n");
1365 }
1366 (None, Some(_)) => {
1367 diff.push_str(&format!("diff --git /dev/null b/{path}\n"));
1368 diff.push_str("new file\n");
1369 }
1370 _ => {}
1371 }
1372 }
1373 Ok(diff)
1374 }
1375 .boxed()
1376 }
1377
1378 fn default_branch(
1379 &self,
1380 include_remote_name: bool,
1381 ) -> BoxFuture<'_, Result<Option<SharedString>>> {
1382 async move {
1383 Ok(Some(if include_remote_name {
1384 "origin/main".into()
1385 } else {
1386 "main".into()
1387 }))
1388 }
1389 .boxed()
1390 }
1391
1392 fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
1393 self.with_state_async(true, move |state| {
1394 state.remotes.insert(name, url);
1395 Ok(())
1396 })
1397 }
1398
1399 fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
1400 self.with_state_async(true, move |state| {
1401 state.branches.retain(|branch| {
1402 branch
1403 .split_once('/')
1404 .is_none_or(|(remote, _)| remote != name)
1405 });
1406 state.remotes.remove(&name);
1407 Ok(())
1408 })
1409 }
1410
1411 fn initial_graph_data(
1412 &self,
1413 _log_source: LogSource,
1414 _log_order: LogOrder,
1415 request_tx: Sender<Vec<Arc<InitialGraphCommitData>>>,
1416 ) -> BoxFuture<'_, Result<()>> {
1417 let fs = self.fs.clone();
1418 let dot_git_path = self.dot_git_path.clone();
1419 async move {
1420 let (graph_commits, simulated_error) =
1421 fs.with_git_state(&dot_git_path, false, |state| {
1422 (
1423 state.graph_commits.clone(),
1424 state.simulated_graph_error.clone(),
1425 )
1426 })?;
1427
1428 if let Some(error) = simulated_error {
1429 anyhow::bail!("{}", error);
1430 }
1431
1432 for chunk in graph_commits.chunks(GRAPH_CHUNK_SIZE) {
1433 request_tx.send(chunk.to_vec()).await.ok();
1434 }
1435 Ok(())
1436 }
1437 .boxed()
1438 }
1439
1440 fn search_commits(
1441 &self,
1442 _log_source: LogSource,
1443 _search_args: SearchCommitArgs,
1444 _request_tx: Sender<Oid>,
1445 ) -> BoxFuture<'_, Result<()>> {
1446 async { bail!("search_commits not supported for FakeGitRepository") }.boxed()
1447 }
1448
1449 fn commit_data_reader(&self) -> Result<CommitDataReader> {
1450 anyhow::bail!("commit_data_reader not supported for FakeGitRepository")
1451 }
1452
    /// Creates or moves `ref_name` to point at `commit` by delegating to the
    /// shared ref-edit path.
    fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> {
        self.edit_ref(RefEdit::Update { ref_name, commit })
    }
1456
    /// Removes `ref_name` by delegating to the shared ref-edit path.
    fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> {
        self.edit_ref(RefEdit::Delete { ref_name })
    }
1460
1461 fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>> {
1462 async { Ok(()) }.boxed()
1463 }
1464
1465 fn set_trusted(&self, trusted: bool) {
1466 self.is_trusted
1467 .store(trusted, std::sync::atomic::Ordering::Release);
1468 }
1469
1470 fn is_trusted(&self) -> bool {
1471 self.is_trusted.load(std::sync::atomic::Ordering::Acquire)
1472 }
1473}