1use std::path::Path;
2
3use crate::{FakeFs, FakeFsEntry, Fs, RemoveOptions, RenameOptions};
4use anyhow::{Context as _, Result, bail};
5use collections::{HashMap, HashSet};
6use futures::future::{self, BoxFuture, join_all};
7use git::{
8 Oid, RunHook,
9 blame::Blame,
10 repository::{
11 AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions,
12 CreateWorktreeTarget, FetchOptions, GRAPH_CHUNK_SIZE, GitRepository,
13 GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote,
14 RepoPath, ResetMode, SearchCommitArgs, Worktree,
15 },
16 stash::GitStash,
17 status::{
18 DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
19 UnmergedStatus,
20 },
21};
22use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task};
23use ignore::gitignore::GitignoreBuilder;
24use parking_lot::Mutex;
25use rope::Rope;
26use smol::{channel::Sender, future::FutureExt as _};
27use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool};
28use text::LineEnding;
29use util::{paths::PathStyle, rel_path::RelPath};
30
/// In-memory stand-in for a real git repository, backed by a [`FakeFs`].
///
/// All mutable repository state lives in the fake filesystem's per-repository
/// git state; this struct only holds the handles needed to reach it.
#[derive(Clone)]
pub struct FakeGitRepository {
    /// Fake filesystem that owns this repository's git state.
    pub(crate) fs: Arc<FakeFs>,
    /// Snapshots of the repository directory recorded by `checkpoint`, keyed
    /// by a randomly generated object id.
    pub(crate) checkpoints: Arc<Mutex<HashMap<Oid, FakeFsEntry>>>,
    /// Executor used to simulate random async delays in tests.
    pub(crate) executor: BackgroundExecutor,
    /// Path to this checkout's `.git` (a directory, or a pointer file for
    /// linked worktrees).
    pub(crate) dot_git_path: PathBuf,
    /// Admin directory for this checkout; differs from `common_dir_path`
    /// for linked worktrees.
    pub(crate) repository_dir_path: PathBuf,
    /// The main repository's shared `.git` directory, common to all worktrees.
    pub(crate) common_dir_path: PathBuf,
    // NOTE(review): not read or written in this file's visible code;
    // presumably toggled by callers elsewhere — confirm before documenting.
    pub(crate) is_trusted: Arc<AtomicBool>,
}
41
/// Snapshot of HEAD and index contents captured just before a commit is
/// created, so that `reset` can restore earlier states by popping history.
#[derive(Debug, Clone)]
pub struct FakeCommitSnapshot {
    /// Contents of HEAD at the time the snapshot was taken.
    pub head_contents: HashMap<RepoPath, String>,
    /// Contents of the index at the time the snapshot was taken.
    pub index_contents: HashMap<RepoPath, String>,
    /// The sha that `HEAD` pointed at before the commit that produced this
    /// snapshot.
    pub sha: String,
}
48
/// The complete mutable state of a fake git repository, stored by the fake
/// filesystem and mutated through `FakeGitRepository::with_state_async`.
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    /// Pre-commit snapshots, oldest first; used by `reset` to rewind.
    pub commit_history: Vec<FakeCommitSnapshot>,
    /// Channel on which change events for the `.git` path are emitted.
    pub event_emitter: smol::channel::Sender<PathBuf>,
    /// Paths currently in a conflicted (unmerged) state.
    pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
    /// File contents as of HEAD.
    pub head_contents: HashMap<RepoPath, String>,
    /// File contents as staged in the index.
    pub index_contents: HashMap<RepoPath, String>,
    // everything in commit contents is in oids
    pub merge_base_contents: HashMap<RepoPath, Oid>,
    /// Blob contents keyed by object id.
    pub oids: HashMap<Oid, String>,
    /// Canned blame results returned by `blame`, keyed by path.
    pub blames: HashMap<RepoPath, Blame>,
    /// Name of the checked-out branch, if any.
    pub current_branch_name: Option<String>,
    /// All known branch names (short or fully-qualified ref names).
    pub branches: HashSet<String>,
    /// List of remotes, keys are names and values are URLs
    pub remotes: HashMap<String, String>,
    /// When set, `set_index_text` fails with this message.
    pub simulated_index_write_error_message: Option<String>,
    /// When set, `create_worktree` fails with this message.
    pub simulated_create_worktree_error: Option<String>,
    // NOTE(review): not consumed in this file's visible code; presumably
    // read by graph-loading code elsewhere.
    pub simulated_graph_error: Option<String>,
    /// Ref name → sha mapping (always contains "HEAD").
    pub refs: HashMap<String, String>,
    /// Canned commit-graph data.
    pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
    /// Canned stash entries returned by `stash_entries`.
    pub stash_entries: GitStash,
}
71
72impl FakeGitRepositoryState {
73 pub fn new(event_emitter: smol::channel::Sender<PathBuf>) -> Self {
74 FakeGitRepositoryState {
75 event_emitter,
76 head_contents: Default::default(),
77 index_contents: Default::default(),
78 unmerged_paths: Default::default(),
79 blames: Default::default(),
80 current_branch_name: Default::default(),
81 branches: Default::default(),
82 simulated_index_write_error_message: Default::default(),
83 simulated_create_worktree_error: Default::default(),
84 simulated_graph_error: None,
85 refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
86 merge_base_contents: Default::default(),
87 oids: Default::default(),
88 remotes: HashMap::default(),
89 graph_commits: Vec::new(),
90 commit_history: Vec::new(),
91 stash_entries: Default::default(),
92 }
93 }
94}
95
96impl FakeGitRepository {
97 fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<'static, Result<T>>
98 where
99 F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> Result<T>,
100 T: Send,
101 {
102 let fs = self.fs.clone();
103 let executor = self.executor.clone();
104 let dot_git_path = self.dot_git_path.clone();
105 async move {
106 executor.simulate_random_delay().await;
107 fs.with_git_state(&dot_git_path, write, f)?
108 }
109 .boxed()
110 }
111
112 /// Scans `.git/worktrees/*/gitdir` to find the admin entry directory for a
113 /// worktree at the given checkout path. Used when the working tree directory
114 /// has already been deleted and we can't read its `.git` pointer file.
115 async fn find_worktree_entry_dir_by_path(&self, path: &Path) -> Option<PathBuf> {
116 use futures::StreamExt;
117
118 let worktrees_dir = self.common_dir_path.join("worktrees");
119 let mut entries = self.fs.read_dir(&worktrees_dir).await.ok()?;
120 while let Some(Ok(entry_path)) = entries.next().await {
121 if let Ok(gitdir_content) = self.fs.load(&entry_path.join("gitdir")).await {
122 let worktree_path = PathBuf::from(gitdir_content.trim())
123 .parent()
124 .map(PathBuf::from)
125 .unwrap_or_default();
126 if worktree_path == path {
127 return Some(entry_path);
128 }
129 }
130 }
131 None
132 }
133}
134
135impl GitRepository for FakeGitRepository {
136 fn reload_index(&self) {}
137
138 fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
139 let fut = self.with_state_async(false, move |state| {
140 state
141 .index_contents
142 .get(&path)
143 .context("not present in index")
144 .cloned()
145 });
146 self.executor.spawn(async move { fut.await.ok() }).boxed()
147 }
148
149 fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
150 let fut = self.with_state_async(false, move |state| {
151 state
152 .head_contents
153 .get(&path)
154 .context("not present in HEAD")
155 .cloned()
156 });
157 self.executor.spawn(async move { fut.await.ok() }).boxed()
158 }
159
160 fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
161 self.with_state_async(false, move |state| {
162 state.oids.get(&oid).cloned().context("oid does not exist")
163 })
164 .boxed()
165 }
166
    /// Loading full commit diffs is not modeled by the fake repository.
    fn load_commit(
        &self,
        _commit: String,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::CommitDiff>> {
        unimplemented!()
    }
174
175 fn set_index_text(
176 &self,
177 path: RepoPath,
178 content: Option<String>,
179 _env: Arc<HashMap<String, String>>,
180 _is_executable: bool,
181 ) -> BoxFuture<'_, anyhow::Result<()>> {
182 self.with_state_async(true, move |state| {
183 if let Some(message) = &state.simulated_index_write_error_message {
184 anyhow::bail!("{message}");
185 } else if let Some(content) = content {
186 state.index_contents.insert(path, content);
187 } else {
188 state.index_contents.remove(&path);
189 }
190 Ok(())
191 })
192 }
193
194 fn remote_url(&self, name: &str) -> BoxFuture<'_, Option<String>> {
195 let name = name.to_string();
196 let fut = self.with_state_async(false, move |state| {
197 state
198 .remotes
199 .get(&name)
200 .context("remote not found")
201 .cloned()
202 });
203 async move { fut.await.ok() }.boxed()
204 }
205
206 fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
207 let mut entries = HashMap::default();
208 self.with_state_async(false, |state| {
209 for (path, content) in &state.head_contents {
210 let status = if let Some((oid, original)) = state
211 .merge_base_contents
212 .get(path)
213 .map(|oid| (oid, &state.oids[oid]))
214 {
215 if original == content {
216 continue;
217 }
218 TreeDiffStatus::Modified { old: *oid }
219 } else {
220 TreeDiffStatus::Added
221 };
222 entries.insert(path.clone(), status);
223 }
224 for (path, oid) in &state.merge_base_contents {
225 if !entries.contains_key(path) {
226 entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
227 }
228 }
229 Ok(TreeDiff { entries })
230 })
231 .boxed()
232 }
233
234 fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
235 self.with_state_async(false, |state| {
236 Ok(revs
237 .into_iter()
238 .map(|rev| state.refs.get(&rev).cloned())
239 .collect())
240 })
241 }
242
243 fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
244 async {
245 Ok(CommitDetails {
246 sha: commit.into(),
247 message: "initial commit".into(),
248 ..Default::default()
249 })
250 }
251 .boxed()
252 }
253
    /// Resets HEAD (and, for `Mixed`, the index) to an earlier commit.
    ///
    /// `commit` may be `HEAD~`/`HEAD^`, `HEAD~N`, or a sha recorded in
    /// `commit_history`. Working-copy contents are never touched, matching
    /// git's soft/mixed reset semantics.
    fn reset(
        &self,
        commit: String,
        mode: ResetMode,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            // Translate the commit spec into a number of commits to pop.
            let pop_count = if commit == "HEAD~" || commit == "HEAD^" {
                1
            } else if let Some(suffix) = commit.strip_prefix("HEAD~") {
                suffix
                    .parse::<usize>()
                    .with_context(|| format!("Invalid HEAD~ offset: {commit}"))?
            } else {
                // Search newest-first so a repeated sha resolves to the most
                // recent matching commit.
                match state
                    .commit_history
                    .iter()
                    .rposition(|entry| entry.sha == commit)
                {
                    Some(index) => state.commit_history.len() - index,
                    None => anyhow::bail!("Unknown commit ref: {commit}"),
                }
            };

            if pop_count == 0 || pop_count > state.commit_history.len() {
                anyhow::bail!(
                    "Cannot reset {pop_count} commit(s): only {} in history",
                    state.commit_history.len()
                );
            }

            // Restore the snapshot taken just before the target commit and
            // discard everything recorded after it.
            let target_index = state.commit_history.len() - pop_count;
            let snapshot = state.commit_history[target_index].clone();
            state.commit_history.truncate(target_index);

            match mode {
                ResetMode::Soft => {
                    // Soft reset moves HEAD only; the index stays as-is.
                    state.head_contents = snapshot.head_contents;
                }
                ResetMode::Mixed => {
                    // Mixed reset also makes the index match the target tree.
                    state.head_contents = snapshot.head_contents;
                    state.index_contents = state.head_contents.clone();
                }
            }

            state.refs.insert("HEAD".into(), snapshot.sha);
            Ok(())
        })
    }
303
    /// Checking out files from a commit is not modeled by the fake repository.
    fn checkout_files(
        &self,
        _commit: String,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
312
    /// Path to this checkout's repository ("admin") directory.
    fn path(&self) -> PathBuf {
        self.repository_dir_path.clone()
    }

    /// Path to the main repository's shared `.git` directory.
    fn main_repository_path(&self) -> PathBuf {
        self.common_dir_path.clone()
    }
320
321 fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
322 async move { None }.boxed()
323 }
324
    /// Computes the status of every path known to HEAD, the index, or the
    /// working copy, restricted to `path_prefixes`. Statuses are derived by
    /// comparing which of the three locations contain each path and whether
    /// their contents agree.
    fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>> {
        let workdir_path = self.dot_git_path.parent().unwrap();

        // Load gitignores
        // NOTE(review): only ancestors of the workdir are scanned here, so
        // `.gitignore` files nested inside the repository are not honored.
        let ignores = workdir_path
            .ancestors()
            .filter_map(|dir| {
                let ignore_path = dir.join(".gitignore");
                let content = self.fs.read_file_sync(ignore_path).ok()?;
                let content = String::from_utf8(content).ok()?;
                let mut builder = GitignoreBuilder::new(dir);
                for line in content.lines() {
                    builder.add_line(Some(dir.into()), line).ok()?;
                }
                builder.build().ok()
            })
            .collect::<Vec<_>>();

        // Load working copy files.
        let git_files: HashMap<RepoPath, (String, bool)> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                // TODO better simulate git status output in the case of submodules and worktrees
                let repo_path = path.strip_prefix(workdir_path).ok()?;
                let mut is_ignored = repo_path.starts_with(".git");
                for ignore in &ignores {
                    match ignore.matched_path_or_any_parents(path, false) {
                        ignore::Match::None => {}
                        ignore::Match::Ignore(_) => is_ignored = true,
                        // A whitelist match overrides ignores from shallower
                        // gitignore files, so stop scanning.
                        ignore::Match::Whitelist(_) => break,
                    }
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .map(|content| String::from_utf8(content).unwrap())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored)))
            })
            .collect();

        let result = self.fs.with_git_state(&self.dot_git_path, false, |state| {
            let mut entries = Vec::new();
            // Union of every path appearing in HEAD, the index, or on disk.
            let paths = state
                .head_contents
                .keys()
                .chain(state.index_contents.keys())
                .chain(git_files.keys())
                .collect::<HashSet<_>>();
            for path in paths {
                if !path_prefixes.iter().any(|prefix| path.starts_with(prefix)) {
                    continue;
                }

                let head = state.head_contents.get(path);
                let index = state.index_contents.get(path);
                let unmerged = state.unmerged_paths.get(path);
                let fs = git_files.get(path);
                let status = match (unmerged, head, index, fs) {
                    // A conflicted entry overrides every other status.
                    (Some(unmerged), _, _, _) => FileStatus::Unmerged(*unmerged),
                    // Present everywhere: compare contents pairwise.
                    (_, Some(head), Some(index), Some((fs, _))) => {
                        FileStatus::Tracked(TrackedStatus {
                            index_status: if head == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                            worktree_status: if fs == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                        })
                    }
                    // Missing from disk only: deleted in the worktree.
                    (_, Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: if head == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Removed from the index but still on disk.
                    (_, Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Added,
                    }),
                    // Removed from both the index and the worktree.
                    (_, Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Newly staged file, possibly modified since staging.
                    (_, None, Some(index), Some((fs, _))) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: if fs == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                    }),
                    // Staged, then deleted from the worktree.
                    (_, None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // On disk only: untracked unless ignored.
                    (_, None, None, Some((_, is_ignored))) => {
                        if *is_ignored {
                            continue;
                        }
                        FileStatus::Untracked
                    }
                    // `paths` was built from these three sources, so at least
                    // one must be present.
                    (_, None, None, None) => {
                        unreachable!();
                    }
                };
                // Fully unmodified tracked files are omitted, like git does.
                if status
                    != FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Unmodified,
                        worktree_status: StatusCode::Unmodified,
                    })
                {
                    entries.push((path.clone(), status));
                }
            }
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            anyhow::Ok(GitStatus {
                entries: entries.into(),
            })
        });
        Task::ready(match result {
            Ok(result) => result,
            Err(e) => Err(e),
        })
    }
459
    /// Returns a clone of the canned stash list stored in the git state.
    fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
        self.with_state_async(false, |state| Ok(state.stash_entries.clone()))
    }
463
464 fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
465 self.with_state_async(false, move |state| {
466 let current_branch = &state.current_branch_name;
467 let mut branches = state
468 .branches
469 .iter()
470 .map(|branch_name| {
471 let ref_name = if branch_name.starts_with("refs/") {
472 branch_name.into()
473 } else if branch_name.contains('/') {
474 format!("refs/remotes/{branch_name}").into()
475 } else {
476 format!("refs/heads/{branch_name}").into()
477 };
478 Branch {
479 is_head: Some(branch_name) == current_branch.as_ref(),
480 ref_name,
481 most_recent_commit: None,
482 upstream: None,
483 }
484 })
485 .collect::<Vec<_>>();
486 // compute snapshot expects these to be sorted by ref_name
487 // because that's what git itself does
488 branches.sort_by(|a, b| a.ref_name.cmp(&b.ref_name));
489 Ok(branches)
490 })
491 }
492
    /// Lists the main worktree plus any linked worktrees recorded under
    /// `<common dir>/worktrees/`.
    fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
        let fs = self.fs.clone();
        let common_dir_path = self.common_dir_path.clone();
        let executor = self.executor.clone();

        async move {
            executor.simulate_random_delay().await;

            // Synthesize the main worktree from the repository state; its
            // path is the parent of the common `.git` directory.
            let (main_worktree, refs) = fs.with_git_state(&common_dir_path, false, |state| {
                let work_dir = common_dir_path
                    .parent()
                    .map(PathBuf::from)
                    .unwrap_or_else(|| common_dir_path.clone());
                let head_sha = state
                    .refs
                    .get("HEAD")
                    .cloned()
                    .unwrap_or_else(|| "0000000".to_string());
                let branch_ref = state
                    .current_branch_name
                    .as_ref()
                    .map(|name| format!("refs/heads/{name}"))
                    .unwrap_or_else(|| "refs/heads/main".to_string());
                let main_wt = Worktree {
                    path: work_dir,
                    ref_name: Some(branch_ref.into()),
                    sha: head_sha.into(),
                    is_main: true,
                    is_bare: false,
                };
                (main_wt, state.refs.clone())
            })?;

            let mut all = vec![main_worktree];

            // Each linked worktree has an admin entry directory containing
            // `HEAD` (a sha or a "ref: ..." pointer) and `gitdir` (the path
            // of the checkout's `.git` file). Entries missing either file
            // are skipped.
            let worktrees_dir = common_dir_path.join("worktrees");
            if let Ok(mut entries) = fs.read_dir(&worktrees_dir).await {
                use futures::StreamExt;
                while let Some(Ok(entry_path)) = entries.next().await {
                    let head_content = match fs.load(&entry_path.join("HEAD")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };
                    let gitdir_content = match fs.load(&entry_path.join("gitdir")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };

                    // Resolve symbolic refs through the shared ref table;
                    // otherwise HEAD holds the sha directly (detached).
                    let ref_name = head_content
                        .strip_prefix("ref: ")
                        .map(|s| s.trim().to_string());
                    let sha = ref_name
                        .as_ref()
                        .and_then(|r| refs.get(r))
                        .cloned()
                        .unwrap_or_else(|| head_content.trim().to_string());

                    // `gitdir` points at `<checkout>/.git`, so the checkout
                    // path is its parent.
                    let worktree_path = PathBuf::from(gitdir_content.trim())
                        .parent()
                        .map(PathBuf::from)
                        .unwrap_or_default();

                    all.push(Worktree {
                        path: worktree_path,
                        ref_name: ref_name.map(Into::into),
                        sha: sha.into(),
                        is_main: false,
                        is_bare: false,
                    });
                }
            }

            Ok(all)
        }
        .boxed()
    }
569
    /// Creates a linked worktree at `path`, mimicking `git worktree add`:
    /// validates the target branch, creates the checkout directory, writes
    /// the `.git/worktrees/<name>/` admin entry (HEAD, commondir, gitdir)
    /// and the checkout's `.git` pointer file, and registers any newly
    /// created branch in the git state.
    fn create_worktree(
        &self,
        target: CreateWorktreeTarget,
        path: PathBuf,
    ) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            let branch_name = target.branch_name().map(ToOwned::to_owned);
            let create_branch_ref = matches!(target, CreateWorktreeTarget::NewBranch { .. });

            // Check for simulated error and validate branch state before any side effects.
            fs.with_git_state(&dot_git_path, false, {
                let branch_name = branch_name.clone();
                move |state| {
                    if let Some(message) = &state.simulated_create_worktree_error {
                        anyhow::bail!("{message}");
                    }

                    // New branches must not exist yet; existing branches must.
                    match (create_branch_ref, branch_name.as_ref()) {
                        (true, Some(branch_name)) => {
                            if state.branches.contains(branch_name) {
                                bail!("a branch named '{}' already exists", branch_name);
                            }
                        }
                        (false, Some(branch_name)) => {
                            if !state.branches.contains(branch_name) {
                                bail!("no branch named '{}' exists", branch_name);
                            }
                        }
                        (false, None) => {}
                        (true, None) => bail!("branch name is required to create a branch"),
                    }

                    Ok(())
                }
            })??;

            // Resolve the branch name and starting sha for each target kind.
            let (branch_name, sha, create_branch_ref) = match target {
                CreateWorktreeTarget::ExistingBranch { branch_name } => {
                    let ref_name = format!("refs/heads/{branch_name}");
                    let sha = fs.with_git_state(&dot_git_path, false, {
                        move |state| {
                            Ok::<_, anyhow::Error>(
                                state
                                    .refs
                                    .get(&ref_name)
                                    .cloned()
                                    .unwrap_or_else(|| "fake-sha".to_string()),
                            )
                        }
                    })??;
                    (Some(branch_name), sha, false)
                }
                CreateWorktreeTarget::NewBranch {
                    branch_name,
                    base_sha: start_point,
                } => (
                    Some(branch_name),
                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
                    true,
                ),
                CreateWorktreeTarget::Detached {
                    base_sha: start_point,
                } => (
                    None,
                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
                    false,
                ),
            };

            // Create the worktree checkout directory.
            fs.create_dir(&path).await?;

            // Create .git/worktrees/<name>/ directory with HEAD, commondir, gitdir.
            // Detached worktrees are named after the checkout directory.
            let worktree_entry_name = branch_name.as_deref().unwrap_or_else(|| {
                path.file_name()
                    .and_then(|name| name.to_str())
                    .unwrap_or("detached")
            });
            let worktrees_entry_dir = common_dir_path.join("worktrees").join(worktree_entry_name);
            fs.create_dir(&worktrees_entry_dir).await?;

            // HEAD is a symbolic ref for branch checkouts, a bare sha when
            // detached.
            let head_content = if let Some(ref branch_name) = branch_name {
                let ref_name = format!("refs/heads/{branch_name}");
                format!("ref: {ref_name}")
            } else {
                sha.clone()
            };
            fs.write_file_internal(
                worktrees_entry_dir.join("HEAD"),
                head_content.into_bytes(),
                false,
            )?;
            fs.write_file_internal(
                worktrees_entry_dir.join("commondir"),
                common_dir_path.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;
            let worktree_dot_git = path.join(".git");
            fs.write_file_internal(
                worktrees_entry_dir.join("gitdir"),
                worktree_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Create .git file in the worktree checkout.
            fs.write_file_internal(
                &worktree_dot_git,
                format!("gitdir: {}", worktrees_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Update git state for newly created branches.
            if create_branch_ref {
                fs.with_git_state(&dot_git_path, true, {
                    let branch_name = branch_name.clone();
                    let sha = sha.clone();
                    move |state| {
                        if let Some(branch_name) = branch_name {
                            let ref_name = format!("refs/heads/{branch_name}");
                            state.refs.insert(ref_name, sha);
                            state.branches.insert(branch_name);
                        }
                        Ok::<(), anyhow::Error>(())
                    }
                })??;
            }

            Ok(())
        }
        .boxed()
    }
707
    /// Removes a linked worktree, mimicking `git worktree remove`: deletes
    /// the checkout directory (if it still exists) and the
    /// `.git/worktrees/<name>/` admin entry, then pokes the main `.git`
    /// directory so watchers notice.
    ///
    /// NOTE(review): `_force` is ignored here — removal always proceeds
    /// regardless of worktree state.
    fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Try to read the worktree's .git file to find its entry
            // directory. If the working tree is already gone (e.g. the
            // caller deleted it before asking git to clean up), fall back
            // to scanning `.git/worktrees/*/gitdir` for a matching path,
            // mirroring real git's behavior with `--force`.
            let dot_git_file = path.join(".git");
            let worktree_entry_dir = if let Ok(content) = fs.load(&dot_git_file).await {
                let gitdir = content
                    .strip_prefix("gitdir:")
                    .context("invalid .git file in worktree")?
                    .trim();
                PathBuf::from(gitdir)
            } else {
                self.find_worktree_entry_dir_by_path(&path)
                    .await
                    .with_context(|| format!("no worktree found at path: {}", path.display()))?
            };

            // Remove the worktree checkout directory if it still exists.
            fs.remove_dir(
                &path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: true,
                },
            )
            .await?;

            // Remove the .git/worktrees/<name>/ directory.
            fs.remove_dir(
                &worktree_entry_dir,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: false,
                },
            )
            .await?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
761
    /// Moves a linked worktree checkout, mimicking `git worktree move`:
    /// renames the directory and rewrites the two path pointers — the
    /// `gitdir` file in the admin entry and the `.git` file in the moved
    /// checkout — so they reference each other's new locations.
    fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Read the worktree's .git file to find its entry directory.
            let dot_git_file = old_path.join(".git");
            let content = fs
                .load(&dot_git_file)
                .await
                .with_context(|| format!("no worktree found at path: {}", old_path.display()))?;
            let gitdir = content
                .strip_prefix("gitdir:")
                .context("invalid .git file in worktree")?
                .trim();
            let worktree_entry_dir = PathBuf::from(gitdir);

            // Move the worktree checkout directory.
            fs.rename(
                &old_path,
                &new_path,
                RenameOptions {
                    overwrite: false,
                    ignore_if_exists: false,
                    create_parents: true,
                },
            )
            .await?;

            // Update the gitdir file in .git/worktrees/<name>/ to point to the
            // new location.
            let new_dot_git = new_path.join(".git");
            fs.write_file_internal(
                worktree_entry_dir.join("gitdir"),
                new_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Update the .git file in the moved worktree checkout.
            fs.write_file_internal(
                &new_dot_git,
                format!("gitdir: {}", worktree_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
817
    /// No-op in the fake repository; always succeeds without changing state.
    fn checkout_branch_in_worktree(
        &self,
        _branch_name: String,
        _worktree_path: PathBuf,
        _create: bool,
    ) -> BoxFuture<'_, Result<()>> {
        async { Ok(()) }.boxed()
    }
826
827 fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
828 self.with_state_async(true, |state| {
829 state.current_branch_name = Some(name);
830 Ok(())
831 })
832 }
833
834 fn create_branch(
835 &self,
836 name: String,
837 _base_branch: Option<String>,
838 ) -> BoxFuture<'_, Result<()>> {
839 self.with_state_async(true, move |state| {
840 if let Some((remote, _)) = name.split_once('/')
841 && !state.remotes.contains_key(remote)
842 {
843 state.remotes.insert(remote.to_owned(), "".to_owned());
844 }
845 state.branches.insert(name);
846 Ok(())
847 })
848 }
849
850 fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> {
851 self.with_state_async(true, move |state| {
852 if !state.branches.remove(&branch) {
853 bail!("no such branch: {branch}");
854 }
855 state.branches.insert(new_name.clone());
856 if state.current_branch_name == Some(branch) {
857 state.current_branch_name = Some(new_name);
858 }
859 Ok(())
860 })
861 }
862
863 fn delete_branch(&self, _is_remote: bool, name: String) -> BoxFuture<'_, Result<()>> {
864 self.with_state_async(true, move |state| {
865 if !state.branches.remove(&name) {
866 bail!("no such branch: {name}");
867 }
868 Ok(())
869 })
870 }
871
872 fn blame(
873 &self,
874 path: RepoPath,
875 _content: Rope,
876 _line_ending: LineEnding,
877 ) -> BoxFuture<'_, Result<git::blame::Blame>> {
878 self.with_state_async(false, move |state| {
879 state
880 .blames
881 .get(&path)
882 .with_context(|| format!("failed to get blame for {:?}", path))
883 .cloned()
884 })
885 }
886
    /// Delegates to `file_history_paginated` with no skip and no limit.
    fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
890
891 fn file_history_paginated(
892 &self,
893 path: RepoPath,
894 _skip: usize,
895 _limit: Option<usize>,
896 ) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
897 async move {
898 Ok(git::repository::FileHistory {
899 entries: Vec::new(),
900 path,
901 })
902 }
903 .boxed()
904 }
905
    /// Stages `paths` by copying their current working-copy contents into
    /// the index; files missing from disk are removed from the index,
    /// recording the deletion.
    fn stage_paths(
        &self,
        paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        Box::pin(async move {
            // Load every file's content from the fake fs concurrently.
            let contents = paths
                .into_iter()
                .map(|path| {
                    let abs_path = self
                        .dot_git_path
                        .parent()
                        .unwrap()
                        .join(&path.as_std_path());
                    Box::pin(async move { (path.clone(), self.fs.load(&abs_path).await.ok()) })
                })
                .collect::<Vec<_>>();
            let contents = join_all(contents).await;
            self.with_state_async(true, move |state| {
                for (path, content) in contents {
                    if let Some(content) = content {
                        state.index_contents.insert(path, content);
                    } else {
                        // File is gone from the working copy: staging it
                        // stages the deletion.
                        state.index_contents.remove(&path);
                    }
                }
                Ok(())
            })
            .await
        })
    }
937
938 fn unstage_paths(
939 &self,
940 paths: Vec<RepoPath>,
941 _env: Arc<HashMap<String, String>>,
942 ) -> BoxFuture<'_, Result<()>> {
943 self.with_state_async(true, move |state| {
944 for path in paths {
945 match state.head_contents.get(&path) {
946 Some(content) => state.index_contents.insert(path, content.clone()),
947 None => state.index_contents.remove(&path),
948 };
949 }
950 Ok(())
951 })
952 }
953
    /// Stashing is not modeled by the fake repository; tests that need it
    /// must use a real repository.
    fn stash_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// Not modeled by the fake repository.
    fn stash_pop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// Not modeled by the fake repository.
    fn stash_apply(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// Not modeled by the fake repository.
    fn stash_drop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
985
986 fn commit(
987 &self,
988 _message: gpui::SharedString,
989 _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
990 options: CommitOptions,
991 _askpass: AskPassDelegate,
992 _env: Arc<HashMap<String, String>>,
993 ) -> BoxFuture<'_, Result<()>> {
994 self.with_state_async(true, move |state| {
995 if !options.allow_empty && !options.amend && state.index_contents == state.head_contents
996 {
997 anyhow::bail!("nothing to commit (use allow_empty to create an empty commit)");
998 }
999
1000 let old_sha = state.refs.get("HEAD").cloned().unwrap_or_default();
1001 state.commit_history.push(FakeCommitSnapshot {
1002 head_contents: state.head_contents.clone(),
1003 index_contents: state.index_contents.clone(),
1004 sha: old_sha,
1005 });
1006
1007 state.head_contents = state.index_contents.clone();
1008
1009 let new_sha = format!("fake-commit-{}", state.commit_history.len());
1010 state.refs.insert("HEAD".into(), new_sha);
1011
1012 Ok(())
1013 })
1014 }
1015
    /// Hooks are not modeled; always reports success.
    fn run_hook(
        &self,
        _hook: RunHook,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        async { Ok(()) }.boxed()
    }
1023
    /// Network push is not modeled by the fake repository.
    fn push(
        &self,
        _branch: String,
        _remote_branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    /// Network pull is not modeled by the fake repository.
    fn pull(
        &self,
        _branch: Option<String>,
        _remote: String,
        _rebase: bool,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    /// Network fetch is not modeled by the fake repository.
    fn fetch(
        &self,
        _fetch_options: FetchOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
1058
1059 fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
1060 self.with_state_async(false, move |state| {
1061 let remotes = state
1062 .remotes
1063 .keys()
1064 .map(|r| Remote {
1065 name: r.clone().into(),
1066 })
1067 .collect::<Vec<_>>();
1068 Ok(remotes)
1069 })
1070 }
1071
    /// Push-remote resolution is not modeled by the fake repository.
    fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }

    /// Branch-remote resolution is not modeled by the fake repository.
    fn get_branch_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }

    /// The fake repository never reports any pushed commits.
    fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
        future::ready(Ok(Vec::new())).boxed()
    }

    /// Textual diffs are not modeled; always yields an empty string.
    fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result<String>> {
        future::ready(Ok(String::new())).boxed()
    }
1087
    /// Computes per-file added/deleted line counts between HEAD and the
    /// working copy, restricted to `path_prefixes`.
    fn diff_stat(
        &self,
        path_prefixes: &[RepoPath],
    ) -> BoxFuture<'_, Result<git::status::GitDiffStat>> {
        // Whole-file granularity: when contents differ at all, every line of
        // the old content counts as deleted and every line of the new one as
        // added.
        fn count_lines(s: &str) -> u32 {
            if s.is_empty() {
                0
            } else {
                s.lines().count() as u32
            }
        }

        // An empty prefix list, or a "." prefix, matches every path.
        fn matches_prefixes(path: &RepoPath, prefixes: &[RepoPath]) -> bool {
            if prefixes.is_empty() {
                return true;
            }
            prefixes.iter().any(|prefix| {
                let prefix_str = prefix.as_unix_str();
                if prefix_str == "." {
                    return true;
                }
                path == prefix || path.starts_with(&prefix)
            })
        }

        let path_prefixes = path_prefixes.to_vec();

        // Snapshot all files under the workdir from the fake fs, excluding
        // anything inside `.git`.
        let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf();
        let worktree_files: HashMap<RepoPath, String> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                let repo_path = path.strip_prefix(&workdir_path).ok()?;
                if repo_path.starts_with(".git") {
                    return None;
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .and_then(|bytes| String::from_utf8(bytes).ok())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), content))
            })
            .collect();

        self.with_state_async(false, move |state| {
            let mut entries = Vec::new();
            // HEAD paths, plus worktree paths that are tracked in the index
            // (untracked files don't contribute to diff stats).
            let all_paths: HashSet<&RepoPath> = state
                .head_contents
                .keys()
                .chain(
                    worktree_files
                        .keys()
                        .filter(|p| state.index_contents.contains_key(*p)),
                )
                .collect();
            for path in all_paths {
                if !matches_prefixes(path, &path_prefixes) {
                    continue;
                }
                let head = state.head_contents.get(path);
                let worktree = worktree_files.get(path);
                match (head, worktree) {
                    // Modified: both sides present, contents differ.
                    (Some(old), Some(new)) if old != new => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: count_lines(new),
                                deleted: count_lines(old),
                            },
                        ));
                    }
                    // Deleted from the working copy.
                    (Some(old), None) => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: 0,
                                deleted: count_lines(old),
                            },
                        ));
                    }
                    // Newly added (tracked in the index, absent from HEAD).
                    (None, Some(new)) => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: count_lines(new),
                                deleted: 0,
                            },
                        ));
                    }
                    // Unchanged files produce no entry.
                    _ => {}
                }
            }
            entries.sort_by(|(a, _), (b, _)| a.cmp(b));
            Ok(git::status::GitDiffStat {
                entries: entries.into(),
            })
        })
        .boxed()
    }
1190
1191 fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
1192 let executor = self.executor.clone();
1193 let fs = self.fs.clone();
1194 let checkpoints = self.checkpoints.clone();
1195 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1196 async move {
1197 executor.simulate_random_delay().await;
1198 let oid = git::Oid::random(&mut *executor.rng().lock());
1199 let entry = fs.entry(&repository_dir_path)?;
1200 checkpoints.lock().insert(oid, entry);
1201 Ok(GitRepositoryCheckpoint { commit_sha: oid })
1202 }
1203 .boxed()
1204 }
1205
1206 fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
1207 let executor = self.executor.clone();
1208 let fs = self.fs.clone();
1209 let checkpoints = self.checkpoints.clone();
1210 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1211 async move {
1212 executor.simulate_random_delay().await;
1213 let checkpoints = checkpoints.lock();
1214 let entry = checkpoints
1215 .get(&checkpoint.commit_sha)
1216 .context(format!("invalid checkpoint: {}", checkpoint.commit_sha))?;
1217 fs.insert_entry(&repository_dir_path, entry.clone())?;
1218 Ok(())
1219 }
1220 .boxed()
1221 }
1222
1223 fn create_archive_checkpoint(&self) -> BoxFuture<'_, Result<(String, String)>> {
1224 let executor = self.executor.clone();
1225 let fs = self.fs.clone();
1226 let checkpoints = self.checkpoints.clone();
1227 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1228 async move {
1229 executor.simulate_random_delay().await;
1230 let staged_oid = git::Oid::random(&mut *executor.rng().lock());
1231 let unstaged_oid = git::Oid::random(&mut *executor.rng().lock());
1232 let entry = fs.entry(&repository_dir_path)?;
1233 checkpoints.lock().insert(staged_oid, entry.clone());
1234 checkpoints.lock().insert(unstaged_oid, entry);
1235 Ok((staged_oid.to_string(), unstaged_oid.to_string()))
1236 }
1237 .boxed()
1238 }
1239
1240 fn restore_archive_checkpoint(
1241 &self,
1242 // The fake filesystem doesn't model a separate index, so only the
1243 // unstaged (full working directory) snapshot is restored.
1244 _staged_sha: String,
1245 unstaged_sha: String,
1246 ) -> BoxFuture<'_, Result<()>> {
1247 match unstaged_sha.parse() {
1248 Ok(commit_sha) => self.restore_checkpoint(GitRepositoryCheckpoint { commit_sha }),
1249 Err(error) => async move {
1250 Err(anyhow::anyhow!(error).context("failed to parse unstaged SHA as Oid"))
1251 }
1252 .boxed(),
1253 }
1254 }
1255
1256 fn compare_checkpoints(
1257 &self,
1258 left: GitRepositoryCheckpoint,
1259 right: GitRepositoryCheckpoint,
1260 ) -> BoxFuture<'_, Result<bool>> {
1261 let executor = self.executor.clone();
1262 let checkpoints = self.checkpoints.clone();
1263 async move {
1264 executor.simulate_random_delay().await;
1265 let checkpoints = checkpoints.lock();
1266 let left = checkpoints
1267 .get(&left.commit_sha)
1268 .context(format!("invalid left checkpoint: {}", left.commit_sha))?;
1269 let right = checkpoints
1270 .get(&right.commit_sha)
1271 .context(format!("invalid right checkpoint: {}", right.commit_sha))?;
1272
1273 Ok(left == right)
1274 }
1275 .boxed()
1276 }
1277
    /// Renders a simplified, diff-like textual summary of the changes between
    /// two previously recorded checkpoints. Modified files are emitted as a
    /// full rewrite (all old lines `-`, all new lines `+`) rather than hunks.
    fn diff_checkpoints(
        &self,
        base_checkpoint: GitRepositoryCheckpoint,
        target_checkpoint: GitRepositoryCheckpoint,
    ) -> BoxFuture<'_, Result<String>> {
        let executor = self.executor.clone();
        let checkpoints = self.checkpoints.clone();
        async move {
            executor.simulate_random_delay().await;
            let checkpoints = checkpoints.lock();
            let base = checkpoints
                .get(&base_checkpoint.commit_sha)
                .context(format!(
                    "invalid base checkpoint: {}",
                    base_checkpoint.commit_sha
                ))?;
            let target = checkpoints
                .get(&target_checkpoint.commit_sha)
                .context(format!(
                    "invalid target checkpoint: {}",
                    target_checkpoint.commit_sha
                ))?;

            // Recursively flattens a snapshot into `path -> contents` entries
            // (slash-joined paths, lossy UTF-8 decoding). Symlinks are skipped.
            fn collect_files(
                entry: &FakeFsEntry,
                prefix: String,
                out: &mut std::collections::BTreeMap<String, String>,
            ) {
                match entry {
                    FakeFsEntry::File { content, .. } => {
                        out.insert(prefix, String::from_utf8_lossy(content).into_owned());
                    }
                    FakeFsEntry::Dir { entries, .. } => {
                        for (name, child) in entries {
                            let path = if prefix.is_empty() {
                                name.clone()
                            } else {
                                format!("{prefix}/{name}")
                            };
                            collect_files(child, path, out);
                        }
                    }
                    FakeFsEntry::Symlink { .. } => {}
                }
            }

            let mut base_files = std::collections::BTreeMap::new();
            let mut target_files = std::collections::BTreeMap::new();
            collect_files(base, String::new(), &mut base_files);
            collect_files(target, String::new(), &mut target_files);

            // BTreeSet keeps the union of paths in sorted order, so the
            // emitted diff is deterministic.
            let all_paths: std::collections::BTreeSet<&String> =
                base_files.keys().chain(target_files.keys()).collect();

            let mut diff = String::new();
            for path in all_paths {
                match (base_files.get(path), target_files.get(path)) {
                    // Present in both snapshots with different contents.
                    (Some(base_content), Some(target_content))
                        if base_content != target_content =>
                    {
                        diff.push_str(&format!("diff --git a/{path} b/{path}\n"));
                        diff.push_str(&format!("--- a/{path}\n"));
                        diff.push_str(&format!("+++ b/{path}\n"));
                        for line in base_content.lines() {
                            diff.push_str(&format!("-{line}\n"));
                        }
                        for line in target_content.lines() {
                            diff.push_str(&format!("+{line}\n"));
                        }
                    }
                    // Present only in the base snapshot: deleted.
                    (Some(_), None) => {
                        diff.push_str(&format!("diff --git a/{path} /dev/null\n"));
                        diff.push_str("deleted file\n");
                    }
                    // Present only in the target snapshot: newly created.
                    (None, Some(_)) => {
                        diff.push_str(&format!("diff --git /dev/null b/{path}\n"));
                        diff.push_str("new file\n");
                    }
                    // Identical contents: nothing to report.
                    _ => {}
                }
            }
            Ok(diff)
        }
        .boxed()
    }
1363
1364 fn default_branch(
1365 &self,
1366 include_remote_name: bool,
1367 ) -> BoxFuture<'_, Result<Option<SharedString>>> {
1368 async move {
1369 Ok(Some(if include_remote_name {
1370 "origin/main".into()
1371 } else {
1372 "main".into()
1373 }))
1374 }
1375 .boxed()
1376 }
1377
1378 fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
1379 self.with_state_async(true, move |state| {
1380 state.remotes.insert(name, url);
1381 Ok(())
1382 })
1383 }
1384
1385 fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
1386 self.with_state_async(true, move |state| {
1387 state.branches.retain(|branch| {
1388 branch
1389 .split_once('/')
1390 .is_none_or(|(remote, _)| remote != name)
1391 });
1392 state.remotes.remove(&name);
1393 Ok(())
1394 })
1395 }
1396
1397 fn initial_graph_data(
1398 &self,
1399 _log_source: LogSource,
1400 _log_order: LogOrder,
1401 request_tx: Sender<Vec<Arc<InitialGraphCommitData>>>,
1402 ) -> BoxFuture<'_, Result<()>> {
1403 let fs = self.fs.clone();
1404 let dot_git_path = self.dot_git_path.clone();
1405 async move {
1406 let (graph_commits, simulated_error) =
1407 fs.with_git_state(&dot_git_path, false, |state| {
1408 (
1409 state.graph_commits.clone(),
1410 state.simulated_graph_error.clone(),
1411 )
1412 })?;
1413
1414 if let Some(error) = simulated_error {
1415 anyhow::bail!("{}", error);
1416 }
1417
1418 for chunk in graph_commits.chunks(GRAPH_CHUNK_SIZE) {
1419 request_tx.send(chunk.to_vec()).await.ok();
1420 }
1421 Ok(())
1422 }
1423 .boxed()
1424 }
1425
1426 fn search_commits(
1427 &self,
1428 _log_source: LogSource,
1429 _search_args: SearchCommitArgs,
1430 _request_tx: Sender<Oid>,
1431 ) -> BoxFuture<'_, Result<()>> {
1432 async { bail!("search_commits not supported for FakeGitRepository") }.boxed()
1433 }
1434
1435 fn commit_data_reader(&self) -> Result<CommitDataReader> {
1436 anyhow::bail!("commit_data_reader not supported for FakeGitRepository")
1437 }
1438
1439 fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> {
1440 self.with_state_async(true, move |state| {
1441 state.refs.insert(ref_name, commit);
1442 Ok(())
1443 })
1444 }
1445
1446 fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> {
1447 self.with_state_async(true, move |state| {
1448 state.refs.remove(&ref_name);
1449 Ok(())
1450 })
1451 }
1452
1453 fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>> {
1454 async { Ok(()) }.boxed()
1455 }
1456
1457 fn set_trusted(&self, trusted: bool) {
1458 self.is_trusted
1459 .store(trusted, std::sync::atomic::Ordering::Release);
1460 }
1461
1462 fn is_trusted(&self) -> bool {
1463 self.is_trusted.load(std::sync::atomic::Ordering::Acquire)
1464 }
1465}