1use crate::{FakeFs, FakeFsEntry, Fs, RemoveOptions, RenameOptions};
2use anyhow::{Context as _, Result, bail};
3use collections::{HashMap, HashSet};
4use futures::future::{self, BoxFuture, join_all};
5use git::{
6 Oid, RunHook,
7 blame::Blame,
8 repository::{
9 AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions,
10 CreateWorktreeTarget, FetchOptions, GRAPH_CHUNK_SIZE, GitRepository,
11 GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote,
12 RepoPath, ResetMode, SearchCommitArgs, Worktree,
13 },
14 stash::GitStash,
15 status::{
16 DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
17 UnmergedStatus,
18 },
19};
20use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task};
21use ignore::gitignore::GitignoreBuilder;
22use parking_lot::Mutex;
23use rope::Rope;
24use smol::{channel::Sender, future::FutureExt as _};
25use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool};
26use text::LineEnding;
27use util::{paths::PathStyle, rel_path::RelPath};
28
/// In-memory stand-in for a real git repository, backed by [`FakeFs`].
/// Cheap to clone: every field is a shared handle or a small path.
#[derive(Clone)]
pub struct FakeGitRepository {
    // Fake filesystem that owns the underlying `FakeGitRepositoryState`.
    pub(crate) fs: Arc<FakeFs>,
    // Filesystem snapshots taken by `checkpoint()`, keyed by a random oid.
    pub(crate) checkpoints: Arc<Mutex<HashMap<Oid, FakeFsEntry>>>,
    // Executor used to inject simulated scheduling delays into async ops.
    pub(crate) executor: BackgroundExecutor,
    // Path of this repository's `.git` (file or directory).
    pub(crate) dot_git_path: PathBuf,
    // Git dir for this particular checkout; differs from `common_dir_path`
    // for linked worktrees.
    pub(crate) repository_dir_path: PathBuf,
    // Git dir shared by all worktrees of the repository.
    pub(crate) common_dir_path: PathBuf,
    // NOTE(review): not read in this chunk — presumably gates trust-sensitive
    // operations elsewhere; confirm before documenting further.
    pub(crate) is_trusted: Arc<AtomicBool>,
}
39
/// Pre-commit state recorded by `commit()` so that `reset()` can restore it.
#[derive(Debug, Clone)]
pub struct FakeCommitSnapshot {
    // Contents of every tracked path at HEAD when the snapshot was taken.
    pub head_contents: HashMap<RepoPath, String>,
    // Contents of the index when the snapshot was taken.
    pub index_contents: HashMap<RepoPath, String>,
    // The HEAD sha corresponding to this snapshot.
    pub sha: String,
}
46
/// Mutable state of a fake repository, stored inside [`FakeFs`] and accessed
/// via `FakeFs::with_git_state`.
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    // Snapshots pushed by `commit()`; popped/truncated by `reset()`.
    pub commit_history: Vec<FakeCommitSnapshot>,
    // Channel used to signal `.git` changes to observers.
    pub event_emitter: smol::channel::Sender<PathBuf>,
    // Paths currently in a conflicted (unmerged) state.
    pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
    pub head_contents: HashMap<RepoPath, String>,
    pub index_contents: HashMap<RepoPath, String>,
    // Merge-base snapshot stores oids only; the actual text for each oid
    // lives in `oids` below.
    pub merge_base_contents: HashMap<RepoPath, Oid>,
    // Object store: blob contents keyed by oid.
    pub oids: HashMap<Oid, String>,
    // Pre-seeded blame results served by `blame()`.
    pub blames: HashMap<RepoPath, Blame>,
    pub current_branch_name: Option<String>,
    pub branches: HashSet<String>,
    /// List of remotes, keys are names and values are URLs
    pub remotes: HashMap<String, String>,
    // When set, `set_index_text` fails with this message (for tests).
    pub simulated_index_write_error_message: Option<String>,
    // When set, `create_worktree` fails with this message (for tests).
    pub simulated_create_worktree_error: Option<String>,
    pub simulated_graph_error: Option<String>,
    // Ref table (e.g. "HEAD", "refs/heads/main") → sha.
    pub refs: HashMap<String, String>,
    pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
    pub stash_entries: GitStash,
}
69
70impl FakeGitRepositoryState {
71 pub fn new(event_emitter: smol::channel::Sender<PathBuf>) -> Self {
72 FakeGitRepositoryState {
73 event_emitter,
74 head_contents: Default::default(),
75 index_contents: Default::default(),
76 unmerged_paths: Default::default(),
77 blames: Default::default(),
78 current_branch_name: Default::default(),
79 branches: Default::default(),
80 simulated_index_write_error_message: Default::default(),
81 simulated_create_worktree_error: Default::default(),
82 simulated_graph_error: None,
83 refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
84 merge_base_contents: Default::default(),
85 oids: Default::default(),
86 remotes: HashMap::default(),
87 graph_commits: Vec::new(),
88 commit_history: Vec::new(),
89 stash_entries: Default::default(),
90 }
91 }
92}
93
impl FakeGitRepository {
    /// Run `f` against this repository's shared [`FakeGitRepositoryState`]
    /// after a simulated scheduling delay.
    ///
    /// `write` is forwarded to `FakeFs::with_git_state`; per the callers in
    /// this file, a write access makes the fake fs emit a change event for
    /// the `.git` directory, mimicking a real on-disk mutation.
    fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<'static, Result<T>>
    where
        F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> Result<T>,
        T: Send,
    {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // `with_git_state` yields Result<Result<T>>: `?` flattens the
            // outer lookup error; the inner result from `f` is the tail value.
            fs.with_git_state(&dot_git_path, write, f)?
        }
        .boxed()
    }
}
110
111impl GitRepository for FakeGitRepository {
    // No-op: the fake index lives in memory and is always up to date.
    fn reload_index(&self) {}
113
114 fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
115 let fut = self.with_state_async(false, move |state| {
116 state
117 .index_contents
118 .get(&path)
119 .context("not present in index")
120 .cloned()
121 });
122 self.executor.spawn(async move { fut.await.ok() }).boxed()
123 }
124
125 fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
126 let fut = self.with_state_async(false, move |state| {
127 state
128 .head_contents
129 .get(&path)
130 .context("not present in HEAD")
131 .cloned()
132 });
133 self.executor.spawn(async move { fut.await.ok() }).boxed()
134 }
135
136 fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
137 self.with_state_async(false, move |state| {
138 state.oids.get(&oid).cloned().context("oid does not exist")
139 })
140 .boxed()
141 }
142
    /// Not simulated by the fake repository; panics if called in tests.
    fn load_commit(
        &self,
        _commit: String,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::CommitDiff>> {
        unimplemented!()
    }
150
151 fn set_index_text(
152 &self,
153 path: RepoPath,
154 content: Option<String>,
155 _env: Arc<HashMap<String, String>>,
156 _is_executable: bool,
157 ) -> BoxFuture<'_, anyhow::Result<()>> {
158 self.with_state_async(true, move |state| {
159 if let Some(message) = &state.simulated_index_write_error_message {
160 anyhow::bail!("{message}");
161 } else if let Some(content) = content {
162 state.index_contents.insert(path, content);
163 } else {
164 state.index_contents.remove(&path);
165 }
166 Ok(())
167 })
168 }
169
170 fn remote_url(&self, name: &str) -> BoxFuture<'_, Option<String>> {
171 let name = name.to_string();
172 let fut = self.with_state_async(false, move |state| {
173 state
174 .remotes
175 .get(&name)
176 .context("remote not found")
177 .cloned()
178 });
179 async move { fut.await.ok() }.boxed()
180 }
181
182 fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
183 let mut entries = HashMap::default();
184 self.with_state_async(false, |state| {
185 for (path, content) in &state.head_contents {
186 let status = if let Some((oid, original)) = state
187 .merge_base_contents
188 .get(path)
189 .map(|oid| (oid, &state.oids[oid]))
190 {
191 if original == content {
192 continue;
193 }
194 TreeDiffStatus::Modified { old: *oid }
195 } else {
196 TreeDiffStatus::Added
197 };
198 entries.insert(path.clone(), status);
199 }
200 for (path, oid) in &state.merge_base_contents {
201 if !entries.contains_key(path) {
202 entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
203 }
204 }
205 Ok(TreeDiff { entries })
206 })
207 .boxed()
208 }
209
210 fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
211 self.with_state_async(false, |state| {
212 Ok(revs
213 .into_iter()
214 .map(|rev| state.refs.get(&rev).cloned())
215 .collect())
216 })
217 }
218
219 fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
220 async {
221 Ok(CommitDetails {
222 sha: commit.into(),
223 message: "initial commit".into(),
224 ..Default::default()
225 })
226 }
227 .boxed()
228 }
229
    /// Simulate `git reset`. `commit` may be `HEAD~`/`HEAD^`, `HEAD~<n>`, or
    /// a sha previously recorded in `commit_history`.
    fn reset(
        &self,
        commit: String,
        mode: ResetMode,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            // Translate the ref into a number of commits to pop off history.
            let pop_count = if commit == "HEAD~" || commit == "HEAD^" {
                1
            } else if let Some(suffix) = commit.strip_prefix("HEAD~") {
                suffix
                    .parse::<usize>()
                    .with_context(|| format!("Invalid HEAD~ offset: {commit}"))?
            } else {
                // Search newest-first so a sha appearing more than once
                // resolves to its most recent snapshot.
                match state
                    .commit_history
                    .iter()
                    .rposition(|entry| entry.sha == commit)
                {
                    Some(index) => state.commit_history.len() - index,
                    None => anyhow::bail!("Unknown commit ref: {commit}"),
                }
            };

            if pop_count == 0 || pop_count > state.commit_history.len() {
                anyhow::bail!(
                    "Cannot reset {pop_count} commit(s): only {} in history",
                    state.commit_history.len()
                );
            }

            // Restore the snapshot at the target position and drop everything
            // after it.
            let target_index = state.commit_history.len() - pop_count;
            let snapshot = state.commit_history[target_index].clone();
            state.commit_history.truncate(target_index);

            match mode {
                // Soft reset moves HEAD but leaves the index untouched.
                ResetMode::Soft => {
                    state.head_contents = snapshot.head_contents;
                }
                // Mixed reset also resets the index to match the new HEAD.
                ResetMode::Mixed => {
                    state.head_contents = snapshot.head_contents;
                    state.index_contents = state.head_contents.clone();
                }
            }

            state.refs.insert("HEAD".into(), snapshot.sha);
            Ok(())
        })
    }
279
    /// Not simulated by the fake repository; panics if called in tests.
    fn checkout_files(
        &self,
        _commit: String,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
288
    /// The git dir of this particular checkout.
    fn path(&self) -> PathBuf {
        self.repository_dir_path.clone()
    }
292
    /// The common git dir shared by all worktrees of this repository.
    fn main_repository_path(&self) -> PathBuf {
        self.common_dir_path.clone()
    }
296
297 fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
298 async move { None }.boxed()
299 }
300
    /// Compute `git status` by comparing HEAD, the index, and the files in the
    /// fake filesystem's working tree, honoring `.gitignore` files found in
    /// the workdir and its ancestors. Only paths under `path_prefixes` are
    /// reported, and unmodified entries are filtered out.
    fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>> {
        let workdir_path = self.dot_git_path.parent().unwrap();

        // Load gitignores
        let ignores = workdir_path
            .ancestors()
            .filter_map(|dir| {
                let ignore_path = dir.join(".gitignore");
                let content = self.fs.read_file_sync(ignore_path).ok()?;
                let content = String::from_utf8(content).ok()?;
                let mut builder = GitignoreBuilder::new(dir);
                for line in content.lines() {
                    builder.add_line(Some(dir.into()), line).ok()?;
                }
                builder.build().ok()
            })
            .collect::<Vec<_>>();

        // Load working copy files.
        let git_files: HashMap<RepoPath, (String, bool)> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                // TODO better simulate git status output in the case of submodules and worktrees
                let repo_path = path.strip_prefix(workdir_path).ok()?;
                let mut is_ignored = repo_path.starts_with(".git");
                // `ignores` is ordered most-specific-first (built from
                // `ancestors()`), so a whitelist hit stops less specific rules
                // from overriding a more specific decision.
                for ignore in &ignores {
                    match ignore.matched_path_or_any_parents(path, false) {
                        ignore::Match::None => {}
                        ignore::Match::Ignore(_) => is_ignored = true,
                        ignore::Match::Whitelist(_) => break,
                    }
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .map(|content| String::from_utf8(content).unwrap())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored)))
            })
            .collect();

        let result = self.fs.with_git_state(&self.dot_git_path, false, |state| {
            let mut entries = Vec::new();
            // Consider every path known to HEAD, the index, or the worktree.
            let paths = state
                .head_contents
                .keys()
                .chain(state.index_contents.keys())
                .chain(git_files.keys())
                .collect::<HashSet<_>>();
            for path in paths {
                if !path_prefixes.iter().any(|prefix| path.starts_with(prefix)) {
                    continue;
                }

                let head = state.head_contents.get(path);
                let index = state.index_contents.get(path);
                let unmerged = state.unmerged_paths.get(path);
                let fs = git_files.get(path);
                // Derive the status from which of HEAD / index / worktree
                // contain the path and whether their contents agree.
                let status = match (unmerged, head, index, fs) {
                    // A conflict marker overrides everything else.
                    (Some(unmerged), _, _, _) => FileStatus::Unmerged(*unmerged),
                    (_, Some(head), Some(index), Some((fs, _))) => {
                        FileStatus::Tracked(TrackedStatus {
                            index_status: if head == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                            worktree_status: if fs == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                        })
                    }
                    // In index + HEAD but missing on disk: deleted in worktree.
                    (_, Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: if head == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Removed from the index but still on disk.
                    (_, Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Added,
                    }),
                    (_, Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Newly staged file (not in HEAD yet).
                    (_, None, Some(index), Some((fs, _))) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: if fs == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                    }),
                    (_, None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // On disk only: untracked, unless gitignored.
                    (_, None, None, Some((_, is_ignored))) => {
                        if *is_ignored {
                            continue;
                        }
                        FileStatus::Untracked
                    }
                    // `paths` is the union of the three sources, so at least
                    // one of head/index/fs must be Some.
                    (_, None, None, None) => {
                        unreachable!();
                    }
                };
                // Fully unmodified entries don't appear in `git status`.
                if status
                    != FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Unmodified,
                        worktree_status: StatusCode::Unmodified,
                    })
                {
                    entries.push((path.clone(), status));
                }
            }
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            anyhow::Ok(GitStatus {
                entries: entries.into(),
            })
        });
        Task::ready(match result {
            Ok(result) => result,
            Err(e) => Err(e),
        })
    }
435
436 fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
437 self.with_state_async(false, |state| Ok(state.stash_entries.clone()))
438 }
439
    /// List all known branches, expanding short names to full ref names and
    /// flagging the currently checked-out branch as HEAD.
    fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
        self.with_state_async(false, move |state| {
            let current_branch = &state.current_branch_name;
            let mut branches = state
                .branches
                .iter()
                .map(|branch_name| {
                    // Names containing '/' are treated as remote-tracking
                    // branches; plain names as local heads; full refs pass
                    // through unchanged.
                    let ref_name = if branch_name.starts_with("refs/") {
                        branch_name.into()
                    } else if branch_name.contains('/') {
                        format!("refs/remotes/{branch_name}").into()
                    } else {
                        format!("refs/heads/{branch_name}").into()
                    };
                    Branch {
                        is_head: Some(branch_name) == current_branch.as_ref(),
                        ref_name,
                        most_recent_commit: None,
                        upstream: None,
                    }
                })
                .collect::<Vec<_>>();
            // compute snapshot expects these to be sorted by ref_name
            // because that's what git itself does
            branches.sort_by(|a, b| a.ref_name.cmp(&b.ref_name));
            Ok(branches)
        })
    }
468
    /// Enumerate the main worktree plus any linked worktrees recorded under
    /// `<common dir>/worktrees/<name>/` (each entry holding `HEAD` and
    /// `gitdir` files, mirroring real git's layout).
    fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
        let fs = self.fs.clone();
        let common_dir_path = self.common_dir_path.clone();
        let executor = self.executor.clone();

        async move {
            executor.simulate_random_delay().await;

            // Build the main worktree entry from the shared git state.
            let (main_worktree, refs) = fs.with_git_state(&common_dir_path, false, |state| {
                let work_dir = common_dir_path
                    .parent()
                    .map(PathBuf::from)
                    .unwrap_or_else(|| common_dir_path.clone());
                let head_sha = state
                    .refs
                    .get("HEAD")
                    .cloned()
                    .unwrap_or_else(|| "0000000".to_string());
                let branch_ref = state
                    .current_branch_name
                    .as_ref()
                    .map(|name| format!("refs/heads/{name}"))
                    .unwrap_or_else(|| "refs/heads/main".to_string());
                let main_wt = Worktree {
                    path: work_dir,
                    ref_name: Some(branch_ref.into()),
                    sha: head_sha.into(),
                    is_main: true,
                };
                (main_wt, state.refs.clone())
            })?;

            let mut all = vec![main_worktree];

            // Scan linked-worktree metadata directories; entries with missing
            // files are silently skipped.
            let worktrees_dir = common_dir_path.join("worktrees");
            if let Ok(mut entries) = fs.read_dir(&worktrees_dir).await {
                use futures::StreamExt;
                while let Some(Ok(entry_path)) = entries.next().await {
                    let head_content = match fs.load(&entry_path.join("HEAD")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };
                    let gitdir_content = match fs.load(&entry_path.join("gitdir")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };

                    // A symbolic HEAD ("ref: refs/heads/x") resolves through
                    // the ref table; a detached HEAD holds the sha directly.
                    let ref_name = head_content
                        .strip_prefix("ref: ")
                        .map(|s| s.trim().to_string());
                    let sha = ref_name
                        .as_ref()
                        .and_then(|r| refs.get(r))
                        .cloned()
                        .unwrap_or_else(|| head_content.trim().to_string());

                    // `gitdir` points at the worktree's `.git` file; its
                    // parent is the checkout directory.
                    let worktree_path = PathBuf::from(gitdir_content.trim())
                        .parent()
                        .map(PathBuf::from)
                        .unwrap_or_default();

                    all.push(Worktree {
                        path: worktree_path,
                        ref_name: ref_name.map(Into::into),
                        sha: sha.into(),
                        is_main: false,
                    });
                }
            }

            Ok(all)
        }
        .boxed()
    }
543
    /// Simulate `git worktree add`: validate the target, create the checkout
    /// directory, write the `.git/worktrees/<name>/` metadata files (`HEAD`,
    /// `commondir`, `gitdir`) and the checkout's `.git` pointer file, and —
    /// for `NewBranch` targets — record the new branch and ref in git state.
    fn create_worktree(
        &self,
        target: CreateWorktreeTarget,
        path: PathBuf,
    ) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            let branch_name = target.branch_name().map(ToOwned::to_owned);
            let create_branch_ref = matches!(target, CreateWorktreeTarget::NewBranch { .. });

            // Check for simulated error and validate branch state before any side effects.
            fs.with_git_state(&dot_git_path, false, {
                let branch_name = branch_name.clone();
                move |state| {
                    if let Some(message) = &state.simulated_create_worktree_error {
                        anyhow::bail!("{message}");
                    }

                    // New branches must not exist yet; existing branches must.
                    match (create_branch_ref, branch_name.as_ref()) {
                        (true, Some(branch_name)) => {
                            if state.branches.contains(branch_name) {
                                bail!("a branch named '{}' already exists", branch_name);
                            }
                        }
                        (false, Some(branch_name)) => {
                            if !state.branches.contains(branch_name) {
                                bail!("no branch named '{}' exists", branch_name);
                            }
                        }
                        (false, None) => {}
                        (true, None) => bail!("branch name is required to create a branch"),
                    }

                    Ok(())
                }
            })??;

            // Resolve the (branch, sha) pair the new worktree points at.
            let (branch_name, sha, create_branch_ref) = match target {
                CreateWorktreeTarget::ExistingBranch { branch_name } => {
                    let ref_name = format!("refs/heads/{branch_name}");
                    let sha = fs.with_git_state(&dot_git_path, false, {
                        move |state| {
                            Ok::<_, anyhow::Error>(
                                state
                                    .refs
                                    .get(&ref_name)
                                    .cloned()
                                    .unwrap_or_else(|| "fake-sha".to_string()),
                            )
                        }
                    })??;
                    (Some(branch_name), sha, false)
                }
                CreateWorktreeTarget::NewBranch {
                    branch_name,
                    base_sha: start_point,
                } => (
                    Some(branch_name),
                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
                    true,
                ),
                CreateWorktreeTarget::Detached {
                    base_sha: start_point,
                } => (
                    None,
                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
                    false,
                ),
            };

            // Create the worktree checkout directory.
            fs.create_dir(&path).await?;

            // Create .git/worktrees/<name>/ directory with HEAD, commondir, gitdir.
            let worktree_entry_name = branch_name.as_deref().unwrap_or_else(|| {
                path.file_name()
                    .and_then(|name| name.to_str())
                    .unwrap_or("detached")
            });
            let worktrees_entry_dir = common_dir_path.join("worktrees").join(worktree_entry_name);
            fs.create_dir(&worktrees_entry_dir).await?;

            // Symbolic HEAD for branch checkouts, bare sha when detached.
            let head_content = if let Some(ref branch_name) = branch_name {
                let ref_name = format!("refs/heads/{branch_name}");
                format!("ref: {ref_name}")
            } else {
                sha.clone()
            };
            fs.write_file_internal(
                worktrees_entry_dir.join("HEAD"),
                head_content.into_bytes(),
                false,
            )?;
            fs.write_file_internal(
                worktrees_entry_dir.join("commondir"),
                common_dir_path.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;
            let worktree_dot_git = path.join(".git");
            fs.write_file_internal(
                worktrees_entry_dir.join("gitdir"),
                worktree_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Create .git file in the worktree checkout.
            fs.write_file_internal(
                &worktree_dot_git,
                format!("gitdir: {}", worktrees_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Update git state for newly created branches.
            if create_branch_ref {
                fs.with_git_state(&dot_git_path, true, {
                    let branch_name = branch_name.clone();
                    let sha = sha.clone();
                    move |state| {
                        if let Some(branch_name) = branch_name {
                            let ref_name = format!("refs/heads/{branch_name}");
                            state.refs.insert(ref_name, sha);
                            state.branches.insert(branch_name);
                        }
                        Ok::<(), anyhow::Error>(())
                    }
                })??;
            }

            Ok(())
        }
        .boxed()
    }
681
    /// Simulate `git worktree remove`: delete the checkout directory and its
    /// metadata under `.git/worktrees/`, then poke the shared git state so
    /// watchers observe the change. `_force` is ignored by the fake.
    fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Read the worktree's .git file to find its entry directory.
            let dot_git_file = path.join(".git");
            let content = fs
                .load(&dot_git_file)
                .await
                .with_context(|| format!("no worktree found at path: {}", path.display()))?;
            // `create_worktree` writes "gitdir: <path>"; trim absorbs the
            // space after the colon.
            let gitdir = content
                .strip_prefix("gitdir:")
                .context("invalid .git file in worktree")?
                .trim();
            let worktree_entry_dir = PathBuf::from(gitdir);

            // Remove the worktree checkout directory.
            fs.remove_dir(
                &path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: false,
                },
            )
            .await?;

            // Remove the .git/worktrees/<name>/ directory.
            fs.remove_dir(
                &worktree_entry_dir,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: false,
                },
            )
            .await?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
729
    /// Simulate `git worktree move`: relocate the checkout directory and
    /// rewrite the two pointer files (`gitdir` in the metadata dir, `.git` in
    /// the checkout) so they reference each other's new locations.
    fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Read the worktree's .git file to find its entry directory.
            let dot_git_file = old_path.join(".git");
            let content = fs
                .load(&dot_git_file)
                .await
                .with_context(|| format!("no worktree found at path: {}", old_path.display()))?;
            // `create_worktree` writes "gitdir: <path>"; trim absorbs the
            // space after the colon.
            let gitdir = content
                .strip_prefix("gitdir:")
                .context("invalid .git file in worktree")?
                .trim();
            let worktree_entry_dir = PathBuf::from(gitdir);

            // Move the worktree checkout directory.
            fs.rename(
                &old_path,
                &new_path,
                RenameOptions {
                    overwrite: false,
                    ignore_if_exists: false,
                    create_parents: true,
                },
            )
            .await?;

            // Update the gitdir file in .git/worktrees/<name>/ to point to the
            // new location.
            let new_dot_git = new_path.join(".git");
            fs.write_file_internal(
                worktree_entry_dir.join("gitdir"),
                new_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Update the .git file in the moved worktree checkout.
            fs.write_file_internal(
                &new_dot_git,
                format!("gitdir: {}", worktree_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
785
786 fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
787 self.with_state_async(true, |state| {
788 state.current_branch_name = Some(name);
789 Ok(())
790 })
791 }
792
793 fn create_branch(
794 &self,
795 name: String,
796 _base_branch: Option<String>,
797 ) -> BoxFuture<'_, Result<()>> {
798 self.with_state_async(true, move |state| {
799 if let Some((remote, _)) = name.split_once('/')
800 && !state.remotes.contains_key(remote)
801 {
802 state.remotes.insert(remote.to_owned(), "".to_owned());
803 }
804 state.branches.insert(name);
805 Ok(())
806 })
807 }
808
809 fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> {
810 self.with_state_async(true, move |state| {
811 if !state.branches.remove(&branch) {
812 bail!("no such branch: {branch}");
813 }
814 state.branches.insert(new_name.clone());
815 if state.current_branch_name == Some(branch) {
816 state.current_branch_name = Some(new_name);
817 }
818 Ok(())
819 })
820 }
821
822 fn delete_branch(&self, _is_remote: bool, name: String) -> BoxFuture<'_, Result<()>> {
823 self.with_state_async(true, move |state| {
824 if !state.branches.remove(&name) {
825 bail!("no such branch: {name}");
826 }
827 Ok(())
828 })
829 }
830
831 fn blame(
832 &self,
833 path: RepoPath,
834 _content: Rope,
835 _line_ending: LineEnding,
836 ) -> BoxFuture<'_, Result<git::blame::Blame>> {
837 self.with_state_async(false, move |state| {
838 state
839 .blames
840 .get(&path)
841 .with_context(|| format!("failed to get blame for {:?}", path))
842 .cloned()
843 })
844 }
845
    /// Convenience wrapper: full history is pagination with no skip or limit.
    fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
849
850 fn file_history_paginated(
851 &self,
852 path: RepoPath,
853 _skip: usize,
854 _limit: Option<usize>,
855 ) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
856 async move {
857 Ok(git::repository::FileHistory {
858 entries: Vec::new(),
859 path,
860 })
861 }
862 .boxed()
863 }
864
865 fn stage_paths(
866 &self,
867 paths: Vec<RepoPath>,
868 _env: Arc<HashMap<String, String>>,
869 ) -> BoxFuture<'_, Result<()>> {
870 Box::pin(async move {
871 let contents = paths
872 .into_iter()
873 .map(|path| {
874 let abs_path = self
875 .dot_git_path
876 .parent()
877 .unwrap()
878 .join(&path.as_std_path());
879 Box::pin(async move { (path.clone(), self.fs.load(&abs_path).await.ok()) })
880 })
881 .collect::<Vec<_>>();
882 let contents = join_all(contents).await;
883 self.with_state_async(true, move |state| {
884 for (path, content) in contents {
885 if let Some(content) = content {
886 state.index_contents.insert(path, content);
887 } else {
888 state.index_contents.remove(&path);
889 }
890 }
891 Ok(())
892 })
893 .await
894 })
895 }
896
897 fn unstage_paths(
898 &self,
899 paths: Vec<RepoPath>,
900 _env: Arc<HashMap<String, String>>,
901 ) -> BoxFuture<'_, Result<()>> {
902 self.with_state_async(true, move |state| {
903 for path in paths {
904 match state.head_contents.get(&path) {
905 Some(content) => state.index_contents.insert(path, content.clone()),
906 None => state.index_contents.remove(&path),
907 };
908 }
909 Ok(())
910 })
911 }
912
    /// Not simulated by the fake repository; panics if called in tests.
    fn stash_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
920
    /// Not simulated by the fake repository; panics if called in tests.
    fn stash_pop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
928
    /// Not simulated by the fake repository; panics if called in tests.
    fn stash_apply(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
936
    /// Not simulated by the fake repository; panics if called in tests.
    fn stash_drop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
944
    /// Simulate `git commit`: snapshot the *pre-commit* state (with the
    /// pre-commit HEAD sha) into `commit_history` so `reset()` can restore
    /// it, promote the index to HEAD, and advance the HEAD ref to a fresh
    /// fake sha. Message, author, and askpass are ignored by the fake.
    fn commit(
        &self,
        _message: gpui::SharedString,
        _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
        options: CommitOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            // Like real git, refuse an empty commit unless explicitly allowed
            // (amending is always permitted).
            if !options.allow_empty && !options.amend && state.index_contents == state.head_contents
            {
                anyhow::bail!("nothing to commit (use allow_empty to create an empty commit)");
            }

            let old_sha = state.refs.get("HEAD").cloned().unwrap_or_default();
            state.commit_history.push(FakeCommitSnapshot {
                head_contents: state.head_contents.clone(),
                index_contents: state.index_contents.clone(),
                sha: old_sha,
            });

            state.head_contents = state.index_contents.clone();

            // History length doubles as a monotonically increasing commit id.
            let new_sha = format!("fake-commit-{}", state.commit_history.len());
            state.refs.insert("HEAD".into(), new_sha);

            Ok(())
        })
    }
974
975 fn run_hook(
976 &self,
977 _hook: RunHook,
978 _env: Arc<HashMap<String, String>>,
979 ) -> BoxFuture<'_, Result<()>> {
980 async { Ok(()) }.boxed()
981 }
982
    /// Not simulated by the fake repository; panics if called in tests.
    fn push(
        &self,
        _branch: String,
        _remote_branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
995
    /// Not simulated by the fake repository; panics if called in tests.
    fn pull(
        &self,
        _branch: Option<String>,
        _remote: String,
        _rebase: bool,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
1007
    /// Not simulated by the fake repository; panics if called in tests.
    fn fetch(
        &self,
        _fetch_options: FetchOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
1017
1018 fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
1019 self.with_state_async(false, move |state| {
1020 let remotes = state
1021 .remotes
1022 .keys()
1023 .map(|r| Remote {
1024 name: r.clone().into(),
1025 })
1026 .collect::<Vec<_>>();
1027 Ok(remotes)
1028 })
1029 }
1030
    /// Not simulated by the fake repository; panics if called in tests.
    fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }
1034
    /// Not simulated by the fake repository; panics if called in tests.
    fn get_branch_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }
1038
1039 fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
1040 future::ready(Ok(Vec::new())).boxed()
1041 }
1042
1043 fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result<String>> {
1044 future::ready(Ok(String::new())).boxed()
1045 }
1046
    /// Compute a per-file added/deleted line count between HEAD and the
    /// working tree, restricted to `path_prefixes`. As a simplification, the
    /// whole old and new contents are counted as deleted/added when a file
    /// differs (no real line-level diffing). Untracked-and-unstaged files are
    /// excluded via the index filter below.
    fn diff_stat(
        &self,
        path_prefixes: &[RepoPath],
    ) -> BoxFuture<'_, Result<git::status::GitDiffStat>> {
        // Number of lines in `s`; empty content counts as zero lines.
        fn count_lines(s: &str) -> u32 {
            if s.is_empty() {
                0
            } else {
                s.lines().count() as u32
            }
        }

        // A path matches when no prefixes were given, a prefix is ".", or the
        // path equals / lies beneath one of the prefixes.
        fn matches_prefixes(path: &RepoPath, prefixes: &[RepoPath]) -> bool {
            if prefixes.is_empty() {
                return true;
            }
            prefixes.iter().any(|prefix| {
                let prefix_str = prefix.as_unix_str();
                if prefix_str == "." {
                    return true;
                }
                path == prefix || path.starts_with(&prefix)
            })
        }

        let path_prefixes = path_prefixes.to_vec();

        // Snapshot the working tree contents (excluding `.git`) up front.
        let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf();
        let worktree_files: HashMap<RepoPath, String> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                let repo_path = path.strip_prefix(&workdir_path).ok()?;
                if repo_path.starts_with(".git") {
                    return None;
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .and_then(|bytes| String::from_utf8(bytes).ok())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), content))
            })
            .collect();

        self.with_state_async(false, move |state| {
            let mut entries = Vec::new();
            // Union of HEAD paths and worktree paths that are in the index —
            // worktree files never staged don't participate in the diff.
            let all_paths: HashSet<&RepoPath> = state
                .head_contents
                .keys()
                .chain(
                    worktree_files
                        .keys()
                        .filter(|p| state.index_contents.contains_key(*p)),
                )
                .collect();
            for path in all_paths {
                if !matches_prefixes(path, &path_prefixes) {
                    continue;
                }
                let head = state.head_contents.get(path);
                let worktree = worktree_files.get(path);
                match (head, worktree) {
                    // Changed: old content counts as deleted, new as added.
                    (Some(old), Some(new)) if old != new => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: count_lines(new),
                                deleted: count_lines(old),
                            },
                        ));
                    }
                    // Deleted from the worktree.
                    (Some(old), None) => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: 0,
                                deleted: count_lines(old),
                            },
                        ));
                    }
                    // New (staged) file not in HEAD.
                    (None, Some(new)) => {
                        entries.push((
                            path.clone(),
                            git::status::DiffStat {
                                added: count_lines(new),
                                deleted: 0,
                            },
                        ));
                    }
                    // Identical contents: no stat entry.
                    _ => {}
                }
            }
            entries.sort_by(|(a, _), (b, _)| a.cmp(b));
            Ok(git::status::GitDiffStat {
                entries: entries.into(),
            })
        })
        .boxed()
    }
1149
    /// Capture the fake-filesystem subtree containing this repository under a
    /// random object id, so it can later be restored or compared.
    fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
        let executor = self.executor.clone();
        let fs = self.fs.clone();
        let checkpoints = self.checkpoints.clone();
        // Snapshot the parent of the git dir, i.e. the working directory.
        let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
        async move {
            executor.simulate_random_delay().await;
            let oid = git::Oid::random(&mut *executor.rng().lock());
            let entry = fs.entry(&repository_dir_path)?;
            checkpoints.lock().insert(oid, entry);
            Ok(GitRepositoryCheckpoint { commit_sha: oid })
        }
        .boxed()
    }
1164
1165 fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
1166 let executor = self.executor.clone();
1167 let fs = self.fs.clone();
1168 let checkpoints = self.checkpoints.clone();
1169 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1170 async move {
1171 executor.simulate_random_delay().await;
1172 let checkpoints = checkpoints.lock();
1173 let entry = checkpoints
1174 .get(&checkpoint.commit_sha)
1175 .context(format!("invalid checkpoint: {}", checkpoint.commit_sha))?;
1176 fs.insert_entry(&repository_dir_path, entry.clone())?;
1177 Ok(())
1178 }
1179 .boxed()
1180 }
1181
1182 fn compare_checkpoints(
1183 &self,
1184 left: GitRepositoryCheckpoint,
1185 right: GitRepositoryCheckpoint,
1186 ) -> BoxFuture<'_, Result<bool>> {
1187 let executor = self.executor.clone();
1188 let checkpoints = self.checkpoints.clone();
1189 async move {
1190 executor.simulate_random_delay().await;
1191 let checkpoints = checkpoints.lock();
1192 let left = checkpoints
1193 .get(&left.commit_sha)
1194 .context(format!("invalid left checkpoint: {}", left.commit_sha))?;
1195 let right = checkpoints
1196 .get(&right.commit_sha)
1197 .context(format!("invalid right checkpoint: {}", right.commit_sha))?;
1198
1199 Ok(left == right)
1200 }
1201 .boxed()
1202 }
1203
1204 fn diff_checkpoints(
1205 &self,
1206 base_checkpoint: GitRepositoryCheckpoint,
1207 target_checkpoint: GitRepositoryCheckpoint,
1208 ) -> BoxFuture<'_, Result<String>> {
1209 let executor = self.executor.clone();
1210 let checkpoints = self.checkpoints.clone();
1211 async move {
1212 executor.simulate_random_delay().await;
1213 let checkpoints = checkpoints.lock();
1214 let base = checkpoints
1215 .get(&base_checkpoint.commit_sha)
1216 .context(format!(
1217 "invalid base checkpoint: {}",
1218 base_checkpoint.commit_sha
1219 ))?;
1220 let target = checkpoints
1221 .get(&target_checkpoint.commit_sha)
1222 .context(format!(
1223 "invalid target checkpoint: {}",
1224 target_checkpoint.commit_sha
1225 ))?;
1226
1227 fn collect_files(
1228 entry: &FakeFsEntry,
1229 prefix: String,
1230 out: &mut std::collections::BTreeMap<String, String>,
1231 ) {
1232 match entry {
1233 FakeFsEntry::File { content, .. } => {
1234 out.insert(prefix, String::from_utf8_lossy(content).into_owned());
1235 }
1236 FakeFsEntry::Dir { entries, .. } => {
1237 for (name, child) in entries {
1238 let path = if prefix.is_empty() {
1239 name.clone()
1240 } else {
1241 format!("{prefix}/{name}")
1242 };
1243 collect_files(child, path, out);
1244 }
1245 }
1246 FakeFsEntry::Symlink { .. } => {}
1247 }
1248 }
1249
1250 let mut base_files = std::collections::BTreeMap::new();
1251 let mut target_files = std::collections::BTreeMap::new();
1252 collect_files(base, String::new(), &mut base_files);
1253 collect_files(target, String::new(), &mut target_files);
1254
1255 let all_paths: std::collections::BTreeSet<&String> =
1256 base_files.keys().chain(target_files.keys()).collect();
1257
1258 let mut diff = String::new();
1259 for path in all_paths {
1260 match (base_files.get(path), target_files.get(path)) {
1261 (Some(base_content), Some(target_content))
1262 if base_content != target_content =>
1263 {
1264 diff.push_str(&format!("diff --git a/{path} b/{path}\n"));
1265 diff.push_str(&format!("--- a/{path}\n"));
1266 diff.push_str(&format!("+++ b/{path}\n"));
1267 for line in base_content.lines() {
1268 diff.push_str(&format!("-{line}\n"));
1269 }
1270 for line in target_content.lines() {
1271 diff.push_str(&format!("+{line}\n"));
1272 }
1273 }
1274 (Some(_), None) => {
1275 diff.push_str(&format!("diff --git a/{path} /dev/null\n"));
1276 diff.push_str("deleted file\n");
1277 }
1278 (None, Some(_)) => {
1279 diff.push_str(&format!("diff --git /dev/null b/{path}\n"));
1280 diff.push_str("new file\n");
1281 }
1282 _ => {}
1283 }
1284 }
1285 Ok(diff)
1286 }
1287 .boxed()
1288 }
1289
1290 fn default_branch(
1291 &self,
1292 include_remote_name: bool,
1293 ) -> BoxFuture<'_, Result<Option<SharedString>>> {
1294 async move {
1295 Ok(Some(if include_remote_name {
1296 "origin/main".into()
1297 } else {
1298 "main".into()
1299 }))
1300 }
1301 .boxed()
1302 }
1303
1304 fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
1305 self.with_state_async(true, move |state| {
1306 state.remotes.insert(name, url);
1307 Ok(())
1308 })
1309 }
1310
1311 fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
1312 self.with_state_async(true, move |state| {
1313 state.branches.retain(|branch| {
1314 branch
1315 .split_once('/')
1316 .is_none_or(|(remote, _)| remote != name)
1317 });
1318 state.remotes.remove(&name);
1319 Ok(())
1320 })
1321 }
1322
1323 fn initial_graph_data(
1324 &self,
1325 _log_source: LogSource,
1326 _log_order: LogOrder,
1327 request_tx: Sender<Vec<Arc<InitialGraphCommitData>>>,
1328 ) -> BoxFuture<'_, Result<()>> {
1329 let fs = self.fs.clone();
1330 let dot_git_path = self.dot_git_path.clone();
1331 async move {
1332 let (graph_commits, simulated_error) =
1333 fs.with_git_state(&dot_git_path, false, |state| {
1334 (
1335 state.graph_commits.clone(),
1336 state.simulated_graph_error.clone(),
1337 )
1338 })?;
1339
1340 if let Some(error) = simulated_error {
1341 anyhow::bail!("{}", error);
1342 }
1343
1344 for chunk in graph_commits.chunks(GRAPH_CHUNK_SIZE) {
1345 request_tx.send(chunk.to_vec()).await.ok();
1346 }
1347 Ok(())
1348 }
1349 .boxed()
1350 }
1351
1352 fn search_commits(
1353 &self,
1354 _log_source: LogSource,
1355 _search_args: SearchCommitArgs,
1356 _request_tx: Sender<Oid>,
1357 ) -> BoxFuture<'_, Result<()>> {
1358 async { bail!("search_commits not supported for FakeGitRepository") }.boxed()
1359 }
1360
1361 fn commit_data_reader(&self) -> Result<CommitDataReader> {
1362 anyhow::bail!("commit_data_reader not supported for FakeGitRepository")
1363 }
1364
1365 fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> {
1366 self.with_state_async(true, move |state| {
1367 state.refs.insert(ref_name, commit);
1368 Ok(())
1369 })
1370 }
1371
1372 fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> {
1373 self.with_state_async(true, move |state| {
1374 state.refs.remove(&ref_name);
1375 Ok(())
1376 })
1377 }
1378
1379 fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>> {
1380 async { Ok(()) }.boxed()
1381 }
1382
1383 fn set_trusted(&self, trusted: bool) {
1384 self.is_trusted
1385 .store(trusted, std::sync::atomic::Ordering::Release);
1386 }
1387
1388 fn is_trusted(&self) -> bool {
1389 self.is_trusted.load(std::sync::atomic::Ordering::Acquire)
1390 }
1391}