1use crate::{FakeFs, FakeFsEntry, Fs, RemoveOptions, RenameOptions};
2use anyhow::{Context as _, Result, bail};
3use collections::{HashMap, HashSet};
4use futures::future::{self, BoxFuture, join_all};
5use git::{
6 Oid, RunHook,
7 blame::Blame,
8 repository::{
9 AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions,
10 CreateWorktreeTarget, FetchOptions, GRAPH_CHUNK_SIZE, GitRepository,
11 GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote,
12 RepoPath, ResetMode, SearchCommitArgs, Worktree,
13 },
14 stash::GitStash,
15 status::{
16 DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
17 UnmergedStatus,
18 },
19};
20use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task};
21use ignore::gitignore::GitignoreBuilder;
22use parking_lot::Mutex;
23use rope::Rope;
24use smol::{channel::Sender, future::FutureExt as _};
25use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool};
26use text::LineEnding;
27use util::{paths::PathStyle, rel_path::RelPath};
28
/// In-memory stand-in for a real git repository, backed by [`FakeFs`].
/// Lets tests exercise git-aware code without shelling out to a real git binary.
#[derive(Clone)]
pub struct FakeGitRepository {
    pub(crate) fs: Arc<FakeFs>,
    // Snapshots of the repository directory taken by `checkpoint`, keyed by a
    // randomly generated oid; consumed by `restore_checkpoint`/`compare_checkpoints`.
    pub(crate) checkpoints: Arc<Mutex<HashMap<Oid, FakeFsEntry>>>,
    pub(crate) executor: BackgroundExecutor,
    // Path to this worktree's `.git` (for linked worktrees created by
    // `create_worktree`, this is a `.git` *file* pointing at the entry dir).
    pub(crate) dot_git_path: PathBuf,
    // Directory holding this repository's own metadata; see `path()`.
    pub(crate) repository_dir_path: PathBuf,
    // The main repository's common git dir; see `main_repository_path()`.
    pub(crate) common_dir_path: PathBuf,
    pub(crate) is_trusted: Arc<AtomicBool>,
}
39
/// State captured when a commit is made, so `reset` can later restore it.
#[derive(Debug, Clone)]
pub struct FakeCommitSnapshot {
    /// Tracked file contents at HEAD at the moment of the commit.
    pub head_contents: HashMap<RepoPath, String>,
    /// Index (staging area) contents at the same moment.
    pub index_contents: HashMap<RepoPath, String>,
    // The sha HEAD pointed at *before* the commit was recorded (see `commit`,
    // which stores the old HEAD sha here, and `reset`, which restores it).
    pub sha: String,
}
46
/// The mutable, in-memory state shared by all handles to one fake repository.
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    /// Snapshots pushed by `commit`; popped/truncated by `reset`.
    pub commit_history: Vec<FakeCommitSnapshot>,
    /// Channel through which git-state changes are announced to observers.
    pub event_emitter: smol::channel::Sender<PathBuf>,
    /// Paths currently in a conflicted (unmerged) state.
    pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
    /// File contents at HEAD.
    pub head_contents: HashMap<RepoPath, String>,
    /// File contents in the index (staging area).
    pub index_contents: HashMap<RepoPath, String>,
    // everything in commit contents is in oids
    /// Merge-base tree: path -> blob oid (the blob text lives in `oids`).
    pub merge_base_contents: HashMap<RepoPath, Oid>,
    /// Blob store: oid -> file contents.
    pub oids: HashMap<Oid, String>,
    /// Canned blame results, keyed by path.
    pub blames: HashMap<RepoPath, Blame>,
    /// The currently checked-out branch, if any.
    pub current_branch_name: Option<String>,
    /// All known branch names (bare, "remote/branch", or fully-qualified refs).
    pub branches: HashSet<String>,
    /// List of remotes, keys are names and values are URLs
    pub remotes: HashMap<String, String>,
    /// When set, index writes fail with this message (for testing error paths).
    pub simulated_index_write_error_message: Option<String>,
    /// When set, `create_worktree` fails with this message.
    pub simulated_create_worktree_error: Option<String>,
    /// Ref name -> sha (e.g. "HEAD", "refs/heads/main").
    pub refs: HashMap<String, String>,
    /// Pre-seeded commit-graph data, if a test installed any.
    pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
    /// Stash entries, if a test installed any.
    pub stash_entries: GitStash,
}
68
69impl FakeGitRepositoryState {
70 pub fn new(event_emitter: smol::channel::Sender<PathBuf>) -> Self {
71 FakeGitRepositoryState {
72 event_emitter,
73 head_contents: Default::default(),
74 index_contents: Default::default(),
75 unmerged_paths: Default::default(),
76 blames: Default::default(),
77 current_branch_name: Default::default(),
78 branches: Default::default(),
79 simulated_index_write_error_message: Default::default(),
80 simulated_create_worktree_error: Default::default(),
81 refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
82 merge_base_contents: Default::default(),
83 oids: Default::default(),
84 remotes: HashMap::default(),
85 graph_commits: Vec::new(),
86 commit_history: Vec::new(),
87 stash_entries: Default::default(),
88 }
89 }
90}
91
impl FakeGitRepository {
    /// Runs `f` against this repository's shared git state after a simulated
    /// random delay (so tests exercise different interleavings), returning the
    /// closure's result as a `'static` future.
    ///
    /// NOTE(review): `write = true` presumably marks the access as mutating so
    /// `FakeFs` emits a change event for the dot-git path — confirm in
    /// `FakeFs::with_git_state`.
    fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<'static, Result<T>>
    where
        F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> Result<T>,
        T: Send,
    {
        // Clone everything the future needs so it doesn't borrow `self`.
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // `with_git_state` yields Result<Result<T>>; `?` flattens the
            // outer (state-access) error into the future's output.
            fs.with_git_state(&dot_git_path, write, f)?
        }
        .boxed()
    }
}
108
109impl GitRepository for FakeGitRepository {
    /// No-op: the fake repository's index lives in memory and never goes stale.
    fn reload_index(&self) {}
111
112 fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
113 let fut = self.with_state_async(false, move |state| {
114 state
115 .index_contents
116 .get(&path)
117 .context("not present in index")
118 .cloned()
119 });
120 self.executor.spawn(async move { fut.await.ok() }).boxed()
121 }
122
123 fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
124 let fut = self.with_state_async(false, move |state| {
125 state
126 .head_contents
127 .get(&path)
128 .context("not present in HEAD")
129 .cloned()
130 });
131 self.executor.spawn(async move { fut.await.ok() }).boxed()
132 }
133
134 fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
135 self.with_state_async(false, move |state| {
136 state.oids.get(&oid).cloned().context("oid does not exist")
137 })
138 .boxed()
139 }
140
    /// Not supported by the fake repository.
    fn load_commit(
        &self,
        _commit: String,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::CommitDiff>> {
        unimplemented!()
    }
148
149 fn set_index_text(
150 &self,
151 path: RepoPath,
152 content: Option<String>,
153 _env: Arc<HashMap<String, String>>,
154 _is_executable: bool,
155 ) -> BoxFuture<'_, anyhow::Result<()>> {
156 self.with_state_async(true, move |state| {
157 if let Some(message) = &state.simulated_index_write_error_message {
158 anyhow::bail!("{message}");
159 } else if let Some(content) = content {
160 state.index_contents.insert(path, content);
161 } else {
162 state.index_contents.remove(&path);
163 }
164 Ok(())
165 })
166 }
167
168 fn remote_url(&self, name: &str) -> BoxFuture<'_, Option<String>> {
169 let name = name.to_string();
170 let fut = self.with_state_async(false, move |state| {
171 state
172 .remotes
173 .get(&name)
174 .context("remote not found")
175 .cloned()
176 });
177 async move { fut.await.ok() }.boxed()
178 }
179
180 fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
181 let mut entries = HashMap::default();
182 self.with_state_async(false, |state| {
183 for (path, content) in &state.head_contents {
184 let status = if let Some((oid, original)) = state
185 .merge_base_contents
186 .get(path)
187 .map(|oid| (oid, &state.oids[oid]))
188 {
189 if original == content {
190 continue;
191 }
192 TreeDiffStatus::Modified { old: *oid }
193 } else {
194 TreeDiffStatus::Added
195 };
196 entries.insert(path.clone(), status);
197 }
198 for (path, oid) in &state.merge_base_contents {
199 if !entries.contains_key(path) {
200 entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
201 }
202 }
203 Ok(TreeDiff { entries })
204 })
205 .boxed()
206 }
207
208 fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
209 self.with_state_async(false, |state| {
210 Ok(revs
211 .into_iter()
212 .map(|rev| state.refs.get(&rev).cloned())
213 .collect())
214 })
215 }
216
217 fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
218 async {
219 Ok(CommitDetails {
220 sha: commit.into(),
221 message: "initial commit".into(),
222 ..Default::default()
223 })
224 }
225 .boxed()
226 }
227
    /// Moves HEAD back to an earlier state recorded in `commit_history`.
    ///
    /// `commit` may be "HEAD~"/"HEAD^" (one commit back), "HEAD~N", or a sha
    /// previously recorded in the history.
    fn reset(
        &self,
        commit: String,
        mode: ResetMode,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            // Number of history entries to pop to reach the target commit.
            let pop_count = if commit == "HEAD~" || commit == "HEAD^" {
                1
            } else if let Some(suffix) = commit.strip_prefix("HEAD~") {
                suffix
                    .parse::<usize>()
                    .with_context(|| format!("Invalid HEAD~ offset: {commit}"))?
            } else {
                // Resolve a sha to its distance from the top of the history
                // (searching from the end, in case a sha appears twice).
                match state
                    .commit_history
                    .iter()
                    .rposition(|entry| entry.sha == commit)
                {
                    Some(index) => state.commit_history.len() - index,
                    None => anyhow::bail!("Unknown commit ref: {commit}"),
                }
            };

            if pop_count == 0 || pop_count > state.commit_history.len() {
                anyhow::bail!(
                    "Cannot reset {pop_count} commit(s): only {} in history",
                    state.commit_history.len()
                );
            }

            // Grab the snapshot to restore, then discard it and everything
            // after it from the history.
            let target_index = state.commit_history.len() - pop_count;
            let snapshot = state.commit_history[target_index].clone();
            state.commit_history.truncate(target_index);

            match mode {
                // Soft reset: restore HEAD, leave the index untouched.
                ResetMode::Soft => {
                    state.head_contents = snapshot.head_contents;
                }
                // Mixed reset: restore HEAD and reset the index to match it.
                ResetMode::Mixed => {
                    state.head_contents = snapshot.head_contents;
                    state.index_contents = state.head_contents.clone();
                }
            }

            state.refs.insert("HEAD".into(), snapshot.sha);
            Ok(())
        })
    }
277
    /// Not supported by the fake repository.
    fn checkout_files(
        &self,
        _commit: String,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
286
287 fn path(&self) -> PathBuf {
288 self.repository_dir_path.clone()
289 }
290
291 fn main_repository_path(&self) -> PathBuf {
292 self.common_dir_path.clone()
293 }
294
295 fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
296 async move { None }.boxed()
297 }
298
    /// Computes `git status` by comparing HEAD, the index, and the files on
    /// the fake filesystem, honoring `.gitignore` files found in the working
    /// directory and its ancestors. Only paths under one of `path_prefixes`
    /// are reported, and clean (fully unmodified) entries are omitted.
    fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>> {
        let workdir_path = self.dot_git_path.parent().unwrap();

        // Load gitignores
        let ignores = workdir_path
            .ancestors()
            .filter_map(|dir| {
                let ignore_path = dir.join(".gitignore");
                let content = self.fs.read_file_sync(ignore_path).ok()?;
                let content = String::from_utf8(content).ok()?;
                let mut builder = GitignoreBuilder::new(dir);
                for line in content.lines() {
                    builder.add_line(Some(dir.into()), line).ok()?;
                }
                builder.build().ok()
            })
            .collect::<Vec<_>>();

        // Load working copy files.
        let git_files: HashMap<RepoPath, (String, bool)> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                // TODO better simulate git status output in the case of submodules and worktrees
                let repo_path = path.strip_prefix(workdir_path).ok()?;
                let mut is_ignored = repo_path.starts_with(".git");
                // Gitignores are ordered deepest-first (from `ancestors()`),
                // so an earlier match takes precedence over a shallower one.
                // NOTE(review): a Whitelist match stops the scan but does not
                // clear an `is_ignored` already set — confirm that is the
                // intended precedence.
                for ignore in &ignores {
                    match ignore.matched_path_or_any_parents(path, false) {
                        ignore::Match::None => {}
                        ignore::Match::Ignore(_) => is_ignored = true,
                        ignore::Match::Whitelist(_) => break,
                    }
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .map(|content| String::from_utf8(content).unwrap())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored)))
            })
            .collect();

        let result = self.fs.with_git_state(&self.dot_git_path, false, |state| {
            let mut entries = Vec::new();
            // Union of every path known to HEAD, the index, or the worktree.
            let paths = state
                .head_contents
                .keys()
                .chain(state.index_contents.keys())
                .chain(git_files.keys())
                .collect::<HashSet<_>>();
            for path in paths {
                if !path_prefixes.iter().any(|prefix| path.starts_with(prefix)) {
                    continue;
                }

                let head = state.head_contents.get(path);
                let index = state.index_contents.get(path);
                let unmerged = state.unmerged_paths.get(path);
                let fs = git_files.get(path);
                // Classify by which of (HEAD, index, worktree) contain the
                // path and whether the contents agree between stages.
                let status = match (unmerged, head, index, fs) {
                    // Conflicts take precedence over everything else.
                    (Some(unmerged), _, _, _) => FileStatus::Unmerged(*unmerged),
                    // Present everywhere: compare HEAD vs index (staged) and
                    // index vs worktree (unstaged) independently.
                    (_, Some(head), Some(index), Some((fs, _))) => {
                        FileStatus::Tracked(TrackedStatus {
                            index_status: if head == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                            worktree_status: if fs == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                        })
                    }
                    // Tracked but missing from disk: deleted in the worktree.
                    (_, Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: if head == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Staged deletion, but the file still exists on disk.
                    (_, Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Added,
                    }),
                    // Staged deletion and gone from disk.
                    (_, Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Newly staged file; worktree may have diverged since.
                    (_, None, Some(index), Some((fs, _))) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: if fs == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                    }),
                    // Staged file deleted from disk before being committed.
                    (_, None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // On disk only: untracked, unless gitignored.
                    (_, None, None, Some((_, is_ignored))) => {
                        if *is_ignored {
                            continue;
                        }
                        FileStatus::Untracked
                    }
                    // `paths` was built from these three sources, so at least
                    // one lookup must succeed.
                    (_, None, None, None) => {
                        unreachable!();
                    }
                };
                // Omit entries that are clean in both the index and worktree.
                if status
                    != FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Unmodified,
                        worktree_status: StatusCode::Unmodified,
                    })
                {
                    entries.push((path.clone(), status));
                }
            }
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            anyhow::Ok(GitStatus {
                entries: entries.into(),
            })
        });
        Task::ready(match result {
            Ok(result) => result,
            Err(e) => Err(e),
        })
    }
433
434 fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
435 self.with_state_async(false, |state| Ok(state.stash_entries.clone()))
436 }
437
438 fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
439 self.with_state_async(false, move |state| {
440 let current_branch = &state.current_branch_name;
441 let mut branches = state
442 .branches
443 .iter()
444 .map(|branch_name| {
445 let ref_name = if branch_name.starts_with("refs/") {
446 branch_name.into()
447 } else if branch_name.contains('/') {
448 format!("refs/remotes/{branch_name}").into()
449 } else {
450 format!("refs/heads/{branch_name}").into()
451 };
452 Branch {
453 is_head: Some(branch_name) == current_branch.as_ref(),
454 ref_name,
455 most_recent_commit: None,
456 upstream: None,
457 }
458 })
459 .collect::<Vec<_>>();
460 // compute snapshot expects these to be sorted by ref_name
461 // because that's what git itself does
462 branches.sort_by(|a, b| a.ref_name.cmp(&b.ref_name));
463 Ok(branches)
464 })
465 }
466
    /// Lists the main worktree plus any linked worktrees recorded under
    /// `<common dir>/worktrees/`, mirroring `git worktree list`.
    fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
        let fs = self.fs.clone();
        let common_dir_path = self.common_dir_path.clone();
        let executor = self.executor.clone();

        async move {
            executor.simulate_random_delay().await;

            // Synthesize the main worktree from the shared git state; also
            // grab a copy of `refs` to resolve linked worktrees' HEADs below.
            let (main_worktree, refs) = fs.with_git_state(&common_dir_path, false, |state| {
                let work_dir = common_dir_path
                    .parent()
                    .map(PathBuf::from)
                    .unwrap_or_else(|| common_dir_path.clone());
                let head_sha = state
                    .refs
                    .get("HEAD")
                    .cloned()
                    .unwrap_or_else(|| "0000000".to_string());
                let branch_ref = state
                    .current_branch_name
                    .as_ref()
                    .map(|name| format!("refs/heads/{name}"))
                    .unwrap_or_else(|| "refs/heads/main".to_string());
                let main_wt = Worktree {
                    path: work_dir,
                    ref_name: Some(branch_ref.into()),
                    sha: head_sha.into(),
                    is_main: true,
                };
                (main_wt, state.refs.clone())
            })?;

            let mut all = vec![main_worktree];

            // Each linked worktree has an entry directory containing HEAD and
            // gitdir files (written by `create_worktree`).
            let worktrees_dir = common_dir_path.join("worktrees");
            if let Ok(mut entries) = fs.read_dir(&worktrees_dir).await {
                use futures::StreamExt;
                while let Some(Ok(entry_path)) = entries.next().await {
                    // Skip malformed entries rather than failing the listing.
                    let head_content = match fs.load(&entry_path.join("HEAD")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };
                    let gitdir_content = match fs.load(&entry_path.join("gitdir")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };

                    // A symbolic HEAD ("ref: refs/heads/x") resolves through
                    // `refs`; otherwise HEAD holds the sha directly.
                    let ref_name = head_content
                        .strip_prefix("ref: ")
                        .map(|s| s.trim().to_string());
                    let sha = ref_name
                        .as_ref()
                        .and_then(|r| refs.get(r))
                        .cloned()
                        .unwrap_or_else(|| head_content.trim().to_string());

                    // `gitdir` points at the worktree's `.git` file; its
                    // parent directory is the checkout itself.
                    let worktree_path = PathBuf::from(gitdir_content.trim())
                        .parent()
                        .map(PathBuf::from)
                        .unwrap_or_default();

                    all.push(Worktree {
                        path: worktree_path,
                        ref_name: ref_name.map(Into::into),
                        sha: sha.into(),
                        is_main: false,
                    });
                }
            }

            Ok(all)
        }
        .boxed()
    }
541
    /// Creates a linked worktree at `path`, mimicking `git worktree add`:
    /// validates the target branch, creates the checkout directory, writes the
    /// `.git/worktrees/<name>/` bookkeeping files (HEAD, commondir, gitdir),
    /// drops a `.git` file into the new checkout, and — for new branches —
    /// records the branch and its ref in the git state.
    fn create_worktree(
        &self,
        target: CreateWorktreeTarget,
        path: PathBuf,
    ) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            let branch_name = target.branch_name().map(ToOwned::to_owned);
            let create_branch_ref = matches!(target, CreateWorktreeTarget::NewBranch { .. });

            // Check for simulated error and validate branch state before any side effects.
            fs.with_git_state(&dot_git_path, false, {
                let branch_name = branch_name.clone();
                move |state| {
                    if let Some(message) = &state.simulated_create_worktree_error {
                        anyhow::bail!("{message}");
                    }

                    // New branches must not exist yet; existing branches must.
                    match (create_branch_ref, branch_name.as_ref()) {
                        (true, Some(branch_name)) => {
                            if state.branches.contains(branch_name) {
                                bail!("a branch named '{}' already exists", branch_name);
                            }
                        }
                        (false, Some(branch_name)) => {
                            if !state.branches.contains(branch_name) {
                                bail!("no branch named '{}' exists", branch_name);
                            }
                        }
                        (false, None) => {}
                        (true, None) => bail!("branch name is required to create a branch"),
                    }

                    Ok(())
                }
            })??;

            // Resolve the branch (if any), the sha the new worktree starts
            // at, and whether a new branch ref must be recorded afterwards.
            let (branch_name, sha, create_branch_ref) = match target {
                CreateWorktreeTarget::ExistingBranch { branch_name } => {
                    let ref_name = format!("refs/heads/{branch_name}");
                    let sha = fs.with_git_state(&dot_git_path, false, {
                        move |state| {
                            Ok::<_, anyhow::Error>(
                                state
                                    .refs
                                    .get(&ref_name)
                                    .cloned()
                                    .unwrap_or_else(|| "fake-sha".to_string()),
                            )
                        }
                    })??;
                    (Some(branch_name), sha, false)
                }
                CreateWorktreeTarget::NewBranch {
                    branch_name,
                    base_sha: start_point,
                } => (
                    Some(branch_name),
                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
                    true,
                ),
                CreateWorktreeTarget::Detached {
                    base_sha: start_point,
                } => (
                    None,
                    start_point.unwrap_or_else(|| "fake-sha".to_string()),
                    false,
                ),
            };

            // Create the worktree checkout directory.
            fs.create_dir(&path).await?;

            // Create .git/worktrees/<name>/ directory with HEAD, commondir, gitdir.
            let worktree_entry_name = branch_name.as_deref().unwrap_or_else(|| {
                path.file_name()
                    .and_then(|name| name.to_str())
                    .unwrap_or("detached")
            });
            let worktrees_entry_dir = common_dir_path.join("worktrees").join(worktree_entry_name);
            fs.create_dir(&worktrees_entry_dir).await?;

            // Symbolic HEAD for a branch checkout, a bare sha when detached.
            let head_content = if let Some(ref branch_name) = branch_name {
                let ref_name = format!("refs/heads/{branch_name}");
                format!("ref: {ref_name}")
            } else {
                sha.clone()
            };
            fs.write_file_internal(
                worktrees_entry_dir.join("HEAD"),
                head_content.into_bytes(),
                false,
            )?;
            fs.write_file_internal(
                worktrees_entry_dir.join("commondir"),
                common_dir_path.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;
            let worktree_dot_git = path.join(".git");
            fs.write_file_internal(
                worktrees_entry_dir.join("gitdir"),
                worktree_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Create .git file in the worktree checkout.
            fs.write_file_internal(
                &worktree_dot_git,
                format!("gitdir: {}", worktrees_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Update git state for newly created branches.
            if create_branch_ref {
                fs.with_git_state(&dot_git_path, true, {
                    let branch_name = branch_name.clone();
                    let sha = sha.clone();
                    move |state| {
                        if let Some(branch_name) = branch_name {
                            let ref_name = format!("refs/heads/{branch_name}");
                            state.refs.insert(ref_name, sha);
                            state.branches.insert(branch_name);
                        }
                        Ok::<(), anyhow::Error>(())
                    }
                })??;
            }

            Ok(())
        }
        .boxed()
    }
679
    /// Removes a linked worktree, mimicking `git worktree remove`: deletes
    /// both the checkout directory and its `.git/worktrees/<name>/` entry,
    /// then pokes the git state so observers rescan.
    fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Read the worktree's .git file to find its entry directory.
            let dot_git_file = path.join(".git");
            let content = fs
                .load(&dot_git_file)
                .await
                .with_context(|| format!("no worktree found at path: {}", path.display()))?;
            let gitdir = content
                .strip_prefix("gitdir:")
                .context("invalid .git file in worktree")?
                .trim();
            let worktree_entry_dir = PathBuf::from(gitdir);

            // Remove the worktree checkout directory.
            fs.remove_dir(
                &path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: false,
                },
            )
            .await?;

            // Remove the .git/worktrees/<name>/ directory.
            fs.remove_dir(
                &worktree_entry_dir,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: false,
                },
            )
            .await?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
727
    /// Moves a linked worktree, mimicking `git worktree move`: renames the
    /// checkout directory and rewrites the two files that link the checkout
    /// and its `.git/worktrees/<name>/` entry to each other.
    fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Read the worktree's .git file to find its entry directory.
            let dot_git_file = old_path.join(".git");
            let content = fs
                .load(&dot_git_file)
                .await
                .with_context(|| format!("no worktree found at path: {}", old_path.display()))?;
            let gitdir = content
                .strip_prefix("gitdir:")
                .context("invalid .git file in worktree")?
                .trim();
            let worktree_entry_dir = PathBuf::from(gitdir);

            // Move the worktree checkout directory.
            fs.rename(
                &old_path,
                &new_path,
                RenameOptions {
                    overwrite: false,
                    ignore_if_exists: false,
                    create_parents: true,
                },
            )
            .await?;

            // Update the gitdir file in .git/worktrees/<name>/ to point to the
            // new location.
            let new_dot_git = new_path.join(".git");
            fs.write_file_internal(
                worktree_entry_dir.join("gitdir"),
                new_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Update the .git file in the moved worktree checkout.
            fs.write_file_internal(
                &new_dot_git,
                format!("gitdir: {}", worktree_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
783
784 fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
785 self.with_state_async(true, |state| {
786 state.current_branch_name = Some(name);
787 Ok(())
788 })
789 }
790
791 fn create_branch(
792 &self,
793 name: String,
794 _base_branch: Option<String>,
795 ) -> BoxFuture<'_, Result<()>> {
796 self.with_state_async(true, move |state| {
797 if let Some((remote, _)) = name.split_once('/')
798 && !state.remotes.contains_key(remote)
799 {
800 state.remotes.insert(remote.to_owned(), "".to_owned());
801 }
802 state.branches.insert(name);
803 Ok(())
804 })
805 }
806
807 fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> {
808 self.with_state_async(true, move |state| {
809 if !state.branches.remove(&branch) {
810 bail!("no such branch: {branch}");
811 }
812 state.branches.insert(new_name.clone());
813 if state.current_branch_name == Some(branch) {
814 state.current_branch_name = Some(new_name);
815 }
816 Ok(())
817 })
818 }
819
820 fn delete_branch(&self, _is_remote: bool, name: String) -> BoxFuture<'_, Result<()>> {
821 self.with_state_async(true, move |state| {
822 if !state.branches.remove(&name) {
823 bail!("no such branch: {name}");
824 }
825 Ok(())
826 })
827 }
828
829 fn blame(
830 &self,
831 path: RepoPath,
832 _content: Rope,
833 _line_ending: LineEnding,
834 ) -> BoxFuture<'_, Result<git::blame::Blame>> {
835 self.with_state_async(false, move |state| {
836 state
837 .blames
838 .get(&path)
839 .with_context(|| format!("failed to get blame for {:?}", path))
840 .cloned()
841 })
842 }
843
    /// Convenience wrapper: full history is pagination with no skip or limit.
    fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
847
848 fn file_history_paginated(
849 &self,
850 path: RepoPath,
851 _skip: usize,
852 _limit: Option<usize>,
853 ) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
854 async move {
855 Ok(git::repository::FileHistory {
856 entries: Vec::new(),
857 path,
858 })
859 }
860 .boxed()
861 }
862
    /// Stages `paths`: copies each path's current on-disk contents into the
    /// index, and removes index entries for paths missing from disk (staging
    /// a deletion).
    fn stage_paths(
        &self,
        paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        Box::pin(async move {
            // Read every file from the fake fs concurrently before touching
            // the git state.
            let contents = paths
                .into_iter()
                .map(|path| {
                    let abs_path = self
                        .dot_git_path
                        .parent()
                        .unwrap()
                        .join(&path.as_std_path());
                    Box::pin(async move { (path.clone(), self.fs.load(&abs_path).await.ok()) })
                })
                .collect::<Vec<_>>();
            let contents = join_all(contents).await;
            self.with_state_async(true, move |state| {
                for (path, content) in contents {
                    if let Some(content) = content {
                        state.index_contents.insert(path, content);
                    } else {
                        // File is gone from disk: stage the deletion.
                        state.index_contents.remove(&path);
                    }
                }
                Ok(())
            })
            .await
        })
    }
894
895 fn unstage_paths(
896 &self,
897 paths: Vec<RepoPath>,
898 _env: Arc<HashMap<String, String>>,
899 ) -> BoxFuture<'_, Result<()>> {
900 self.with_state_async(true, move |state| {
901 for path in paths {
902 match state.head_contents.get(&path) {
903 Some(content) => state.index_contents.insert(path, content.clone()),
904 None => state.index_contents.remove(&path),
905 };
906 }
907 Ok(())
908 })
909 }
910
    /// Not supported by the fake repository.
    fn stash_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
918
    /// Not supported by the fake repository.
    fn stash_pop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
926
    /// Not supported by the fake repository.
    fn stash_apply(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
934
    /// Not supported by the fake repository.
    fn stash_drop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
942
    /// Records a commit: pushes the current HEAD/index (plus the old HEAD sha)
    /// onto `commit_history`, promotes the index to the new HEAD, and points
    /// the HEAD ref at a synthetic sha.
    ///
    /// NOTE(review): `amend` only bypasses the empty-commit check; a new
    /// history entry is still appended rather than replacing the previous
    /// commit. The synthetic sha is derived from the history length, so shas
    /// can repeat after a `reset` — confirm callers don't rely on uniqueness.
    fn commit(
        &self,
        _message: gpui::SharedString,
        _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
        options: CommitOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            if !options.allow_empty && !options.amend && state.index_contents == state.head_contents
            {
                anyhow::bail!("nothing to commit (use allow_empty to create an empty commit)");
            }

            // Snapshot the pre-commit state so `reset` can restore it later.
            let old_sha = state.refs.get("HEAD").cloned().unwrap_or_default();
            state.commit_history.push(FakeCommitSnapshot {
                head_contents: state.head_contents.clone(),
                index_contents: state.index_contents.clone(),
                sha: old_sha,
            });

            state.head_contents = state.index_contents.clone();

            let new_sha = format!("fake-commit-{}", state.commit_history.len());
            state.refs.insert("HEAD".into(), new_sha);

            Ok(())
        })
    }
972
973 fn run_hook(
974 &self,
975 _hook: RunHook,
976 _env: Arc<HashMap<String, String>>,
977 ) -> BoxFuture<'_, Result<()>> {
978 async { Ok(()) }.boxed()
979 }
980
    /// Not supported by the fake repository.
    fn push(
        &self,
        _branch: String,
        _remote_branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
993
    /// Not supported by the fake repository.
    fn pull(
        &self,
        _branch: Option<String>,
        _remote: String,
        _rebase: bool,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
1005
    /// Not supported by the fake repository.
    fn fetch(
        &self,
        _fetch_options: FetchOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
1015
1016 fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
1017 self.with_state_async(false, move |state| {
1018 let remotes = state
1019 .remotes
1020 .keys()
1021 .map(|r| Remote {
1022 name: r.clone().into(),
1023 })
1024 .collect::<Vec<_>>();
1025 Ok(remotes)
1026 })
1027 }
1028
    /// Not supported by the fake repository.
    fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }
1032
    /// Not supported by the fake repository.
    fn get_branch_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }
1036
1037 fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
1038 future::ready(Ok(Vec::new())).boxed()
1039 }
1040
1041 fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result<String>> {
1042 future::ready(Ok(String::new())).boxed()
1043 }
1044
1045 fn diff_stat(
1046 &self,
1047 path_prefixes: &[RepoPath],
1048 ) -> BoxFuture<'_, Result<git::status::GitDiffStat>> {
1049 fn count_lines(s: &str) -> u32 {
1050 if s.is_empty() {
1051 0
1052 } else {
1053 s.lines().count() as u32
1054 }
1055 }
1056
1057 fn matches_prefixes(path: &RepoPath, prefixes: &[RepoPath]) -> bool {
1058 if prefixes.is_empty() {
1059 return true;
1060 }
1061 prefixes.iter().any(|prefix| {
1062 let prefix_str = prefix.as_unix_str();
1063 if prefix_str == "." {
1064 return true;
1065 }
1066 path == prefix || path.starts_with(&prefix)
1067 })
1068 }
1069
1070 let path_prefixes = path_prefixes.to_vec();
1071
1072 let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf();
1073 let worktree_files: HashMap<RepoPath, String> = self
1074 .fs
1075 .files()
1076 .iter()
1077 .filter_map(|path| {
1078 let repo_path = path.strip_prefix(&workdir_path).ok()?;
1079 if repo_path.starts_with(".git") {
1080 return None;
1081 }
1082 let content = self
1083 .fs
1084 .read_file_sync(path)
1085 .ok()
1086 .and_then(|bytes| String::from_utf8(bytes).ok())?;
1087 let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
1088 Some((RepoPath::from_rel_path(&repo_path), content))
1089 })
1090 .collect();
1091
1092 self.with_state_async(false, move |state| {
1093 let mut entries = Vec::new();
1094 let all_paths: HashSet<&RepoPath> = state
1095 .head_contents
1096 .keys()
1097 .chain(
1098 worktree_files
1099 .keys()
1100 .filter(|p| state.index_contents.contains_key(*p)),
1101 )
1102 .collect();
1103 for path in all_paths {
1104 if !matches_prefixes(path, &path_prefixes) {
1105 continue;
1106 }
1107 let head = state.head_contents.get(path);
1108 let worktree = worktree_files.get(path);
1109 match (head, worktree) {
1110 (Some(old), Some(new)) if old != new => {
1111 entries.push((
1112 path.clone(),
1113 git::status::DiffStat {
1114 added: count_lines(new),
1115 deleted: count_lines(old),
1116 },
1117 ));
1118 }
1119 (Some(old), None) => {
1120 entries.push((
1121 path.clone(),
1122 git::status::DiffStat {
1123 added: 0,
1124 deleted: count_lines(old),
1125 },
1126 ));
1127 }
1128 (None, Some(new)) => {
1129 entries.push((
1130 path.clone(),
1131 git::status::DiffStat {
1132 added: count_lines(new),
1133 deleted: 0,
1134 },
1135 ));
1136 }
1137 _ => {}
1138 }
1139 }
1140 entries.sort_by(|(a, _), (b, _)| a.cmp(b));
1141 Ok(git::status::GitDiffStat {
1142 entries: entries.into(),
1143 })
1144 })
1145 .boxed()
1146 }
1147
1148 fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
1149 let executor = self.executor.clone();
1150 let fs = self.fs.clone();
1151 let checkpoints = self.checkpoints.clone();
1152 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1153 async move {
1154 executor.simulate_random_delay().await;
1155 let oid = git::Oid::random(&mut *executor.rng().lock());
1156 let entry = fs.entry(&repository_dir_path)?;
1157 checkpoints.lock().insert(oid, entry);
1158 Ok(GitRepositoryCheckpoint { commit_sha: oid })
1159 }
1160 .boxed()
1161 }
1162
1163 fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
1164 let executor = self.executor.clone();
1165 let fs = self.fs.clone();
1166 let checkpoints = self.checkpoints.clone();
1167 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1168 async move {
1169 executor.simulate_random_delay().await;
1170 let checkpoints = checkpoints.lock();
1171 let entry = checkpoints
1172 .get(&checkpoint.commit_sha)
1173 .context(format!("invalid checkpoint: {}", checkpoint.commit_sha))?;
1174 fs.insert_entry(&repository_dir_path, entry.clone())?;
1175 Ok(())
1176 }
1177 .boxed()
1178 }
1179
1180 fn compare_checkpoints(
1181 &self,
1182 left: GitRepositoryCheckpoint,
1183 right: GitRepositoryCheckpoint,
1184 ) -> BoxFuture<'_, Result<bool>> {
1185 let executor = self.executor.clone();
1186 let checkpoints = self.checkpoints.clone();
1187 async move {
1188 executor.simulate_random_delay().await;
1189 let checkpoints = checkpoints.lock();
1190 let left = checkpoints
1191 .get(&left.commit_sha)
1192 .context(format!("invalid left checkpoint: {}", left.commit_sha))?;
1193 let right = checkpoints
1194 .get(&right.commit_sha)
1195 .context(format!("invalid right checkpoint: {}", right.commit_sha))?;
1196
1197 Ok(left == right)
1198 }
1199 .boxed()
1200 }
1201
1202 fn diff_checkpoints(
1203 &self,
1204 base_checkpoint: GitRepositoryCheckpoint,
1205 target_checkpoint: GitRepositoryCheckpoint,
1206 ) -> BoxFuture<'_, Result<String>> {
1207 let executor = self.executor.clone();
1208 let checkpoints = self.checkpoints.clone();
1209 async move {
1210 executor.simulate_random_delay().await;
1211 let checkpoints = checkpoints.lock();
1212 let base = checkpoints
1213 .get(&base_checkpoint.commit_sha)
1214 .context(format!(
1215 "invalid base checkpoint: {}",
1216 base_checkpoint.commit_sha
1217 ))?;
1218 let target = checkpoints
1219 .get(&target_checkpoint.commit_sha)
1220 .context(format!(
1221 "invalid target checkpoint: {}",
1222 target_checkpoint.commit_sha
1223 ))?;
1224
1225 fn collect_files(
1226 entry: &FakeFsEntry,
1227 prefix: String,
1228 out: &mut std::collections::BTreeMap<String, String>,
1229 ) {
1230 match entry {
1231 FakeFsEntry::File { content, .. } => {
1232 out.insert(prefix, String::from_utf8_lossy(content).into_owned());
1233 }
1234 FakeFsEntry::Dir { entries, .. } => {
1235 for (name, child) in entries {
1236 let path = if prefix.is_empty() {
1237 name.clone()
1238 } else {
1239 format!("{prefix}/{name}")
1240 };
1241 collect_files(child, path, out);
1242 }
1243 }
1244 FakeFsEntry::Symlink { .. } => {}
1245 }
1246 }
1247
1248 let mut base_files = std::collections::BTreeMap::new();
1249 let mut target_files = std::collections::BTreeMap::new();
1250 collect_files(base, String::new(), &mut base_files);
1251 collect_files(target, String::new(), &mut target_files);
1252
1253 let all_paths: std::collections::BTreeSet<&String> =
1254 base_files.keys().chain(target_files.keys()).collect();
1255
1256 let mut diff = String::new();
1257 for path in all_paths {
1258 match (base_files.get(path), target_files.get(path)) {
1259 (Some(base_content), Some(target_content))
1260 if base_content != target_content =>
1261 {
1262 diff.push_str(&format!("diff --git a/{path} b/{path}\n"));
1263 diff.push_str(&format!("--- a/{path}\n"));
1264 diff.push_str(&format!("+++ b/{path}\n"));
1265 for line in base_content.lines() {
1266 diff.push_str(&format!("-{line}\n"));
1267 }
1268 for line in target_content.lines() {
1269 diff.push_str(&format!("+{line}\n"));
1270 }
1271 }
1272 (Some(_), None) => {
1273 diff.push_str(&format!("diff --git a/{path} /dev/null\n"));
1274 diff.push_str("deleted file\n");
1275 }
1276 (None, Some(_)) => {
1277 diff.push_str(&format!("diff --git /dev/null b/{path}\n"));
1278 diff.push_str("new file\n");
1279 }
1280 _ => {}
1281 }
1282 }
1283 Ok(diff)
1284 }
1285 .boxed()
1286 }
1287
1288 fn default_branch(
1289 &self,
1290 include_remote_name: bool,
1291 ) -> BoxFuture<'_, Result<Option<SharedString>>> {
1292 async move {
1293 Ok(Some(if include_remote_name {
1294 "origin/main".into()
1295 } else {
1296 "main".into()
1297 }))
1298 }
1299 .boxed()
1300 }
1301
1302 fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
1303 self.with_state_async(true, move |state| {
1304 state.remotes.insert(name, url);
1305 Ok(())
1306 })
1307 }
1308
1309 fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
1310 self.with_state_async(true, move |state| {
1311 state.branches.retain(|branch| {
1312 branch
1313 .split_once('/')
1314 .is_none_or(|(remote, _)| remote != name)
1315 });
1316 state.remotes.remove(&name);
1317 Ok(())
1318 })
1319 }
1320
1321 fn initial_graph_data(
1322 &self,
1323 _log_source: LogSource,
1324 _log_order: LogOrder,
1325 request_tx: Sender<Vec<Arc<InitialGraphCommitData>>>,
1326 ) -> BoxFuture<'_, Result<()>> {
1327 let fs = self.fs.clone();
1328 let dot_git_path = self.dot_git_path.clone();
1329 async move {
1330 let graph_commits =
1331 fs.with_git_state(&dot_git_path, false, |state| state.graph_commits.clone())?;
1332
1333 for chunk in graph_commits.chunks(GRAPH_CHUNK_SIZE) {
1334 request_tx.send(chunk.to_vec()).await.ok();
1335 }
1336 Ok(())
1337 }
1338 .boxed()
1339 }
1340
1341 fn search_commits(
1342 &self,
1343 _log_source: LogSource,
1344 _search_args: SearchCommitArgs,
1345 _request_tx: Sender<Oid>,
1346 ) -> BoxFuture<'_, Result<()>> {
1347 async { bail!("search_commits not supported for FakeGitRepository") }.boxed()
1348 }
1349
1350 fn commit_data_reader(&self) -> Result<CommitDataReader> {
1351 anyhow::bail!("commit_data_reader not supported for FakeGitRepository")
1352 }
1353
1354 fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> {
1355 self.with_state_async(true, move |state| {
1356 state.refs.insert(ref_name, commit);
1357 Ok(())
1358 })
1359 }
1360
1361 fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> {
1362 self.with_state_async(true, move |state| {
1363 state.refs.remove(&ref_name);
1364 Ok(())
1365 })
1366 }
1367
1368 fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>> {
1369 async { Ok(()) }.boxed()
1370 }
1371
1372 fn set_trusted(&self, trusted: bool) {
1373 self.is_trusted
1374 .store(trusted, std::sync::atomic::Ordering::Release);
1375 }
1376
1377 fn is_trusted(&self) -> bool {
1378 self.is_trusted.load(std::sync::atomic::Ordering::Acquire)
1379 }
1380}