1use crate::{FakeFs, FakeFsEntry, Fs, RemoveOptions, RenameOptions};
2use anyhow::{Context as _, Result, bail};
3use collections::{HashMap, HashSet};
4use futures::future::{self, BoxFuture, join_all};
5use git::{
6 Oid, RunHook,
7 blame::Blame,
8 repository::{
9 AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions, FetchOptions,
10 GRAPH_CHUNK_SIZE, GitRepository, GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder,
11 LogSource, PushOptions, Remote, RepoPath, ResetMode, SearchCommitArgs, Worktree,
12 },
13 status::{
14 DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
15 UnmergedStatus,
16 },
17};
18use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task};
19use ignore::gitignore::GitignoreBuilder;
20use parking_lot::Mutex;
21use rope::Rope;
22use smol::{channel::Sender, future::FutureExt as _};
23use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool};
24use text::LineEnding;
25use util::{paths::PathStyle, rel_path::RelPath};
26
/// In-memory stand-in for a git repository, backed by [`FakeFs`]; used in tests.
#[derive(Clone)]
pub struct FakeGitRepository {
    // Fake filesystem that owns this repository's git state.
    pub(crate) fs: Arc<FakeFs>,
    // Snapshots recorded by `checkpoint`, keyed by a randomly generated oid.
    pub(crate) checkpoints: Arc<Mutex<HashMap<Oid, FakeFsEntry>>>,
    pub(crate) executor: BackgroundExecutor,
    // Path to this repository's `.git` entry in the fake filesystem.
    pub(crate) dot_git_path: PathBuf,
    pub(crate) repository_dir_path: PathBuf,
    // For linked worktrees this points at the main repository's git dir;
    // otherwise it matches the repository dir.
    pub(crate) common_dir_path: PathBuf,
    pub(crate) is_trusted: Arc<AtomicBool>,
}
37
/// Mutable state of a fake repository: HEAD/index/merge-base contents,
/// branches, remotes, refs, and test-only error injection knobs.
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    // Channel used to notify listeners that the repository changed.
    pub event_emitter: smol::channel::Sender<PathBuf>,
    pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
    pub head_contents: HashMap<RepoPath, String>,
    pub index_contents: HashMap<RepoPath, String>,
    // everything in commit contents is in oids
    pub merge_base_contents: HashMap<RepoPath, Oid>,
    // Blob store: oid -> file content.
    pub oids: HashMap<Oid, String>,
    pub blames: HashMap<RepoPath, Blame>,
    pub current_branch_name: Option<String>,
    pub branches: HashSet<String>,
    /// List of remotes, keys are names and values are URLs
    pub remotes: HashMap<String, String>,
    // When set, `set_index_text` fails with this message (test hook).
    pub simulated_index_write_error_message: Option<String>,
    // When set, `create_worktree` fails with this message (test hook).
    pub simulated_create_worktree_error: Option<String>,
    pub refs: HashMap<String, String>,
    pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
}
57
58impl FakeGitRepositoryState {
59 pub fn new(event_emitter: smol::channel::Sender<PathBuf>) -> Self {
60 FakeGitRepositoryState {
61 event_emitter,
62 head_contents: Default::default(),
63 index_contents: Default::default(),
64 unmerged_paths: Default::default(),
65 blames: Default::default(),
66 current_branch_name: Default::default(),
67 branches: Default::default(),
68 simulated_index_write_error_message: Default::default(),
69 simulated_create_worktree_error: Default::default(),
70 refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
71 merge_base_contents: Default::default(),
72 oids: Default::default(),
73 remotes: HashMap::default(),
74 graph_commits: Vec::new(),
75 }
76 }
77}
78
impl FakeGitRepository {
    /// Runs `f` against this repository's mutable [`FakeGitRepositoryState`]
    /// after a simulated random delay.
    ///
    /// `write` is forwarded to `FakeFs::with_git_state`; callers that mutate
    /// state pass `true` so the fake fs treats the access as a change.
    /// The returned future is `'static` (it captures clones, not `&self`),
    /// so it can outlive the repository handle.
    fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<'static, Result<T>>
    where
        F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> Result<T>,
        T: Send,
    {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // `with_git_state` returns Result<Result<T>>; `?` flattens the
            // outer lookup error, the inner result is the block's value.
            fs.with_git_state(&dot_git_path, write, f)?
        }
        .boxed()
    }
}
95
96impl GitRepository for FakeGitRepository {
    /// No-op: the fake repository has no on-disk index to reload.
    fn reload_index(&self) {}

    /// Returns the staged (index) content for `path`, or `None` when the
    /// path is absent from the fake index.
    fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
        let fut = self.with_state_async(false, move |state| {
            state
                .index_contents
                .get(&path)
                .context("not present in index")
                .cloned()
        });
        // Spawn on the executor and collapse the error into `None`.
        self.executor.spawn(async move { fut.await.ok() }).boxed()
    }

    /// Returns the committed (HEAD) content for `path`, or `None` when the
    /// path is absent from the fake HEAD tree.
    fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
        let fut = self.with_state_async(false, move |state| {
            state
                .head_contents
                .get(&path)
                .context("not present in HEAD")
                .cloned()
        });
        self.executor.spawn(async move { fut.await.ok() }).boxed()
    }
120
121 fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
122 self.with_state_async(false, move |state| {
123 state.oids.get(&oid).cloned().context("oid does not exist")
124 })
125 .boxed()
126 }
127
    /// Unsupported in the fake repository; panics if called.
    fn load_commit(
        &self,
        _commit: String,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::CommitDiff>> {
        unimplemented!()
    }
135
136 fn set_index_text(
137 &self,
138 path: RepoPath,
139 content: Option<String>,
140 _env: Arc<HashMap<String, String>>,
141 _is_executable: bool,
142 ) -> BoxFuture<'_, anyhow::Result<()>> {
143 self.with_state_async(true, move |state| {
144 if let Some(message) = &state.simulated_index_write_error_message {
145 anyhow::bail!("{message}");
146 } else if let Some(content) = content {
147 state.index_contents.insert(path, content);
148 } else {
149 state.index_contents.remove(&path);
150 }
151 Ok(())
152 })
153 }
154
155 fn remote_url(&self, name: &str) -> BoxFuture<'_, Option<String>> {
156 let name = name.to_string();
157 let fut = self.with_state_async(false, move |state| {
158 state
159 .remotes
160 .get(&name)
161 .context("remote not found")
162 .cloned()
163 });
164 async move { fut.await.ok() }.boxed()
165 }
166
167 fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
168 let mut entries = HashMap::default();
169 self.with_state_async(false, |state| {
170 for (path, content) in &state.head_contents {
171 let status = if let Some((oid, original)) = state
172 .merge_base_contents
173 .get(path)
174 .map(|oid| (oid, &state.oids[oid]))
175 {
176 if original == content {
177 continue;
178 }
179 TreeDiffStatus::Modified { old: *oid }
180 } else {
181 TreeDiffStatus::Added
182 };
183 entries.insert(path.clone(), status);
184 }
185 for (path, oid) in &state.merge_base_contents {
186 if !entries.contains_key(path) {
187 entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
188 }
189 }
190 Ok(TreeDiff { entries })
191 })
192 .boxed()
193 }
194
195 fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
196 self.with_state_async(false, |state| {
197 Ok(revs
198 .into_iter()
199 .map(|rev| state.refs.get(&rev).cloned())
200 .collect())
201 })
202 }
203
204 fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
205 async {
206 Ok(CommitDetails {
207 sha: commit.into(),
208 message: "initial commit".into(),
209 ..Default::default()
210 })
211 }
212 .boxed()
213 }
214
    /// Unsupported in the fake repository; panics if called.
    fn reset(
        &self,
        _commit: String,
        _mode: ResetMode,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// Unsupported in the fake repository; panics if called.
    fn checkout_files(
        &self,
        _commit: String,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// Path to this repository's git directory.
    fn path(&self) -> PathBuf {
        self.repository_dir_path.clone()
    }

    /// Path to the main repository's git directory (can differ from
    /// [`Self::path`] for linked worktrees).
    fn main_repository_path(&self) -> PathBuf {
        self.common_dir_path.clone()
    }

    /// The fake repository is never mid-merge, so there is no merge message.
    fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
        async move { None }.boxed()
    }
244
    /// Computes git status by comparing HEAD, the index, and the simulated
    /// working copy in `FakeFs`, honoring `.gitignore` files found in the
    /// workdir's ancestor directories.
    ///
    /// NOTE(review): a path is only reported if it matches one of
    /// `path_prefixes`; an empty slice therefore yields no entries (unlike
    /// `diff_stat`, which treats an empty prefix list as match-all) — confirm
    /// callers always pass at least one prefix.
    fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>> {
        let workdir_path = self.dot_git_path.parent().unwrap();

        // Load gitignores
        let ignores = workdir_path
            .ancestors()
            .filter_map(|dir| {
                let ignore_path = dir.join(".gitignore");
                let content = self.fs.read_file_sync(ignore_path).ok()?;
                let content = String::from_utf8(content).ok()?;
                let mut builder = GitignoreBuilder::new(dir);
                for line in content.lines() {
                    builder.add_line(Some(dir.into()), line).ok()?;
                }
                builder.build().ok()
            })
            .collect::<Vec<_>>();

        // Load working copy files.
        let git_files: HashMap<RepoPath, (String, bool)> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                // TODO better simulate git status output in the case of submodules and worktrees
                let repo_path = path.strip_prefix(workdir_path).ok()?;
                let mut is_ignored = repo_path.starts_with(".git");
                for ignore in &ignores {
                    match ignore.matched_path_or_any_parents(path, false) {
                        ignore::Match::None => {}
                        ignore::Match::Ignore(_) => is_ignored = true,
                        // A whitelist rule overrides any later ignore rules.
                        ignore::Match::Whitelist(_) => break,
                    }
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .map(|content| String::from_utf8(content).unwrap())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored)))
            })
            .collect();

        let result = self.fs.with_git_state(&self.dot_git_path, false, |state| {
            let mut entries = Vec::new();
            // Consider every path known to HEAD, the index, or the worktree.
            let paths = state
                .head_contents
                .keys()
                .chain(state.index_contents.keys())
                .chain(git_files.keys())
                .collect::<HashSet<_>>();
            for path in paths {
                if !path_prefixes.iter().any(|prefix| path.starts_with(prefix)) {
                    continue;
                }

                let head = state.head_contents.get(path);
                let index = state.index_contents.get(path);
                let unmerged = state.unmerged_paths.get(path);
                let fs = git_files.get(path);
                // Derive the status from the (unmerged, HEAD, index,
                // worktree) presence/content combination.
                let status = match (unmerged, head, index, fs) {
                    (Some(unmerged), _, _, _) => FileStatus::Unmerged(*unmerged),
                    (_, Some(head), Some(index), Some((fs, _))) => {
                        FileStatus::Tracked(TrackedStatus {
                            index_status: if head == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                            worktree_status: if fs == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                        })
                    }
                    (_, Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: if head == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Added,
                    }),
                    (_, Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, None, Some(index), Some((fs, _))) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: if fs == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                    }),
                    (_, None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, None, None, Some((_, is_ignored))) => {
                        if *is_ignored {
                            continue;
                        }
                        FileStatus::Untracked
                    }
                    // Every path came from one of the three sources above.
                    (_, None, None, None) => {
                        unreachable!();
                    }
                };
                // Fully unmodified entries are omitted from the report.
                if status
                    != FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Unmodified,
                        worktree_status: StatusCode::Unmodified,
                    })
                {
                    entries.push((path.clone(), status));
                }
            }
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            anyhow::Ok(GitStatus {
                entries: entries.into(),
            })
        });
        Task::ready(match result {
            Ok(result) => result,
            Err(e) => Err(e),
        })
    }
379
380 fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
381 async { Ok(git::stash::GitStash::default()) }.boxed()
382 }
383
384 fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
385 self.with_state_async(false, move |state| {
386 let current_branch = &state.current_branch_name;
387 Ok(state
388 .branches
389 .iter()
390 .map(|branch_name| {
391 let ref_name = if branch_name.starts_with("refs/") {
392 branch_name.into()
393 } else if branch_name.contains('/') {
394 format!("refs/remotes/{branch_name}").into()
395 } else {
396 format!("refs/heads/{branch_name}").into()
397 };
398 Branch {
399 is_head: Some(branch_name) == current_branch.as_ref(),
400 ref_name,
401 most_recent_commit: None,
402 upstream: None,
403 }
404 })
405 .collect())
406 })
407 }
408
    /// Lists the main worktree plus any linked worktrees recorded under
    /// `<common_dir>/worktrees/`, mirroring git's on-disk layout of
    /// `HEAD`/`gitdir` files per worktree entry.
    fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
        let fs = self.fs.clone();
        let common_dir_path = self.common_dir_path.clone();
        let executor = self.executor.clone();

        async move {
            executor.simulate_random_delay().await;

            // Build the main worktree entry from the shared git state.
            let (main_worktree, refs) = fs.with_git_state(&common_dir_path, false, |state| {
                let work_dir = common_dir_path
                    .parent()
                    .map(PathBuf::from)
                    .unwrap_or_else(|| common_dir_path.clone());
                let head_sha = state
                    .refs
                    .get("HEAD")
                    .cloned()
                    .unwrap_or_else(|| "0000000".to_string());
                let branch_ref = state
                    .current_branch_name
                    .as_ref()
                    .map(|name| format!("refs/heads/{name}"))
                    .unwrap_or_else(|| "refs/heads/main".to_string());
                let main_wt = Worktree {
                    path: work_dir,
                    ref_name: Some(branch_ref.into()),
                    sha: head_sha.into(),
                    is_main: true,
                };
                (main_wt, state.refs.clone())
            })?;

            let mut all = vec![main_worktree];

            // Each directory under `worktrees/` describes one linked
            // worktree; entries missing HEAD or gitdir are skipped.
            let worktrees_dir = common_dir_path.join("worktrees");
            if let Ok(mut entries) = fs.read_dir(&worktrees_dir).await {
                use futures::StreamExt;
                while let Some(Ok(entry_path)) = entries.next().await {
                    let head_content = match fs.load(&entry_path.join("HEAD")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };
                    let gitdir_content = match fs.load(&entry_path.join("gitdir")).await {
                        Ok(content) => content,
                        Err(_) => continue,
                    };

                    // Symbolic HEAD ("ref: ...") resolves through the refs
                    // table; otherwise HEAD is assumed to hold a raw sha.
                    let ref_name = head_content
                        .strip_prefix("ref: ")
                        .map(|s| s.trim().to_string());
                    let sha = ref_name
                        .as_ref()
                        .and_then(|r| refs.get(r))
                        .cloned()
                        .unwrap_or_else(|| head_content.trim().to_string());

                    // `gitdir` points at the checkout's `.git` file; its
                    // parent is the worktree root.
                    let worktree_path = PathBuf::from(gitdir_content.trim())
                        .parent()
                        .map(PathBuf::from)
                        .unwrap_or_default();

                    all.push(Worktree {
                        path: worktree_path,
                        ref_name: ref_name.map(Into::into),
                        sha: sha.into(),
                        is_main: false,
                    });
                }
            }

            Ok(all)
        }
        .boxed()
    }
483
    /// Creates a linked worktree at `path` on a new branch `branch_name`,
    /// reproducing git's layout: a `worktrees/<branch>` entry holding
    /// `HEAD`/`commondir`/`gitdir`, and a `.git` file in the checkout.
    ///
    /// # Errors
    /// Fails when the `simulated_create_worktree_error` test hook is set or
    /// a branch with the same name already exists.
    fn create_worktree(
        &self,
        branch_name: String,
        path: PathBuf,
        from_commit: Option<String>,
    ) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;
            // Check for simulated error and duplicate branch before any side effects.
            fs.with_git_state(&dot_git_path, false, |state| {
                if let Some(message) = &state.simulated_create_worktree_error {
                    anyhow::bail!("{message}");
                }
                if state.branches.contains(&branch_name) {
                    bail!("a branch named '{}' already exists", branch_name);
                }
                Ok(())
            })??;

            // Create the worktree checkout directory.
            fs.create_dir(&path).await?;

            // Create .git/worktrees/<name>/ directory with HEAD, commondir, gitdir.
            let ref_name = format!("refs/heads/{branch_name}");
            let worktrees_entry_dir = common_dir_path.join("worktrees").join(&branch_name);
            fs.create_dir(&worktrees_entry_dir).await?;

            fs.write_file_internal(
                worktrees_entry_dir.join("HEAD"),
                format!("ref: {ref_name}").into_bytes(),
                false,
            )?;
            fs.write_file_internal(
                worktrees_entry_dir.join("commondir"),
                common_dir_path.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;
            let worktree_dot_git = path.join(".git");
            fs.write_file_internal(
                worktrees_entry_dir.join("gitdir"),
                worktree_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Create .git file in the worktree checkout.
            fs.write_file_internal(
                &worktree_dot_git,
                format!("gitdir: {}", worktrees_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Update git state: add ref and branch.
            let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string());
            fs.with_git_state(&dot_git_path, true, move |state| {
                state.refs.insert(ref_name, sha);
                state.branches.insert(branch_name);
                Ok::<(), anyhow::Error>(())
            })??;
            Ok(())
        }
        .boxed()
    }
550
    /// Removes the linked worktree rooted at `path`: deletes the checkout
    /// directory and its `worktrees/<name>` entry, then touches the git
    /// state so watchers are notified. `_force` is ignored by the fake.
    fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Read the worktree's .git file to find its entry directory.
            let dot_git_file = path.join(".git");
            let content = fs
                .load(&dot_git_file)
                .await
                .with_context(|| format!("no worktree found at path: {}", path.display()))?;
            let gitdir = content
                .strip_prefix("gitdir:")
                .context("invalid .git file in worktree")?
                .trim();
            let worktree_entry_dir = PathBuf::from(gitdir);

            // Remove the worktree checkout directory.
            fs.remove_dir(
                &path,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: false,
                },
            )
            .await?;

            // Remove the .git/worktrees/<name>/ directory.
            fs.remove_dir(
                &worktree_entry_dir,
                RemoveOptions {
                    recursive: true,
                    ignore_if_not_exists: false,
                },
            )
            .await?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
598
    /// Moves a linked worktree checkout from `old_path` to `new_path` and
    /// rewrites the two location pointers (the entry's `gitdir` file and the
    /// checkout's `.git` file) so they reference each other again.
    fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let common_dir_path = self.common_dir_path.clone();
        async move {
            executor.simulate_random_delay().await;

            // Read the worktree's .git file to find its entry directory.
            let dot_git_file = old_path.join(".git");
            let content = fs
                .load(&dot_git_file)
                .await
                .with_context(|| format!("no worktree found at path: {}", old_path.display()))?;
            let gitdir = content
                .strip_prefix("gitdir:")
                .context("invalid .git file in worktree")?
                .trim();
            let worktree_entry_dir = PathBuf::from(gitdir);

            // Move the worktree checkout directory.
            fs.rename(
                &old_path,
                &new_path,
                RenameOptions {
                    overwrite: false,
                    ignore_if_exists: false,
                    create_parents: true,
                },
            )
            .await?;

            // Update the gitdir file in .git/worktrees/<name>/ to point to the
            // new location.
            let new_dot_git = new_path.join(".git");
            fs.write_file_internal(
                worktree_entry_dir.join("gitdir"),
                new_dot_git.to_string_lossy().into_owned().into_bytes(),
                false,
            )?;

            // Update the .git file in the moved worktree checkout.
            fs.write_file_internal(
                &new_dot_git,
                format!("gitdir: {}", worktree_entry_dir.display()).into_bytes(),
                false,
            )?;

            // Emit a git event on the main .git directory so the scanner
            // notices the change.
            fs.with_git_state(&common_dir_path, true, |_| {})?;

            Ok(())
        }
        .boxed()
    }
654
    /// Switches the current branch without touching any file contents.
    ///
    /// NOTE(review): unlike real git, this does not verify that `name`
    /// exists in `state.branches` — confirm callers rely on that leniency.
    fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, |state| {
            state.current_branch_name = Some(name);
            Ok(())
        })
    }
661
662 fn create_branch(
663 &self,
664 name: String,
665 _base_branch: Option<String>,
666 ) -> BoxFuture<'_, Result<()>> {
667 self.with_state_async(true, move |state| {
668 if let Some((remote, _)) = name.split_once('/')
669 && !state.remotes.contains_key(remote)
670 {
671 state.remotes.insert(remote.to_owned(), "".to_owned());
672 }
673 state.branches.insert(name);
674 Ok(())
675 })
676 }
677
678 fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> {
679 self.with_state_async(true, move |state| {
680 if !state.branches.remove(&branch) {
681 bail!("no such branch: {branch}");
682 }
683 state.branches.insert(new_name.clone());
684 if state.current_branch_name == Some(branch) {
685 state.current_branch_name = Some(new_name);
686 }
687 Ok(())
688 })
689 }
690
691 fn delete_branch(&self, _is_remote: bool, name: String) -> BoxFuture<'_, Result<()>> {
692 self.with_state_async(true, move |state| {
693 if !state.branches.remove(&name) {
694 bail!("no such branch: {name}");
695 }
696 Ok(())
697 })
698 }
699
700 fn blame(
701 &self,
702 path: RepoPath,
703 _content: Rope,
704 _line_ending: LineEnding,
705 ) -> BoxFuture<'_, Result<git::blame::Blame>> {
706 self.with_state_async(false, move |state| {
707 state
708 .blames
709 .get(&path)
710 .with_context(|| format!("failed to get blame for {:?}", path))
711 .cloned()
712 })
713 }
714
    /// Full (non-paginated) history; delegates to [`Self::file_history_paginated`].
    fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
718
719 fn file_history_paginated(
720 &self,
721 path: RepoPath,
722 _skip: usize,
723 _limit: Option<usize>,
724 ) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
725 async move {
726 Ok(git::repository::FileHistory {
727 entries: Vec::new(),
728 path,
729 })
730 }
731 .boxed()
732 }
733
    /// Stages the given paths by copying their current worktree contents
    /// into the fake index; paths missing from the worktree are removed
    /// from the index (i.e. staging a deletion).
    fn stage_paths(
        &self,
        paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        Box::pin(async move {
            // Read every path's worktree content concurrently; `None` marks
            // a path that no longer exists on disk.
            let contents = paths
                .into_iter()
                .map(|path| {
                    let abs_path = self
                        .dot_git_path
                        .parent()
                        .unwrap()
                        .join(&path.as_std_path());
                    Box::pin(async move { (path.clone(), self.fs.load(&abs_path).await.ok()) })
                })
                .collect::<Vec<_>>();
            let contents = join_all(contents).await;
            self.with_state_async(true, move |state| {
                for (path, content) in contents {
                    if let Some(content) = content {
                        state.index_contents.insert(path, content);
                    } else {
                        state.index_contents.remove(&path);
                    }
                }
                Ok(())
            })
            .await
        })
    }
765
766 fn unstage_paths(
767 &self,
768 paths: Vec<RepoPath>,
769 _env: Arc<HashMap<String, String>>,
770 ) -> BoxFuture<'_, Result<()>> {
771 self.with_state_async(true, move |state| {
772 for path in paths {
773 match state.head_contents.get(&path) {
774 Some(content) => state.index_contents.insert(path, content.clone()),
775 None => state.index_contents.remove(&path),
776 };
777 }
778 Ok(())
779 })
780 }
781
    /// Unsupported in the fake repository; panics if called.
    fn stash_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// Unsupported in the fake repository; panics if called.
    fn stash_pop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// Unsupported in the fake repository; panics if called.
    fn stash_apply(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    /// Unsupported in the fake repository; panics if called.
    fn stash_drop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }
813
    /// No-op commit: succeeds immediately without changing any fake state.
    fn commit(
        &self,
        _message: gpui::SharedString,
        _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
        _options: CommitOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        async { Ok(()) }.boxed()
    }

    /// No-op hook runner: always reports success.
    fn run_hook(
        &self,
        _hook: RunHook,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        async { Ok(()) }.boxed()
    }
832
    /// Unsupported in the fake repository; panics if called.
    fn push(
        &self,
        _branch: String,
        _remote_branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    /// Unsupported in the fake repository; panics if called.
    fn pull(
        &self,
        _branch: Option<String>,
        _remote: String,
        _rebase: bool,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    /// Unsupported in the fake repository; panics if called.
    fn fetch(
        &self,
        _fetch_options: FetchOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }
867
868 fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
869 self.with_state_async(false, move |state| {
870 let remotes = state
871 .remotes
872 .keys()
873 .map(|r| Remote {
874 name: r.clone().into(),
875 })
876 .collect::<Vec<_>>();
877 Ok(remotes)
878 })
879 }
880
    /// Unsupported in the fake repository; panics if called.
    fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }

    /// Unsupported in the fake repository; panics if called.
    fn get_branch_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }

    /// The fake never considers any commit pushed.
    fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
        future::ready(Ok(Vec::new())).boxed()
    }

    /// Always produces an empty textual diff.
    fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result<String>> {
        future::ready(Ok(String::new())).boxed()
    }
896
897 fn diff_stat(
898 &self,
899 path_prefixes: &[RepoPath],
900 ) -> BoxFuture<'_, Result<git::status::GitDiffStat>> {
901 fn count_lines(s: &str) -> u32 {
902 if s.is_empty() {
903 0
904 } else {
905 s.lines().count() as u32
906 }
907 }
908
909 fn matches_prefixes(path: &RepoPath, prefixes: &[RepoPath]) -> bool {
910 if prefixes.is_empty() {
911 return true;
912 }
913 prefixes.iter().any(|prefix| {
914 let prefix_str = prefix.as_unix_str();
915 if prefix_str == "." {
916 return true;
917 }
918 path == prefix || path.starts_with(&prefix)
919 })
920 }
921
922 let path_prefixes = path_prefixes.to_vec();
923
924 let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf();
925 let worktree_files: HashMap<RepoPath, String> = self
926 .fs
927 .files()
928 .iter()
929 .filter_map(|path| {
930 let repo_path = path.strip_prefix(&workdir_path).ok()?;
931 if repo_path.starts_with(".git") {
932 return None;
933 }
934 let content = self
935 .fs
936 .read_file_sync(path)
937 .ok()
938 .and_then(|bytes| String::from_utf8(bytes).ok())?;
939 let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
940 Some((RepoPath::from_rel_path(&repo_path), content))
941 })
942 .collect();
943
944 self.with_state_async(false, move |state| {
945 let mut entries = Vec::new();
946 let all_paths: HashSet<&RepoPath> = state
947 .head_contents
948 .keys()
949 .chain(
950 worktree_files
951 .keys()
952 .filter(|p| state.index_contents.contains_key(*p)),
953 )
954 .collect();
955 for path in all_paths {
956 if !matches_prefixes(path, &path_prefixes) {
957 continue;
958 }
959 let head = state.head_contents.get(path);
960 let worktree = worktree_files.get(path);
961 match (head, worktree) {
962 (Some(old), Some(new)) if old != new => {
963 entries.push((
964 path.clone(),
965 git::status::DiffStat {
966 added: count_lines(new),
967 deleted: count_lines(old),
968 },
969 ));
970 }
971 (Some(old), None) => {
972 entries.push((
973 path.clone(),
974 git::status::DiffStat {
975 added: 0,
976 deleted: count_lines(old),
977 },
978 ));
979 }
980 (None, Some(new)) => {
981 entries.push((
982 path.clone(),
983 git::status::DiffStat {
984 added: count_lines(new),
985 deleted: 0,
986 },
987 ));
988 }
989 _ => {}
990 }
991 }
992 entries.sort_by(|(a, _), (b, _)| a.cmp(b));
993 Ok(git::status::GitDiffStat {
994 entries: entries.into(),
995 })
996 })
997 .boxed()
998 }
999
    /// Records a snapshot of the repository's parent directory under a
    /// freshly generated random oid and returns it as a checkpoint. The
    /// future is `'static`: it captures clones rather than `&self`.
    fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
        let executor = self.executor.clone();
        let fs = self.fs.clone();
        let checkpoints = self.checkpoints.clone();
        let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
        async move {
            executor.simulate_random_delay().await;
            // Deterministic-per-seed oid from the executor's rng.
            let oid = git::Oid::random(&mut *executor.rng().lock());
            let entry = fs.entry(&repository_dir_path)?;
            checkpoints.lock().insert(oid, entry);
            Ok(GitRepositoryCheckpoint { commit_sha: oid })
        }
        .boxed()
    }
1014
1015 fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
1016 let executor = self.executor.clone();
1017 let fs = self.fs.clone();
1018 let checkpoints = self.checkpoints.clone();
1019 let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
1020 async move {
1021 executor.simulate_random_delay().await;
1022 let checkpoints = checkpoints.lock();
1023 let entry = checkpoints
1024 .get(&checkpoint.commit_sha)
1025 .context(format!("invalid checkpoint: {}", checkpoint.commit_sha))?;
1026 fs.insert_entry(&repository_dir_path, entry.clone())?;
1027 Ok(())
1028 }
1029 .boxed()
1030 }
1031
1032 fn compare_checkpoints(
1033 &self,
1034 left: GitRepositoryCheckpoint,
1035 right: GitRepositoryCheckpoint,
1036 ) -> BoxFuture<'_, Result<bool>> {
1037 let executor = self.executor.clone();
1038 let checkpoints = self.checkpoints.clone();
1039 async move {
1040 executor.simulate_random_delay().await;
1041 let checkpoints = checkpoints.lock();
1042 let left = checkpoints
1043 .get(&left.commit_sha)
1044 .context(format!("invalid left checkpoint: {}", left.commit_sha))?;
1045 let right = checkpoints
1046 .get(&right.commit_sha)
1047 .context(format!("invalid right checkpoint: {}", right.commit_sha))?;
1048
1049 Ok(left == right)
1050 }
1051 .boxed()
1052 }
1053
    /// Produces a simplified unified-diff-style string between two recorded
    /// checkpoints: full old/new line listings for changed files, plus
    /// "deleted file"/"new file" markers.
    ///
    /// # Errors
    /// Fails when either checkpoint sha was never recorded.
    fn diff_checkpoints(
        &self,
        base_checkpoint: GitRepositoryCheckpoint,
        target_checkpoint: GitRepositoryCheckpoint,
    ) -> BoxFuture<'_, Result<String>> {
        let executor = self.executor.clone();
        let checkpoints = self.checkpoints.clone();
        async move {
            executor.simulate_random_delay().await;
            let checkpoints = checkpoints.lock();
            let base = checkpoints
                .get(&base_checkpoint.commit_sha)
                .context(format!(
                    "invalid base checkpoint: {}",
                    base_checkpoint.commit_sha
                ))?;
            let target = checkpoints
                .get(&target_checkpoint.commit_sha)
                .context(format!(
                    "invalid target checkpoint: {}",
                    target_checkpoint.commit_sha
                ))?;

            // Flattens a snapshot tree into `path -> content`, joining
            // directory names with '/'; symlinks are skipped.
            fn collect_files(
                entry: &FakeFsEntry,
                prefix: String,
                out: &mut std::collections::BTreeMap<String, String>,
            ) {
                match entry {
                    FakeFsEntry::File { content, .. } => {
                        out.insert(prefix, String::from_utf8_lossy(content).into_owned());
                    }
                    FakeFsEntry::Dir { entries, .. } => {
                        for (name, child) in entries {
                            let path = if prefix.is_empty() {
                                name.clone()
                            } else {
                                format!("{prefix}/{name}")
                            };
                            collect_files(child, path, out);
                        }
                    }
                    FakeFsEntry::Symlink { .. } => {}
                }
            }

            let mut base_files = std::collections::BTreeMap::new();
            let mut target_files = std::collections::BTreeMap::new();
            collect_files(base, String::new(), &mut base_files);
            collect_files(target, String::new(), &mut target_files);

            // Union of paths, sorted, so the diff output is deterministic.
            let all_paths: std::collections::BTreeSet<&String> =
                base_files.keys().chain(target_files.keys()).collect();

            let mut diff = String::new();
            for path in all_paths {
                match (base_files.get(path), target_files.get(path)) {
                    (Some(base_content), Some(target_content))
                        if base_content != target_content =>
                    {
                        diff.push_str(&format!("diff --git a/{path} b/{path}\n"));
                        diff.push_str(&format!("--- a/{path}\n"));
                        diff.push_str(&format!("+++ b/{path}\n"));
                        // No hunking: emit the entire old file as removals
                        // followed by the entire new file as additions.
                        for line in base_content.lines() {
                            diff.push_str(&format!("-{line}\n"));
                        }
                        for line in target_content.lines() {
                            diff.push_str(&format!("+{line}\n"));
                        }
                    }
                    (Some(_), None) => {
                        diff.push_str(&format!("diff --git a/{path} /dev/null\n"));
                        diff.push_str("deleted file\n");
                    }
                    (None, Some(_)) => {
                        diff.push_str(&format!("diff --git /dev/null b/{path}\n"));
                        diff.push_str("new file\n");
                    }
                    // Identical on both sides: nothing to report.
                    _ => {}
                }
            }
            Ok(diff)
        }
        .boxed()
    }
1139
1140 fn default_branch(
1141 &self,
1142 include_remote_name: bool,
1143 ) -> BoxFuture<'_, Result<Option<SharedString>>> {
1144 async move {
1145 Ok(Some(if include_remote_name {
1146 "origin/main".into()
1147 } else {
1148 "main".into()
1149 }))
1150 }
1151 .boxed()
1152 }
1153
    /// Registers (or overwrites) a remote named `name` pointing at `url`.
    fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            state.remotes.insert(name, url);
            Ok(())
        })
    }
1160
1161 fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
1162 self.with_state_async(true, move |state| {
1163 state.branches.retain(|branch| {
1164 branch
1165 .split_once('/')
1166 .is_none_or(|(remote, _)| remote != name)
1167 });
1168 state.remotes.remove(&name);
1169 Ok(())
1170 })
1171 }
1172
    /// Streams the pre-recorded commit-graph data to `request_tx` in chunks
    /// of `GRAPH_CHUNK_SIZE`; source and ordering parameters are ignored.
    fn initial_graph_data(
        &self,
        _log_source: LogSource,
        _log_order: LogOrder,
        request_tx: Sender<Vec<Arc<InitialGraphCommitData>>>,
    ) -> BoxFuture<'_, Result<()>> {
        let fs = self.fs.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            let graph_commits =
                fs.with_git_state(&dot_git_path, false, |state| state.graph_commits.clone())?;

            for chunk in graph_commits.chunks(GRAPH_CHUNK_SIZE) {
                // A closed receiver is not an error; just stop sending.
                request_tx.send(chunk.to_vec()).await.ok();
            }
            Ok(())
        }
        .boxed()
    }
1192
    /// Unsupported in the fake repository; resolves to an error.
    fn search_commits(
        &self,
        _log_source: LogSource,
        _search_args: SearchCommitArgs,
        _request_tx: Sender<Oid>,
    ) -> BoxFuture<'_, Result<()>> {
        async { bail!("search_commits not supported for FakeGitRepository") }.boxed()
    }

    /// Unsupported in the fake repository; returns an error.
    fn commit_data_reader(&self) -> Result<CommitDataReader> {
        anyhow::bail!("commit_data_reader not supported for FakeGitRepository")
    }
1205
    /// Updates the trusted flag (Release store, paired with the Acquire
    /// load in [`Self::is_trusted`]).
    fn set_trusted(&self, trusted: bool) {
        self.is_trusted
            .store(trusted, std::sync::atomic::Ordering::Release);
    }

    /// Reads the trusted flag set via [`Self::set_trusted`].
    fn is_trusted(&self) -> bool {
        self.is_trusted.load(std::sync::atomic::Ordering::Acquire)
    }
1214}