use crate::{FakeFs, FakeFsEntry, Fs};
use anyhow::{Context as _, Result, bail};
use collections::{HashMap, HashSet};
use futures::future::{self, BoxFuture, join_all};
use git::{
    Oid, RunHook,
    blame::Blame,
    repository::{
        AskPassDelegate, Branch, CommitDetails, CommitOptions, FetchOptions, GitRepository,
        GitRepositoryCheckpoint, PushOptions, Remote, RepoPath, ResetMode, Worktree,
    },
    status::{
        DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
        UnmergedStatus,
    },
};
use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task, TaskLabel};
use ignore::gitignore::GitignoreBuilder;
use parking_lot::Mutex;
use rope::Rope;
use smol::future::FutureExt as _;
use std::{
    path::PathBuf,
    sync::{Arc, LazyLock},
};
use text::LineEnding;
use util::{paths::PathStyle, rel_path::RelPath};

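// Labels attached to the futures spawned by `load_index_text` and
// `load_committed_text`, so tests driving the deterministic executor can
// identify these particular tasks.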
pub static LOAD_INDEX_TEXT_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
pub static LOAD_HEAD_TEXT_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);

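/// A fake, in-memory Git repository used in tests. All repository state is
/// stored by the owning `FakeFs` and keyed by the repository's `.git` path.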
#[derive(Clone)]
pub struct FakeGitRepository {
    pub(crate) fs: Arc<FakeFs>,
    pub(crate) checkpoints: Arc<Mutex<HashMap<Oid, FakeFsEntry>>>,
    pub(crate) executor: BackgroundExecutor,
    pub(crate) dot_git_path: PathBuf,
    pub(crate) repository_dir_path: PathBuf,
    pub(crate) common_dir_path: PathBuf,
}

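/// The mutable state behind a `FakeGitRepository`, held by the `FakeFs` and
/// accessed through `FakeGitRepository::with_state_async` or
/// `FakeFs::with_git_state`.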
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    pub event_emitter: smol::channel::Sender<PathBuf>,
    pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
    pub head_contents: HashMap<RepoPath, String>,
    pub index_contents: HashMap<RepoPath, String>,
    // Every oid in `merge_base_contents` has its blob content stored in `oids`.
    pub merge_base_contents: HashMap<RepoPath, Oid>,
    pub oids: HashMap<Oid, String>,
    pub blames: HashMap<RepoPath, Blame>,
    pub current_branch_name: Option<String>,
    pub branches: HashSet<String>,
    /// Remotes, keyed by name, with the remote URL as the value.
    pub remotes: HashMap<String, String>,
    pub simulated_index_write_error_message: Option<String>,
    pub refs: HashMap<String, String>,
}

impl FakeGitRepositoryState {
    pub fn new(event_emitter: smol::channel::Sender<PathBuf>) -> Self {
        FakeGitRepositoryState {
            event_emitter,
            head_contents: Default::default(),
            index_contents: Default::default(),
            unmerged_paths: Default::default(),
            blames: Default::default(),
            current_branch_name: Default::default(),
            branches: Default::default(),
            simulated_index_write_error_message: Default::default(),
            refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
            merge_base_contents: Default::default(),
            oids: Default::default(),
            remotes: HashMap::default(),
        }
    }
}

impl FakeGitRepository {
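    /// Runs `f` against this repository's shared state after simulating a
    /// random delay on the background executor. The `write` flag is forwarded
    /// to `FakeFs::with_git_state`.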
    fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<'static, Result<T>>
    where
        F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> Result<T>,
        T: Send,
    {
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            fs.with_git_state(&dot_git_path, write, f)?
        }
        .boxed()
    }
}

impl GitRepository for FakeGitRepository {
    fn reload_index(&self) {}

    fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
        let fut = self.with_state_async(false, move |state| {
            state
                .index_contents
                .get(&path)
                .context("not present in index")
                .cloned()
        });
        self.executor
            .spawn_labeled(*LOAD_INDEX_TEXT_TASK, async move { fut.await.ok() })
            .boxed()
    }

    fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
        let fut = self.with_state_async(false, move |state| {
            state
                .head_contents
                .get(&path)
                .context("not present in HEAD")
                .cloned()
        });
        self.executor
            .spawn_labeled(*LOAD_HEAD_TEXT_TASK, async move { fut.await.ok() })
            .boxed()
    }

    fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
        self.with_state_async(false, move |state| {
            state.oids.get(&oid).cloned().context("oid does not exist")
        })
        .boxed()
    }

    fn load_commit(
        &self,
        _commit: String,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::CommitDiff>> {
        unimplemented!()
    }

    fn set_index_text(
        &self,
        path: RepoPath,
        content: Option<String>,
        _env: Arc<HashMap<String, String>>,
        _is_executable: bool,
    ) -> BoxFuture<'_, anyhow::Result<()>> {
        self.with_state_async(true, move |state| {
            if let Some(message) = &state.simulated_index_write_error_message {
                anyhow::bail!("{message}");
            } else if let Some(content) = content {
                state.index_contents.insert(path, content);
            } else {
                state.index_contents.remove(&path);
            }
            Ok(())
        })
    }

    fn remote_url(&self, name: &str) -> BoxFuture<'_, Option<String>> {
        let name = name.to_string();
        let fut = self.with_state_async(false, move |state| {
            state
                .remotes
                .get(&name)
                .context("remote not found")
                .cloned()
        });
        async move { fut.await.ok() }.boxed()
    }

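    // Diffs the merge-base contents against HEAD: paths only in HEAD are
    // reported as added, changed paths as modified, and paths missing from
    // HEAD as deleted. The requested `DiffTreeType` is ignored by this fake.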
    fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
        let mut entries = HashMap::default();
        self.with_state_async(false, |state| {
            for (path, content) in &state.head_contents {
                let status = if let Some((oid, original)) = state
                    .merge_base_contents
                    .get(path)
                    .map(|oid| (oid, &state.oids[oid]))
                {
                    if original == content {
                        continue;
                    }
                    TreeDiffStatus::Modified { old: *oid }
                } else {
                    TreeDiffStatus::Added
                };
                entries.insert(path.clone(), status);
            }
            for (path, oid) in &state.merge_base_contents {
                // Only paths that no longer exist in HEAD are deletions;
                // unchanged paths were skipped above and must not appear here.
                if !state.head_contents.contains_key(path) {
                    entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
                }
            }
            Ok(TreeDiff { entries })
        })
        .boxed()
    }

    fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
        self.with_state_async(false, |state| {
            Ok(revs
                .into_iter()
                .map(|rev| state.refs.get(&rev).cloned())
                .collect())
        })
    }

    fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
        async {
            Ok(CommitDetails {
                sha: commit.into(),
                message: "initial commit".into(),
                ..Default::default()
            })
        }
        .boxed()
    }

    fn reset(
        &self,
        _commit: String,
        _mode: ResetMode,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    fn checkout_files(
        &self,
        _commit: String,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    fn path(&self) -> PathBuf {
        self.repository_dir_path.clone()
    }

    fn main_repository_path(&self) -> PathBuf {
        self.common_dir_path.clone()
    }

    fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
        async move { None }.boxed()
    }

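    // Computes a porcelain-style status by combining HEAD contents, index
    // contents, and the files currently present in the fake working copy,
    // honoring any `.gitignore` files found in the working directory and its
    // ancestors.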
    fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>> {
        let workdir_path = self.dot_git_path.parent().unwrap();

        // Load gitignores
        let ignores = workdir_path
            .ancestors()
            .filter_map(|dir| {
                let ignore_path = dir.join(".gitignore");
                let content = self.fs.read_file_sync(ignore_path).ok()?;
                let content = String::from_utf8(content).ok()?;
                let mut builder = GitignoreBuilder::new(dir);
                for line in content.lines() {
                    builder.add_line(Some(dir.into()), line).ok()?;
                }
                builder.build().ok()
            })
            .collect::<Vec<_>>();

        // Load working copy files.
        let git_files: HashMap<RepoPath, (String, bool)> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                // TODO: better simulate `git status` output for submodules and worktrees
                let repo_path = path.strip_prefix(workdir_path).ok()?;
                let mut is_ignored = repo_path.starts_with(".git");
                for ignore in &ignores {
                    match ignore.matched_path_or_any_parents(path, false) {
                        ignore::Match::None => {}
                        ignore::Match::Ignore(_) => is_ignored = true,
                        ignore::Match::Whitelist(_) => break,
                    }
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .map(|content| String::from_utf8(content).unwrap())?;
                let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
                Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored)))
            })
            .collect();

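        // Derive a status for every path mentioned in HEAD, the index, or the
        // working copy, approximating how `git status` classifies each
        // combination of presence and content equality.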
        let result = self.fs.with_git_state(&self.dot_git_path, false, |state| {
            let mut entries = Vec::new();
            let paths = state
                .head_contents
                .keys()
                .chain(state.index_contents.keys())
                .chain(git_files.keys())
                .collect::<HashSet<_>>();
            for path in paths {
                if !path_prefixes.iter().any(|prefix| path.starts_with(prefix)) {
                    continue;
                }

                let head = state.head_contents.get(path);
                let index = state.index_contents.get(path);
                let unmerged = state.unmerged_paths.get(path);
                let fs = git_files.get(path);
                let status = match (unmerged, head, index, fs) {
                    (Some(unmerged), _, _, _) => FileStatus::Unmerged(*unmerged),
                    (_, Some(head), Some(index), Some((fs, _))) => {
                        FileStatus::Tracked(TrackedStatus {
                            index_status: if head == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                            worktree_status: if fs == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                        })
                    }
                    (_, Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: if head == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Added,
                    }),
                    (_, Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, None, Some(index), Some((fs, _))) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: if fs == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                    }),
                    (_, None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: StatusCode::Deleted,
                    }),
                    (_, None, None, Some((_, is_ignored))) => {
                        if *is_ignored {
                            continue;
                        }
                        FileStatus::Untracked
                    }
                    (_, None, None, None) => {
                        unreachable!();
                    }
                };
                if status
                    != FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Unmodified,
                        worktree_status: StatusCode::Unmodified,
                    })
                {
                    entries.push((path.clone(), status));
                }
            }
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            anyhow::Ok(GitStatus {
                entries: entries.into(),
            })
        });
        Task::ready(match result {
            Ok(result) => result,
            Err(e) => Err(e),
        })
    }

    fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
        async { Ok(git::stash::GitStash::default()) }.boxed()
    }

    fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
        self.with_state_async(false, move |state| {
            let current_branch = &state.current_branch_name;
            Ok(state
                .branches
                .iter()
                .map(|branch_name| {
                    let ref_name = if branch_name.starts_with("refs/") {
                        branch_name.into()
                    } else {
                        format!("refs/heads/{branch_name}").into()
                    };
                    Branch {
                        is_head: Some(branch_name) == current_branch.as_ref(),
                        ref_name,
                        most_recent_commit: None,
                        upstream: None,
                    }
                })
                .collect())
        })
    }

    fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
        unimplemented!()
    }

    fn create_worktree(
        &self,
        _: String,
        _: PathBuf,
        _: Option<String>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, |state| {
            state.current_branch_name = Some(name);
            Ok(())
        })
    }

    fn create_branch(
        &self,
        name: String,
        _base_branch: Option<String>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            state.branches.insert(name);
            Ok(())
        })
    }

    fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            if !state.branches.remove(&branch) {
                bail!("no such branch: {branch}");
            }
            state.branches.insert(new_name.clone());
            if state.current_branch_name == Some(branch) {
                state.current_branch_name = Some(new_name);
            }
            Ok(())
        })
    }

    fn delete_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            if !state.branches.remove(&name) {
                bail!("no such branch: {name}");
            }
            Ok(())
        })
    }

    fn blame(
        &self,
        path: RepoPath,
        _content: Rope,
        _line_ending: LineEnding,
    ) -> BoxFuture<'_, Result<git::blame::Blame>> {
        self.with_state_async(false, move |state| {
            state
                .blames
                .get(&path)
                .with_context(|| format!("failed to get blame for {:?}", path))
                .cloned()
        })
    }

    fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }

    fn file_history_paginated(
        &self,
        path: RepoPath,
        _skip: usize,
        _limit: Option<usize>,
    ) -> BoxFuture<'_, Result<git::repository::FileHistory>> {
        async move {
            Ok(git::repository::FileHistory {
                entries: Vec::new(),
                path,
            })
        }
        .boxed()
    }

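    // Staging reads each path's current content from the fake filesystem and
    // copies it into the index; files missing on disk are removed from the
    // index instead.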
    fn stage_paths(
        &self,
        paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        Box::pin(async move {
            let contents = paths
                .into_iter()
                .map(|path| {
                    let abs_path = self
                        .dot_git_path
                        .parent()
                        .unwrap()
                        .join(&path.as_std_path());
                    Box::pin(async move { (path.clone(), self.fs.load(&abs_path).await.ok()) })
                })
                .collect::<Vec<_>>();
            let contents = join_all(contents).await;
            self.with_state_async(true, move |state| {
                for (path, content) in contents {
                    if let Some(content) = content {
                        state.index_contents.insert(path, content);
                    } else {
                        state.index_contents.remove(&path);
                    }
                }
                Ok(())
            })
            .await
        })
    }

    fn unstage_paths(
        &self,
        paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            for path in paths {
                match state.head_contents.get(&path) {
                    Some(content) => state.index_contents.insert(path, content.clone()),
                    None => state.index_contents.remove(&path),
                };
            }
            Ok(())
        })
    }

    fn stash_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    fn stash_pop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    fn stash_apply(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    fn stash_drop(
        &self,
        _index: Option<usize>,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        unimplemented!()
    }

    fn commit(
        &self,
        _message: gpui::SharedString,
        _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
        _options: CommitOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        async { Ok(()) }.boxed()
    }

    fn run_hook(
        &self,
        _hook: RunHook,
        _env: Arc<HashMap<String, String>>,
    ) -> BoxFuture<'_, Result<()>> {
        async { Ok(()) }.boxed()
    }

    fn push(
        &self,
        _branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    fn pull(
        &self,
        _branch: Option<String>,
        _remote: String,
        _rebase: bool,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    fn fetch(
        &self,
        _fetch_options: FetchOptions,
        _askpass: AskPassDelegate,
        _env: Arc<HashMap<String, String>>,
        _cx: AsyncApp,
    ) -> BoxFuture<'_, Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    fn get_all_remotes(&self) -> BoxFuture<'_, Result<Vec<Remote>>> {
        self.with_state_async(false, move |state| {
            let remotes = state
                .remotes
                .keys()
                .map(|r| Remote {
                    name: r.clone().into(),
                })
                .collect::<Vec<_>>();
            Ok(remotes)
        })
    }

    fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }

    fn get_branch_remote(&self, _branch: String) -> BoxFuture<'_, Result<Option<Remote>>> {
        unimplemented!()
    }

    fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<gpui::SharedString>>> {
        future::ready(Ok(Vec::new())).boxed()
    }

    fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result<String>> {
        unimplemented!()
    }

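    // Checkpoints snapshot the working directory as a `FakeFsEntry` tree keyed
    // by a random oid; restoring a checkpoint re-inserts that snapshot into the
    // fake filesystem.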
    fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
        let executor = self.executor.clone();
        let fs = self.fs.clone();
        let checkpoints = self.checkpoints.clone();
        let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
        async move {
            executor.simulate_random_delay().await;
            let oid = git::Oid::random(&mut executor.rng());
            let entry = fs.entry(&repository_dir_path)?;
            checkpoints.lock().insert(oid, entry);
            Ok(GitRepositoryCheckpoint { commit_sha: oid })
        }
        .boxed()
    }

    fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
        let executor = self.executor.clone();
        let fs = self.fs.clone();
        let checkpoints = self.checkpoints.clone();
        let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
        async move {
            executor.simulate_random_delay().await;
            let checkpoints = checkpoints.lock();
            let entry = checkpoints
                .get(&checkpoint.commit_sha)
                .context(format!("invalid checkpoint: {}", checkpoint.commit_sha))?;
            fs.insert_entry(&repository_dir_path, entry.clone())?;
            Ok(())
        }
        .boxed()
    }

    fn compare_checkpoints(
        &self,
        left: GitRepositoryCheckpoint,
        right: GitRepositoryCheckpoint,
    ) -> BoxFuture<'_, Result<bool>> {
        let executor = self.executor.clone();
        let checkpoints = self.checkpoints.clone();
        async move {
            executor.simulate_random_delay().await;
            let checkpoints = checkpoints.lock();
            let left = checkpoints
                .get(&left.commit_sha)
                .context(format!("invalid left checkpoint: {}", left.commit_sha))?;
            let right = checkpoints
                .get(&right.commit_sha)
                .context(format!("invalid right checkpoint: {}", right.commit_sha))?;

            Ok(left == right)
        }
        .boxed()
    }

    fn diff_checkpoints(
        &self,
        _base_checkpoint: GitRepositoryCheckpoint,
        _target_checkpoint: GitRepositoryCheckpoint,
    ) -> BoxFuture<'_, Result<String>> {
        unimplemented!()
    }

    fn default_branch(&self) -> BoxFuture<'_, Result<Option<SharedString>>> {
        async { Ok(Some("main".into())) }.boxed()
    }

    fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            state.remotes.insert(name, url);
            Ok(())
        })
    }

    fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> {
        self.with_state_async(true, move |state| {
            state.remotes.remove(&name);
            Ok(())
        })
    }
}

#[cfg(test)]
mod tests {
    use crate::{FakeFs, Fs};
    use gpui::BackgroundExecutor;
    use serde_json::json;
    use std::path::Path;
    use util::path;

    #[gpui::test]
    async fn test_checkpoints(executor: BackgroundExecutor) {
        let fs = FakeFs::new(executor);
        fs.insert_tree(
            path!("/"),
            json!({
                "bar": {
                    "baz": "qux"
                },
                "foo": {
                    ".git": {},
                    "a": "lorem",
                    "b": "ipsum",
                },
            }),
        )
        .await;
        fs.with_git_state(Path::new("/foo/.git"), true, |_git| {})
            .unwrap();
        let repository = fs
            .open_repo(Path::new("/foo/.git"), Some("git".as_ref()))
            .unwrap();

        let checkpoint_1 = repository.checkpoint().await.unwrap();
        fs.write(Path::new("/foo/b"), b"IPSUM").await.unwrap();
        fs.write(Path::new("/foo/c"), b"dolor").await.unwrap();
        let checkpoint_2 = repository.checkpoint().await.unwrap();
        let checkpoint_3 = repository.checkpoint().await.unwrap();

        assert!(
            repository
                .compare_checkpoints(checkpoint_2.clone(), checkpoint_3.clone())
                .await
                .unwrap()
        );
        assert!(
            !repository
                .compare_checkpoints(checkpoint_1.clone(), checkpoint_2.clone())
                .await
                .unwrap()
        );

        repository.restore_checkpoint(checkpoint_1).await.unwrap();
        assert_eq!(
            fs.files_with_contents(Path::new("")),
            [
                (Path::new(path!("/bar/baz")).into(), b"qux".into()),
                (Path::new(path!("/foo/a")).into(), b"lorem".into()),
                (Path::new(path!("/foo/b")).into(), b"ipsum".into())
            ]
        );
    }
}