1use crate::commit::parse_git_diff_name_status;
2use crate::status::{GitStatus, StatusCode};
3use crate::{Oid, SHORT_SHA_LENGTH};
4use anyhow::{Context as _, Result, anyhow, bail};
5use collections::HashMap;
6use futures::future::BoxFuture;
7use futures::{AsyncWriteExt, FutureExt as _, select_biased};
8use git2::BranchType;
9use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, SharedString};
10use parking_lot::Mutex;
11use rope::Rope;
12use schemars::JsonSchema;
13use serde::Deserialize;
14use std::borrow::{Borrow, Cow};
15use std::ffi::{OsStr, OsString};
16use std::io::prelude::*;
17use std::path::Component;
18use std::process::{ExitStatus, Stdio};
19use std::sync::LazyLock;
20use std::{
21 cmp::Ordering,
22 future,
23 io::{BufRead, BufReader, BufWriter, Read},
24 path::{Path, PathBuf},
25 sync::Arc,
26};
27use sum_tree::MapSeekTarget;
28use thiserror::Error;
29use util::ResultExt;
30use util::command::{new_smol_command, new_std_command};
31use uuid::Uuid;
32
33pub use askpass::{AskPassDelegate, AskPassResult, AskPassSession};
34
35pub const REMOTE_CANCELLED_BY_USER: &str = "Operation cancelled by user";
36
37#[derive(Clone, Debug, Hash, PartialEq, Eq)]
38pub struct Branch {
39 pub is_head: bool,
40 pub ref_name: SharedString,
41 pub upstream: Option<Upstream>,
42 pub most_recent_commit: Option<CommitSummary>,
43}
44
45impl Branch {
46 pub fn name(&self) -> &str {
47 self.ref_name
48 .as_ref()
49 .strip_prefix("refs/heads/")
50 .or_else(|| self.ref_name.as_ref().strip_prefix("refs/remotes/"))
51 .unwrap_or(self.ref_name.as_ref())
52 }
53
54 pub fn is_remote(&self) -> bool {
55 self.ref_name.starts_with("refs/remotes/")
56 }
57
58 pub fn tracking_status(&self) -> Option<UpstreamTrackingStatus> {
59 self.upstream
60 .as_ref()
61 .and_then(|upstream| upstream.tracking.status())
62 }
63
64 pub fn priority_key(&self) -> (bool, Option<i64>) {
65 (
66 self.is_head,
67 self.most_recent_commit
68 .as_ref()
69 .map(|commit| commit.commit_timestamp),
70 )
71 }
72}
73
74#[derive(Clone, Debug, Hash, PartialEq, Eq)]
75pub struct Upstream {
76 pub ref_name: SharedString,
77 pub tracking: UpstreamTracking,
78}
79
80impl Upstream {
81 pub fn is_remote(&self) -> bool {
82 self.remote_name().is_some()
83 }
84
85 pub fn remote_name(&self) -> Option<&str> {
86 self.ref_name
87 .strip_prefix("refs/remotes/")
88 .and_then(|stripped| stripped.split("/").next())
89 }
90
91 pub fn stripped_ref_name(&self) -> Option<&str> {
92 self.ref_name.strip_prefix("refs/remotes/")
93 }
94}
95
96#[derive(Clone, Copy, Default)]
97pub struct CommitOptions {
98 pub amend: bool,
99}
100
101#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
102pub enum UpstreamTracking {
103 /// Remote ref not present in local repository.
104 Gone,
105 /// Remote ref present in local repository (fetched from remote).
106 Tracked(UpstreamTrackingStatus),
107}
108
109impl From<UpstreamTrackingStatus> for UpstreamTracking {
110 fn from(status: UpstreamTrackingStatus) -> Self {
111 UpstreamTracking::Tracked(status)
112 }
113}
114
115impl UpstreamTracking {
116 pub fn is_gone(&self) -> bool {
117 matches!(self, UpstreamTracking::Gone)
118 }
119
120 pub fn status(&self) -> Option<UpstreamTrackingStatus> {
121 match self {
122 UpstreamTracking::Gone => None,
123 UpstreamTracking::Tracked(status) => Some(*status),
124 }
125 }
126}
127
128#[derive(Debug, Clone)]
129pub struct RemoteCommandOutput {
130 pub stdout: String,
131 pub stderr: String,
132}
133
134impl RemoteCommandOutput {
135 pub fn is_empty(&self) -> bool {
136 self.stdout.is_empty() && self.stderr.is_empty()
137 }
138}
139
140#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
141pub struct UpstreamTrackingStatus {
142 pub ahead: u32,
143 pub behind: u32,
144}
145
146#[derive(Clone, Debug, Hash, PartialEq, Eq)]
147pub struct CommitSummary {
148 pub sha: SharedString,
149 pub subject: SharedString,
    /// This is a Unix timestamp (seconds since the epoch).
151 pub commit_timestamp: i64,
152 pub has_parent: bool,
153}
154
155#[derive(Clone, Debug, Default, Hash, PartialEq, Eq)]
156pub struct CommitDetails {
157 pub sha: SharedString,
158 pub message: SharedString,
159 pub commit_timestamp: i64,
160 pub author_email: SharedString,
161 pub author_name: SharedString,
162}
163
164#[derive(Debug)]
165pub struct CommitDiff {
166 pub files: Vec<CommitFile>,
167}
168
169#[derive(Debug)]
170pub struct CommitFile {
171 pub path: RepoPath,
172 pub old_text: Option<String>,
173 pub new_text: Option<String>,
174}
175
176impl CommitDetails {
177 pub fn short_sha(&self) -> SharedString {
178 self.sha[..SHORT_SHA_LENGTH].to_string().into()
179 }
180}
181
182#[derive(Debug, Clone, Hash, PartialEq, Eq)]
183pub struct Remote {
184 pub name: SharedString,
185}
186
187pub enum ResetMode {
188 /// Reset the branch pointer, leave index and worktree unchanged (this will make it look like things that were
189 /// committed are now staged).
190 Soft,
191 /// Reset the branch pointer and index, leave worktree unchanged (this makes it look as though things that were
192 /// committed are now unstaged).
193 Mixed,
194}
195
/// Temporarily modifies `.git/info/exclude`, restoring the original contents when
/// `restore_original` is called or the override is dropped.
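///
/// A minimal usage sketch (not compiled; `git_dir` and the exclude pattern are made-up examples):
///
/// ```ignore
/// let mut excludes = GitExcludeOverride::new(git_dir.join("info").join("exclude")).await?;
/// excludes.add_excludes("*.tmp").await?;
/// // ... run git commands that should ignore the extra patterns ...
/// excludes.restore_original().await?;
/// ```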
197pub struct GitExcludeOverride {
198 git_exclude_path: PathBuf,
199 original_excludes: Option<String>,
200 added_excludes: Option<String>,
201}
202
203impl GitExcludeOverride {
204 pub async fn new(git_exclude_path: PathBuf) -> Result<Self> {
205 let original_excludes = smol::fs::read_to_string(&git_exclude_path).await.ok();
206
207 Ok(GitExcludeOverride {
208 git_exclude_path,
209 original_excludes,
210 added_excludes: None,
211 })
212 }
213
214 pub async fn add_excludes(&mut self, excludes: &str) -> Result<()> {
215 self.added_excludes = Some(if let Some(ref already_added) = self.added_excludes {
216 format!("{already_added}\n{excludes}")
217 } else {
218 excludes.to_string()
219 });
220
221 let mut content = self.original_excludes.clone().unwrap_or_default();
222 content.push_str("\n\n# ====== Auto-added by Zed: =======\n");
223 content.push_str(self.added_excludes.as_ref().unwrap());
224 content.push('\n');
225
226 smol::fs::write(&self.git_exclude_path, content).await?;
227 Ok(())
228 }
229
230 pub async fn restore_original(&mut self) -> Result<()> {
        if let Some(ref original) = self.original_excludes {
            smol::fs::write(&self.git_exclude_path, original).await?;
        } else if self.git_exclude_path.exists() {
            smol::fs::remove_file(&self.git_exclude_path).await?;
        }
238
239 self.added_excludes = None;
240
241 Ok(())
242 }
243}
244
245impl Drop for GitExcludeOverride {
246 fn drop(&mut self) {
247 if self.added_excludes.is_some() {
248 let git_exclude_path = self.git_exclude_path.clone();
249 let original_excludes = self.original_excludes.clone();
250 smol::spawn(async move {
251 if let Some(original) = original_excludes {
252 smol::fs::write(&git_exclude_path, original).await
253 } else {
254 smol::fs::remove_file(&git_exclude_path).await
255 }
256 })
257 .detach();
258 }
259 }
260}
261
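/// Abstraction over the Git operations Zed needs; implemented below by
/// [`RealGitRepository`] using a mix of the `git` CLI and libgit2.
///
/// Illustrative sketch of staging and committing through this trait (not compiled;
/// `repo` and the staged path are assumptions):
///
/// ```ignore
/// let env = Arc::new(HashMap::default());
/// repo.stage_paths(vec![RepoPath::from_str("src/lib.rs")], env.clone()).await?;
/// repo.commit("Update lib".into(), None, CommitOptions::default(), env).await?;
/// ```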
262pub trait GitRepository: Send + Sync {
263 fn reload_index(&self);
264
265 /// Returns the contents of an entry in the repository's index, or None if there is no entry for the given path.
266 ///
267 /// Also returns `None` for symlinks.
268 fn load_index_text(&self, path: RepoPath) -> BoxFuture<Option<String>>;
269
270 /// Returns the contents of an entry in the repository's HEAD, or None if HEAD does not exist or has no entry for the given path.
271 ///
272 /// Also returns `None` for symlinks.
273 fn load_committed_text(&self, path: RepoPath) -> BoxFuture<Option<String>>;
274
275 fn set_index_text(
276 &self,
277 path: RepoPath,
278 content: Option<String>,
279 env: Arc<HashMap<String, String>>,
280 ) -> BoxFuture<anyhow::Result<()>>;
281
282 /// Returns the URL of the remote with the given name.
283 fn remote_url(&self, name: &str) -> Option<String>;
284
285 /// Resolve a list of refs to SHAs.
286 fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<Result<Vec<Option<String>>>>;
287
288 fn head_sha(&self) -> BoxFuture<Option<String>> {
289 async move {
290 self.revparse_batch(vec!["HEAD".into()])
291 .await
292 .unwrap_or_default()
293 .into_iter()
294 .next()
295 .flatten()
296 }
297 .boxed()
298 }
299
300 fn merge_message(&self) -> BoxFuture<Option<String>>;
301
302 fn status(&self, path_prefixes: &[RepoPath]) -> BoxFuture<Result<GitStatus>>;
303
304 fn branches(&self) -> BoxFuture<Result<Vec<Branch>>>;
305
306 fn change_branch(&self, name: String) -> BoxFuture<Result<()>>;
307 fn create_branch(&self, name: String) -> BoxFuture<Result<()>>;
308
309 fn reset(
310 &self,
311 commit: String,
312 mode: ResetMode,
313 env: Arc<HashMap<String, String>>,
314 ) -> BoxFuture<Result<()>>;
315
316 fn checkout_files(
317 &self,
318 commit: String,
319 paths: Vec<RepoPath>,
320 env: Arc<HashMap<String, String>>,
321 ) -> BoxFuture<Result<()>>;
322
323 fn show(&self, commit: String) -> BoxFuture<Result<CommitDetails>>;
324
325 fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<Result<CommitDiff>>;
326 fn blame(&self, path: RepoPath, content: Rope) -> BoxFuture<Result<crate::blame::Blame>>;
327
328 /// Returns the absolute path to the repository. For worktrees, this will be the path to the
329 /// worktree's gitdir within the main repository (typically `.git/worktrees/<name>`).
330 fn path(&self) -> PathBuf;
331
332 fn main_repository_path(&self) -> PathBuf;
333
334 /// Updates the index to match the worktree at the given paths.
335 ///
336 /// If any of the paths have been deleted from the worktree, they will be removed from the index if found there.
337 fn stage_paths(
338 &self,
339 paths: Vec<RepoPath>,
340 env: Arc<HashMap<String, String>>,
341 ) -> BoxFuture<Result<()>>;
342 /// Updates the index to match HEAD at the given paths.
343 ///
344 /// If any of the paths were previously staged but do not exist in HEAD, they will be removed from the index.
345 fn unstage_paths(
346 &self,
347 paths: Vec<RepoPath>,
348 env: Arc<HashMap<String, String>>,
349 ) -> BoxFuture<Result<()>>;
350
351 fn commit(
352 &self,
353 message: SharedString,
354 name_and_email: Option<(SharedString, SharedString)>,
355 options: CommitOptions,
356 env: Arc<HashMap<String, String>>,
357 ) -> BoxFuture<Result<()>>;
358
359 fn push(
360 &self,
361 branch_name: String,
362 upstream_name: String,
363 options: Option<PushOptions>,
364 askpass: AskPassDelegate,
365 env: Arc<HashMap<String, String>>,
366 // This method takes an AsyncApp to ensure it's invoked on the main thread,
        // otherwise git-credential-manager won't work.
368 cx: AsyncApp,
369 ) -> BoxFuture<Result<RemoteCommandOutput>>;
370
371 fn pull(
372 &self,
373 branch_name: String,
374 upstream_name: String,
375 askpass: AskPassDelegate,
376 env: Arc<HashMap<String, String>>,
377 // This method takes an AsyncApp to ensure it's invoked on the main thread,
        // otherwise git-credential-manager won't work.
379 cx: AsyncApp,
380 ) -> BoxFuture<Result<RemoteCommandOutput>>;
381
382 fn fetch(
383 &self,
384 askpass: AskPassDelegate,
385 env: Arc<HashMap<String, String>>,
386 // This method takes an AsyncApp to ensure it's invoked on the main thread,
        // otherwise git-credential-manager won't work.
388 cx: AsyncApp,
389 ) -> BoxFuture<Result<RemoteCommandOutput>>;
390
391 fn get_remotes(&self, branch_name: Option<String>) -> BoxFuture<Result<Vec<Remote>>>;
392
    /// Returns a list of remote branches that contain HEAD.
394 fn check_for_pushed_commit(&self) -> BoxFuture<Result<Vec<SharedString>>>;
395
    /// Runs `git diff`, comparing HEAD to either the index or the worktree (see [`DiffType`]).
397 fn diff(&self, diff: DiffType) -> BoxFuture<Result<String>>;
398
399 /// Creates a checkpoint for the repository.
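    ///
    /// A checkpoint is effectively a dangling commit capturing the current worktree contents,
    /// created without touching the user's branches or index. Illustrative sketch (not
    /// compiled; `repo` is an assumed handle to a `GitRepository`):
    ///
    /// ```ignore
    /// let before = repo.checkpoint().await?;
    /// // ... files are modified ...
    /// let after = repo.checkpoint().await?;
    /// if !repo.compare_checkpoints(before.clone(), after.clone()).await? {
    ///     let patch = repo.diff_checkpoints(before, after).await?;
    /// }
    /// ```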
400 fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>>;
401
402 /// Resets to a previously-created checkpoint.
403 fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<Result<()>>;
404
405 /// Compares two checkpoints, returning true if they are equal
406 fn compare_checkpoints(
407 &self,
408 left: GitRepositoryCheckpoint,
409 right: GitRepositoryCheckpoint,
410 ) -> BoxFuture<Result<bool>>;
411
412 /// Computes a diff between two checkpoints.
413 fn diff_checkpoints(
414 &self,
415 base_checkpoint: GitRepositoryCheckpoint,
416 target_checkpoint: GitRepositoryCheckpoint,
417 ) -> BoxFuture<Result<String>>;
418}
419
420pub enum DiffType {
421 HeadToIndex,
422 HeadToWorktree,
423}
424
425#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, JsonSchema)]
426pub enum PushOptions {
427 SetUpstream,
428 Force,
429}
430
431impl std::fmt::Debug for dyn GitRepository {
432 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
433 f.debug_struct("dyn GitRepository<...>").finish()
434 }
435}
436
437pub struct RealGitRepository {
438 pub repository: Arc<Mutex<git2::Repository>>,
439 pub git_binary_path: PathBuf,
440 executor: BackgroundExecutor,
441}
442
443impl RealGitRepository {
444 pub fn new(
445 dotgit_path: &Path,
446 git_binary_path: Option<PathBuf>,
447 executor: BackgroundExecutor,
448 ) -> Option<Self> {
449 let workdir_root = dotgit_path.parent()?;
450 let repository = git2::Repository::open(workdir_root).log_err()?;
451 Some(Self {
452 repository: Arc::new(Mutex::new(repository)),
453 git_binary_path: git_binary_path.unwrap_or_else(|| PathBuf::from("git")),
454 executor,
455 })
456 }
457
458 fn working_directory(&self) -> Result<PathBuf> {
459 self.repository
460 .lock()
461 .workdir()
462 .context("failed to read git work directory")
463 .map(Path::to_path_buf)
464 }
465}
466
467#[derive(Clone, Debug)]
468pub struct GitRepositoryCheckpoint {
469 pub commit_sha: Oid,
470}
471
472impl GitRepository for RealGitRepository {
473 fn reload_index(&self) {
474 if let Ok(mut index) = self.repository.lock().index() {
475 _ = index.read(false);
476 }
477 }
478
479 fn path(&self) -> PathBuf {
480 let repo = self.repository.lock();
481 repo.path().into()
482 }
483
484 fn main_repository_path(&self) -> PathBuf {
485 let repo = self.repository.lock();
486 repo.commondir().into()
487 }
488
489 fn show(&self, commit: String) -> BoxFuture<Result<CommitDetails>> {
490 let working_directory = self.working_directory();
491 self.executor
492 .spawn(async move {
493 let working_directory = working_directory?;
494 let output = new_std_command("git")
495 .current_dir(&working_directory)
496 .args([
497 "--no-optional-locks",
498 "show",
499 "--no-patch",
500 "--format=%H%x00%B%x00%at%x00%ae%x00%an%x00",
501 &commit,
502 ])
503 .output()?;
504 let output = std::str::from_utf8(&output.stdout)?;
505 let fields = output.split('\0').collect::<Vec<_>>();
506 if fields.len() != 6 {
507 bail!("unexpected git-show output for {commit:?}: {output:?}")
508 }
509 let sha = fields[0].to_string().into();
510 let message = fields[1].to_string().into();
511 let commit_timestamp = fields[2].parse()?;
512 let author_email = fields[3].to_string().into();
513 let author_name = fields[4].to_string().into();
514 Ok(CommitDetails {
515 sha,
516 message,
517 commit_timestamp,
518 author_email,
519 author_name,
520 })
521 })
522 .boxed()
523 }
524
525 fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<Result<CommitDiff>> {
526 let Some(working_directory) = self.repository.lock().workdir().map(ToOwned::to_owned)
527 else {
528 return future::ready(Err(anyhow!("no working directory"))).boxed();
529 };
530 cx.background_spawn(async move {
531 let show_output = util::command::new_std_command("git")
532 .current_dir(&working_directory)
533 .args([
534 "--no-optional-locks",
535 "show",
536 "--format=%P",
537 "-z",
538 "--no-renames",
539 "--name-status",
540 ])
541 .arg(&commit)
542 .stdin(Stdio::null())
543 .stdout(Stdio::piped())
544 .stderr(Stdio::piped())
545 .output()
546 .context("starting git show process")?;
547
548 let show_stdout = String::from_utf8_lossy(&show_output.stdout);
549 let mut lines = show_stdout.split('\n');
550 let parent_sha = lines.next().unwrap().trim().trim_end_matches('\0');
551 let changes = parse_git_diff_name_status(lines.next().unwrap_or(""));
552
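            // `git cat-file --batch=%(objectsize)` answers each `<rev>:<path>` request written
            // to its stdin with a line containing the object's size, followed by the raw object
            // contents and a trailing newline; the loop below follows that protocol to read the
            // old and new texts for every changed path.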
553 let mut cat_file_process = util::command::new_std_command("git")
554 .current_dir(&working_directory)
555 .args(["--no-optional-locks", "cat-file", "--batch=%(objectsize)"])
556 .stdin(Stdio::piped())
557 .stdout(Stdio::piped())
558 .stderr(Stdio::piped())
559 .spawn()
560 .context("starting git cat-file process")?;
561
562 use std::io::Write as _;
563 let mut files = Vec::<CommitFile>::new();
564 let mut stdin = BufWriter::with_capacity(512, cat_file_process.stdin.take().unwrap());
565 let mut stdout = BufReader::new(cat_file_process.stdout.take().unwrap());
566 let mut info_line = String::new();
567 let mut newline = [b'\0'];
568 for (path, status_code) in changes {
569 match status_code {
570 StatusCode::Modified => {
571 writeln!(&mut stdin, "{commit}:{}", path.display())?;
572 writeln!(&mut stdin, "{parent_sha}:{}", path.display())?;
573 }
574 StatusCode::Added => {
575 writeln!(&mut stdin, "{commit}:{}", path.display())?;
576 }
577 StatusCode::Deleted => {
578 writeln!(&mut stdin, "{parent_sha}:{}", path.display())?;
579 }
580 _ => continue,
581 }
582 stdin.flush()?;
583
584 info_line.clear();
585 stdout.read_line(&mut info_line)?;
586
587 let len = info_line.trim_end().parse().with_context(|| {
588 format!("invalid object size output from cat-file {info_line}")
589 })?;
590 let mut text = vec![0; len];
591 stdout.read_exact(&mut text)?;
592 stdout.read_exact(&mut newline)?;
593 let text = String::from_utf8_lossy(&text).to_string();
594
595 let mut old_text = None;
596 let mut new_text = None;
597 match status_code {
598 StatusCode::Modified => {
599 info_line.clear();
600 stdout.read_line(&mut info_line)?;
601 let len = info_line.trim_end().parse().with_context(|| {
602 format!("invalid object size output from cat-file {}", info_line)
603 })?;
604 let mut parent_text = vec![0; len];
605 stdout.read_exact(&mut parent_text)?;
606 stdout.read_exact(&mut newline)?;
607 old_text = Some(String::from_utf8_lossy(&parent_text).to_string());
608 new_text = Some(text);
609 }
610 StatusCode::Added => new_text = Some(text),
611 StatusCode::Deleted => old_text = Some(text),
612 _ => continue,
613 }
614
615 files.push(CommitFile {
616 path: path.into(),
617 old_text,
618 new_text,
619 })
620 }
621
622 Ok(CommitDiff { files })
623 })
624 .boxed()
625 }
626
627 fn reset(
628 &self,
629 commit: String,
630 mode: ResetMode,
631 env: Arc<HashMap<String, String>>,
632 ) -> BoxFuture<Result<()>> {
633 async move {
634 let working_directory = self.working_directory();
635
636 let mode_flag = match mode {
637 ResetMode::Mixed => "--mixed",
638 ResetMode::Soft => "--soft",
639 };
640
641 let output = new_smol_command(&self.git_binary_path)
642 .envs(env.iter())
643 .current_dir(&working_directory?)
644 .args(["reset", mode_flag, &commit])
645 .output()
646 .await?;
647 anyhow::ensure!(
648 output.status.success(),
649 "Failed to reset:\n{}",
650 String::from_utf8_lossy(&output.stderr),
651 );
652 Ok(())
653 }
654 .boxed()
655 }
656
657 fn checkout_files(
658 &self,
659 commit: String,
660 paths: Vec<RepoPath>,
661 env: Arc<HashMap<String, String>>,
662 ) -> BoxFuture<Result<()>> {
663 let working_directory = self.working_directory();
664 let git_binary_path = self.git_binary_path.clone();
665 async move {
666 if paths.is_empty() {
667 return Ok(());
668 }
669
670 let output = new_smol_command(&git_binary_path)
671 .current_dir(&working_directory?)
672 .envs(env.iter())
673 .args(["checkout", &commit, "--"])
674 .args(paths.iter().map(|path| path.as_ref()))
675 .output()
676 .await?;
677 anyhow::ensure!(
678 output.status.success(),
679 "Failed to checkout files:\n{}",
680 String::from_utf8_lossy(&output.stderr),
681 );
682 Ok(())
683 }
684 .boxed()
685 }
686
687 fn load_index_text(&self, path: RepoPath) -> BoxFuture<Option<String>> {
688 // https://git-scm.com/book/en/v2/Git-Internals-Git-Objects
689 const GIT_MODE_SYMLINK: u32 = 0o120000;
690
691 let repo = self.repository.clone();
692 self.executor
693 .spawn(async move {
694 fn logic(repo: &git2::Repository, path: &RepoPath) -> Result<Option<String>> {
695 // This check is required because index.get_path() unwraps internally :(
696 check_path_to_repo_path_errors(path)?;
697
698 let mut index = repo.index()?;
699 index.read(false)?;
700
701 const STAGE_NORMAL: i32 = 0;
702 let oid = match index.get_path(path, STAGE_NORMAL) {
703 Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id,
704 _ => return Ok(None),
705 };
706
707 let content = repo.find_blob(oid)?.content().to_owned();
708 Ok(String::from_utf8(content).ok())
709 }
710
711 match logic(&repo.lock(), &path) {
712 Ok(value) => return value,
713 Err(err) => log::error!("Error loading index text: {:?}", err),
714 }
715 None
716 })
717 .boxed()
718 }
719
720 fn load_committed_text(&self, path: RepoPath) -> BoxFuture<Option<String>> {
721 let repo = self.repository.clone();
722 self.executor
723 .spawn(async move {
724 let repo = repo.lock();
725 let head = repo.head().ok()?.peel_to_tree().log_err()?;
726 let entry = head.get_path(&path).ok()?;
727 if entry.filemode() == i32::from(git2::FileMode::Link) {
728 return None;
729 }
730 let content = repo.find_blob(entry.id()).log_err()?.content().to_owned();
731 String::from_utf8(content).ok()
732 })
733 .boxed()
734 }
735
736 fn set_index_text(
737 &self,
738 path: RepoPath,
739 content: Option<String>,
740 env: Arc<HashMap<String, String>>,
741 ) -> BoxFuture<anyhow::Result<()>> {
742 let working_directory = self.working_directory();
743 let git_binary_path = self.git_binary_path.clone();
744 self.executor
745 .spawn(async move {
746 let working_directory = working_directory?;
747 if let Some(content) = content {
748 let mut child = new_smol_command(&git_binary_path)
749 .current_dir(&working_directory)
750 .envs(env.iter())
751 .args(["hash-object", "-w", "--stdin"])
752 .stdin(Stdio::piped())
753 .stdout(Stdio::piped())
754 .spawn()?;
755 child
756 .stdin
757 .take()
758 .unwrap()
759 .write_all(content.as_bytes())
760 .await?;
761 let output = child.output().await?.stdout;
762 let sha = String::from_utf8(output)?;
763
764 log::debug!("indexing SHA: {sha}, path {path:?}");
765
766 let output = new_smol_command(&git_binary_path)
767 .current_dir(&working_directory)
768 .envs(env.iter())
769 .args(["update-index", "--add", "--cacheinfo", "100644", &sha])
770 .arg(path.to_unix_style())
771 .output()
772 .await?;
773
774 anyhow::ensure!(
775 output.status.success(),
776 "Failed to stage:\n{}",
777 String::from_utf8_lossy(&output.stderr)
778 );
779 } else {
780 let output = new_smol_command(&git_binary_path)
781 .current_dir(&working_directory)
782 .envs(env.iter())
783 .args(["update-index", "--force-remove"])
784 .arg(path.to_unix_style())
785 .output()
786 .await?;
787 anyhow::ensure!(
788 output.status.success(),
789 "Failed to unstage:\n{}",
790 String::from_utf8_lossy(&output.stderr)
791 );
792 }
793
794 Ok(())
795 })
796 .boxed()
797 }
798
799 fn remote_url(&self, name: &str) -> Option<String> {
800 let repo = self.repository.lock();
801 let remote = repo.find_remote(name).ok()?;
802 remote.url().map(|url| url.to_string())
803 }
804
805 fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<Result<Vec<Option<String>>>> {
806 let working_directory = self.working_directory();
807 self.executor
808 .spawn(async move {
809 let working_directory = working_directory?;
810 let mut process = new_std_command("git")
811 .current_dir(&working_directory)
812 .args([
813 "--no-optional-locks",
814 "cat-file",
815 "--batch-check=%(objectname)",
816 ])
817 .stdin(Stdio::piped())
818 .stdout(Stdio::piped())
819 .stderr(Stdio::piped())
820 .spawn()?;
821
822 let stdin = process
823 .stdin
824 .take()
825 .context("no stdin for git cat-file subprocess")?;
826 let mut stdin = BufWriter::new(stdin);
827 for rev in &revs {
                    writeln!(&mut stdin, "{rev}")?;
829 }
830 drop(stdin);
831
832 let output = process.wait_with_output()?;
833 let output = std::str::from_utf8(&output.stdout)?;
834 let shas = output
835 .lines()
836 .map(|line| {
837 if line.ends_with("missing") {
838 None
839 } else {
840 Some(line.to_string())
841 }
842 })
843 .collect::<Vec<_>>();
844
845 if shas.len() != revs.len() {
846 // In an octopus merge, git cat-file still only outputs the first sha from MERGE_HEAD.
847 bail!("unexpected number of shas")
848 }
849
850 Ok(shas)
851 })
852 .boxed()
853 }
854
855 fn merge_message(&self) -> BoxFuture<Option<String>> {
856 let path = self.path().join("MERGE_MSG");
857 self.executor
858 .spawn(async move { std::fs::read_to_string(&path).ok() })
859 .boxed()
860 }
861
862 fn status(&self, path_prefixes: &[RepoPath]) -> BoxFuture<Result<GitStatus>> {
863 let git_binary_path = self.git_binary_path.clone();
864 let working_directory = self.working_directory();
865 let path_prefixes = path_prefixes.to_owned();
866 self.executor
867 .spawn(async move {
868 let output = new_std_command(&git_binary_path)
869 .current_dir(working_directory?)
870 .args(git_status_args(&path_prefixes))
871 .output()?;
872 if output.status.success() {
873 let stdout = String::from_utf8_lossy(&output.stdout);
874 stdout.parse()
875 } else {
876 let stderr = String::from_utf8_lossy(&output.stderr);
877 anyhow::bail!("git status failed: {stderr}");
878 }
879 })
880 .boxed()
881 }
882
883 fn branches(&self) -> BoxFuture<Result<Vec<Branch>>> {
884 let working_directory = self.working_directory();
885 let git_binary_path = self.git_binary_path.clone();
886 self.executor
887 .spawn(async move {
888 let fields = [
889 "%(HEAD)",
890 "%(objectname)",
891 "%(parent)",
892 "%(refname)",
893 "%(upstream)",
894 "%(upstream:track)",
895 "%(committerdate:unix)",
896 "%(contents:subject)",
897 ]
898 .join("%00");
899 let args = vec![
900 "for-each-ref",
901 "refs/heads/**/*",
902 "refs/remotes/**/*",
903 "--format",
904 &fields,
905 ];
906 let working_directory = working_directory?;
907 let output = new_smol_command(&git_binary_path)
908 .current_dir(&working_directory)
909 .args(args)
910 .output()
911 .await?;
912
913 anyhow::ensure!(
914 output.status.success(),
915 "Failed to git git branches:\n{}",
916 String::from_utf8_lossy(&output.stderr)
917 );
918
919 let input = String::from_utf8_lossy(&output.stdout);
920
921 let mut branches = parse_branch_input(&input)?;
922 if branches.is_empty() {
923 let args = vec!["symbolic-ref", "--quiet", "HEAD"];
924
925 let output = new_smol_command(&git_binary_path)
926 .current_dir(&working_directory)
927 .args(args)
928 .output()
929 .await?;
930
931 // git symbolic-ref returns a non-0 exit code if HEAD points
932 // to something other than a branch
933 if output.status.success() {
934 let name = String::from_utf8_lossy(&output.stdout).trim().to_string();
935
936 branches.push(Branch {
937 ref_name: name.into(),
938 is_head: true,
939 upstream: None,
940 most_recent_commit: None,
941 });
942 }
943 }
944
945 Ok(branches)
946 })
947 .boxed()
948 }
949
950 fn change_branch(&self, name: String) -> BoxFuture<Result<()>> {
951 let repo = self.repository.clone();
952 self.executor
953 .spawn(async move {
954 let repo = repo.lock();
955 let branch = if let Ok(branch) = repo.find_branch(&name, BranchType::Local) {
956 branch
957 } else if let Ok(revision) = repo.find_branch(&name, BranchType::Remote) {
958 let (_, branch_name) =
959 name.split_once("/").context("Unexpected branch format")?;
960 let revision = revision.get();
961 let branch_commit = revision.peel_to_commit()?;
962 let mut branch = repo.branch(&branch_name, &branch_commit, false)?;
963 branch.set_upstream(Some(&name))?;
964 branch
965 } else {
966 anyhow::bail!("Branch not found");
967 };
968
969 let revision = branch.get();
970 let as_tree = revision.peel_to_tree()?;
971 repo.checkout_tree(as_tree.as_object(), None)?;
972 repo.set_head(
973 revision
974 .name()
975 .context("Branch name could not be retrieved")?,
976 )?;
977 Ok(())
978 })
979 .boxed()
980 }
981
982 fn create_branch(&self, name: String) -> BoxFuture<Result<()>> {
983 let repo = self.repository.clone();
984 self.executor
985 .spawn(async move {
986 let repo = repo.lock();
987 let current_commit = repo.head()?.peel_to_commit()?;
                repo.branch(&name, &current_commit, false)?;
989 Ok(())
990 })
991 .boxed()
992 }
993
994 fn blame(&self, path: RepoPath, content: Rope) -> BoxFuture<Result<crate::blame::Blame>> {
995 let working_directory = self.working_directory();
996 let git_binary_path = self.git_binary_path.clone();
997
998 let remote_url = self
999 .remote_url("upstream")
1000 .or_else(|| self.remote_url("origin"));
1001
1002 self.executor
1003 .spawn(async move {
1004 crate::blame::Blame::for_path(
1005 &git_binary_path,
1006 &working_directory?,
1007 &path,
1008 &content,
1009 remote_url,
1010 )
1011 .await
1012 })
1013 .boxed()
1014 }
1015
1016 fn diff(&self, diff: DiffType) -> BoxFuture<Result<String>> {
1017 let working_directory = self.working_directory();
1018 let git_binary_path = self.git_binary_path.clone();
1019 self.executor
1020 .spawn(async move {
1021 let args = match diff {
1022 DiffType::HeadToIndex => Some("--staged"),
1023 DiffType::HeadToWorktree => None,
1024 };
1025
1026 let output = new_smol_command(&git_binary_path)
1027 .current_dir(&working_directory?)
1028 .args(["diff"])
1029 .args(args)
1030 .output()
1031 .await?;
1032
1033 anyhow::ensure!(
1034 output.status.success(),
1035 "Failed to run git diff:\n{}",
1036 String::from_utf8_lossy(&output.stderr)
1037 );
1038 Ok(String::from_utf8_lossy(&output.stdout).to_string())
1039 })
1040 .boxed()
1041 }
1042
1043 fn stage_paths(
1044 &self,
1045 paths: Vec<RepoPath>,
1046 env: Arc<HashMap<String, String>>,
1047 ) -> BoxFuture<Result<()>> {
1048 let working_directory = self.working_directory();
1049 let git_binary_path = self.git_binary_path.clone();
1050 self.executor
1051 .spawn(async move {
1052 if !paths.is_empty() {
1053 let output = new_smol_command(&git_binary_path)
1054 .current_dir(&working_directory?)
1055 .envs(env.iter())
1056 .args(["update-index", "--add", "--remove", "--"])
1057 .args(paths.iter().map(|p| p.to_unix_style()))
1058 .output()
1059 .await?;
1060 anyhow::ensure!(
1061 output.status.success(),
1062 "Failed to stage paths:\n{}",
1063 String::from_utf8_lossy(&output.stderr),
1064 );
1065 }
1066 Ok(())
1067 })
1068 .boxed()
1069 }
1070
1071 fn unstage_paths(
1072 &self,
1073 paths: Vec<RepoPath>,
1074 env: Arc<HashMap<String, String>>,
1075 ) -> BoxFuture<Result<()>> {
1076 let working_directory = self.working_directory();
1077 let git_binary_path = self.git_binary_path.clone();
1078
1079 self.executor
1080 .spawn(async move {
1081 if !paths.is_empty() {
1082 let output = new_smol_command(&git_binary_path)
1083 .current_dir(&working_directory?)
1084 .envs(env.iter())
1085 .args(["reset", "--quiet", "--"])
1086 .args(paths.iter().map(|p| p.as_ref()))
1087 .output()
1088 .await?;
1089
1090 anyhow::ensure!(
1091 output.status.success(),
1092 "Failed to unstage:\n{}",
1093 String::from_utf8_lossy(&output.stderr),
1094 );
1095 }
1096 Ok(())
1097 })
1098 .boxed()
1099 }
1100
1101 fn commit(
1102 &self,
1103 message: SharedString,
1104 name_and_email: Option<(SharedString, SharedString)>,
1105 options: CommitOptions,
1106 env: Arc<HashMap<String, String>>,
1107 ) -> BoxFuture<Result<()>> {
1108 let working_directory = self.working_directory();
1109 self.executor
1110 .spawn(async move {
1111 let mut cmd = new_smol_command("git");
1112 cmd.current_dir(&working_directory?)
1113 .envs(env.iter())
1114 .args(["commit", "--quiet", "-m"])
1115 .arg(&message.to_string())
1116 .arg("--cleanup=strip");
1117
1118 if options.amend {
1119 cmd.arg("--amend");
1120 }
1121
1122 if let Some((name, email)) = name_and_email {
1123 cmd.arg("--author").arg(&format!("{name} <{email}>"));
1124 }
1125
1126 let output = cmd.output().await?;
1127
1128 anyhow::ensure!(
1129 output.status.success(),
1130 "Failed to commit:\n{}",
1131 String::from_utf8_lossy(&output.stderr)
1132 );
1133 Ok(())
1134 })
1135 .boxed()
1136 }
1137
1138 fn push(
1139 &self,
1140 branch_name: String,
1141 remote_name: String,
1142 options: Option<PushOptions>,
1143 ask_pass: AskPassDelegate,
1144 env: Arc<HashMap<String, String>>,
1145 cx: AsyncApp,
1146 ) -> BoxFuture<Result<RemoteCommandOutput>> {
1147 let working_directory = self.working_directory();
1148 let executor = cx.background_executor().clone();
1149 async move {
1150 let working_directory = working_directory?;
1151 let mut command = new_smol_command("git");
1152 command
1153 .envs(env.iter())
1154 .current_dir(&working_directory)
1155 .args(["push"])
1156 .args(options.map(|option| match option {
1157 PushOptions::SetUpstream => "--set-upstream",
1158 PushOptions::Force => "--force-with-lease",
1159 }))
1160 .arg(remote_name)
1161 .arg(format!("{}:{}", branch_name, branch_name))
1162 .stdin(smol::process::Stdio::null())
1163 .stdout(smol::process::Stdio::piped())
1164 .stderr(smol::process::Stdio::piped());
1165
1166 run_git_command(env, ask_pass, command, &executor).await
1167 }
1168 .boxed()
1169 }
1170
1171 fn pull(
1172 &self,
1173 branch_name: String,
1174 remote_name: String,
1175 ask_pass: AskPassDelegate,
1176 env: Arc<HashMap<String, String>>,
1177 cx: AsyncApp,
1178 ) -> BoxFuture<Result<RemoteCommandOutput>> {
1179 let working_directory = self.working_directory();
1180 let executor = cx.background_executor().clone();
1181 async move {
1182 let mut command = new_smol_command("git");
1183 command
1184 .envs(env.iter())
1185 .current_dir(&working_directory?)
1186 .args(["pull"])
1187 .arg(remote_name)
1188 .arg(branch_name)
1189 .stdout(smol::process::Stdio::piped())
1190 .stderr(smol::process::Stdio::piped());
1191
1192 run_git_command(env, ask_pass, command, &executor).await
1193 }
1194 .boxed()
1195 }
1196
1197 fn fetch(
1198 &self,
1199 ask_pass: AskPassDelegate,
1200 env: Arc<HashMap<String, String>>,
1201 cx: AsyncApp,
1202 ) -> BoxFuture<Result<RemoteCommandOutput>> {
1203 let working_directory = self.working_directory();
1204 let executor = cx.background_executor().clone();
1205 async move {
1206 let mut command = new_smol_command("git");
1207 command
1208 .envs(env.iter())
1209 .current_dir(&working_directory?)
1210 .args(["fetch", "--all"])
1211 .stdout(smol::process::Stdio::piped())
1212 .stderr(smol::process::Stdio::piped());
1213
1214 run_git_command(env, ask_pass, command, &executor).await
1215 }
1216 .boxed()
1217 }
1218
1219 fn get_remotes(&self, branch_name: Option<String>) -> BoxFuture<Result<Vec<Remote>>> {
1220 let working_directory = self.working_directory();
1221 let git_binary_path = self.git_binary_path.clone();
1222 self.executor
1223 .spawn(async move {
1224 let working_directory = working_directory?;
1225 if let Some(branch_name) = branch_name {
1226 let output = new_smol_command(&git_binary_path)
1227 .current_dir(&working_directory)
1228 .args(["config", "--get"])
1229 .arg(format!("branch.{}.remote", branch_name))
1230 .output()
1231 .await?;
1232
1233 if output.status.success() {
1234 let remote_name = String::from_utf8_lossy(&output.stdout);
1235
1236 return Ok(vec![Remote {
1237 name: remote_name.trim().to_string().into(),
1238 }]);
1239 }
1240 }
1241
1242 let output = new_smol_command(&git_binary_path)
1243 .current_dir(&working_directory)
1244 .args(["remote"])
1245 .output()
1246 .await?;
1247
1248 anyhow::ensure!(
1249 output.status.success(),
1250 "Failed to get remotes:\n{}",
1251 String::from_utf8_lossy(&output.stderr)
1252 );
1253 let remote_names = String::from_utf8_lossy(&output.stdout)
1254 .split('\n')
1255 .filter(|name| !name.is_empty())
1256 .map(|name| Remote {
1257 name: name.trim().to_string().into(),
1258 })
1259 .collect();
1260 Ok(remote_names)
1261 })
1262 .boxed()
1263 }
1264
1265 fn check_for_pushed_commit(&self) -> BoxFuture<Result<Vec<SharedString>>> {
1266 let working_directory = self.working_directory();
1267 let git_binary_path = self.git_binary_path.clone();
1268 self.executor
1269 .spawn(async move {
1270 let working_directory = working_directory?;
1271 let git_cmd = async |args: &[&str]| -> Result<String> {
1272 let output = new_smol_command(&git_binary_path)
1273 .current_dir(&working_directory)
1274 .args(args)
1275 .output()
1276 .await?;
1277 anyhow::ensure!(
1278 output.status.success(),
1279 String::from_utf8_lossy(&output.stderr).to_string()
1280 );
1281 Ok(String::from_utf8(output.stdout)?)
1282 };
1283
1284 let head = git_cmd(&["rev-parse", "HEAD"])
1285 .await
1286 .context("Failed to get HEAD")?
1287 .trim()
1288 .to_owned();
1289
1290 let mut remote_branches = vec![];
1291 let mut add_if_matching = async |remote_head: &str| {
1292 if let Ok(merge_base) = git_cmd(&["merge-base", &head, remote_head]).await {
1293 if merge_base.trim() == head {
1294 if let Some(s) = remote_head.strip_prefix("refs/remotes/") {
1295 remote_branches.push(s.to_owned().into());
1296 }
1297 }
1298 }
1299 };
1300
1301 // check the main branch of each remote
1302 let remotes = git_cmd(&["remote"])
1303 .await
1304 .context("Failed to get remotes")?;
1305 for remote in remotes.lines() {
1306 if let Ok(remote_head) =
1307 git_cmd(&["symbolic-ref", &format!("refs/remotes/{remote}/HEAD")]).await
1308 {
1309 add_if_matching(remote_head.trim()).await;
1310 }
1311 }
1312
1313 // ... and the remote branch that the checked-out one is tracking
1314 if let Ok(remote_head) =
1315 git_cmd(&["rev-parse", "--symbolic-full-name", "@{u}"]).await
1316 {
1317 add_if_matching(remote_head.trim()).await;
1318 }
1319
1320 Ok(remote_branches)
1321 })
1322 .boxed()
1323 }
1324
1325 fn checkpoint(&self) -> BoxFuture<'static, Result<GitRepositoryCheckpoint>> {
1326 let working_directory = self.working_directory();
1327 let git_binary_path = self.git_binary_path.clone();
1328 let executor = self.executor.clone();
1329 self.executor
1330 .spawn(async move {
1331 let working_directory = working_directory?;
1332 let mut git = GitBinary::new(git_binary_path, working_directory.clone(), executor)
1333 .envs(checkpoint_author_envs());
1334 git.with_temp_index(async |git| {
1335 let head_sha = git.run(&["rev-parse", "HEAD"]).await.ok();
1336 let mut excludes = exclude_files(git).await?;
1337
1338 git.run(&["add", "--all"]).await?;
1339 let tree = git.run(&["write-tree"]).await?;
1340 let checkpoint_sha = if let Some(head_sha) = head_sha.as_deref() {
1341 git.run(&["commit-tree", &tree, "-p", head_sha, "-m", "Checkpoint"])
1342 .await?
1343 } else {
1344 git.run(&["commit-tree", &tree, "-m", "Checkpoint"]).await?
1345 };
1346
1347 excludes.restore_original().await?;
1348
1349 Ok(GitRepositoryCheckpoint {
1350 commit_sha: checkpoint_sha.parse()?,
1351 })
1352 })
1353 .await
1354 })
1355 .boxed()
1356 }
1357
1358 fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<Result<()>> {
1359 let working_directory = self.working_directory();
1360 let git_binary_path = self.git_binary_path.clone();
1361
1362 let executor = self.executor.clone();
1363 self.executor
1364 .spawn(async move {
1365 let working_directory = working_directory?;
1366
1367 let git = GitBinary::new(git_binary_path, working_directory, executor);
1368 git.run(&[
1369 "restore",
1370 "--source",
1371 &checkpoint.commit_sha.to_string(),
1372 "--worktree",
1373 ".",
1374 ])
1375 .await?;
1376
1377 // TODO: We don't track binary and large files anymore,
1378 // so the following call would delete them.
1379 // Implement an alternative way to track files added by agent.
1380 //
1381 // git.with_temp_index(async move |git| {
1382 // git.run(&["read-tree", &checkpoint.commit_sha.to_string()])
1383 // .await?;
1384 // git.run(&["clean", "-d", "--force"]).await
1385 // })
1386 // .await?;
1387
1388 Ok(())
1389 })
1390 .boxed()
1391 }
1392
1393 fn compare_checkpoints(
1394 &self,
1395 left: GitRepositoryCheckpoint,
1396 right: GitRepositoryCheckpoint,
1397 ) -> BoxFuture<Result<bool>> {
1398 let working_directory = self.working_directory();
1399 let git_binary_path = self.git_binary_path.clone();
1400
1401 let executor = self.executor.clone();
1402 self.executor
1403 .spawn(async move {
1404 let working_directory = working_directory?;
1405 let git = GitBinary::new(git_binary_path, working_directory, executor);
1406 let result = git
1407 .run(&[
1408 "diff-tree",
1409 "--quiet",
1410 &left.commit_sha.to_string(),
1411 &right.commit_sha.to_string(),
1412 ])
1413 .await;
1414 match result {
1415 Ok(_) => Ok(true),
1416 Err(error) => {
1417 if let Some(GitBinaryCommandError { status, .. }) =
1418 error.downcast_ref::<GitBinaryCommandError>()
1419 {
1420 if status.code() == Some(1) {
1421 return Ok(false);
1422 }
1423 }
1424
1425 Err(error)
1426 }
1427 }
1428 })
1429 .boxed()
1430 }
1431
1432 fn diff_checkpoints(
1433 &self,
1434 base_checkpoint: GitRepositoryCheckpoint,
1435 target_checkpoint: GitRepositoryCheckpoint,
1436 ) -> BoxFuture<Result<String>> {
1437 let working_directory = self.working_directory();
1438 let git_binary_path = self.git_binary_path.clone();
1439
1440 let executor = self.executor.clone();
1441 self.executor
1442 .spawn(async move {
1443 let working_directory = working_directory?;
1444 let git = GitBinary::new(git_binary_path, working_directory, executor);
1445 git.run(&[
1446 "diff",
1447 "--find-renames",
1448 "--patch",
1449 &base_checkpoint.commit_sha.to_string(),
1450 &target_checkpoint.commit_sha.to_string(),
1451 ])
1452 .await
1453 })
1454 .boxed()
1455 }
1456}
1457
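/// Builds the argument list for `git status`: `--no-optional-locks` avoids taking optional
/// locks such as the index refresh lock, `--porcelain=v1` with `-z` yields stable,
/// NUL-separated machine-readable output, `--untracked-files=all` lists every untracked
/// file individually, and `--no-renames` disables rename detection.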
1458fn git_status_args(path_prefixes: &[RepoPath]) -> Vec<OsString> {
1459 let mut args = vec![
1460 OsString::from("--no-optional-locks"),
1461 OsString::from("status"),
1462 OsString::from("--porcelain=v1"),
1463 OsString::from("--untracked-files=all"),
1464 OsString::from("--no-renames"),
1465 OsString::from("-z"),
1466 ];
1467 args.extend(path_prefixes.iter().map(|path_prefix| {
1468 if path_prefix.0.as_ref() == Path::new("") {
1469 Path::new(".").into()
1470 } else {
1471 path_prefix.as_os_str().into()
1472 }
1473 }));
1474 args
1475}
1476
/// Temporarily git-ignores commonly ignored files, plus any untracked file of 2 MB or more
1478async fn exclude_files(git: &GitBinary) -> Result<GitExcludeOverride> {
1479 const MAX_SIZE: u64 = 2 * 1024 * 1024; // 2 MB
1480 let mut excludes = git.with_exclude_overrides().await?;
1481 excludes
1482 .add_excludes(include_str!("./checkpoint.gitignore"))
1483 .await?;
1484
1485 let working_directory = git.working_directory.clone();
1486 let untracked_files = git.list_untracked_files().await?;
1487 let excluded_paths = untracked_files.into_iter().map(|path| {
1488 let working_directory = working_directory.clone();
1489 smol::spawn(async move {
1490 let full_path = working_directory.join(path.clone());
1491 match smol::fs::metadata(&full_path).await {
1492 Ok(metadata) if metadata.is_file() && metadata.len() >= MAX_SIZE => {
1493 Some(PathBuf::from("/").join(path.clone()))
1494 }
1495 _ => None,
1496 }
1497 })
1498 });
1499
1500 let excluded_paths = futures::future::join_all(excluded_paths).await;
1501 let excluded_paths = excluded_paths.into_iter().flatten().collect::<Vec<_>>();
1502
1503 if !excluded_paths.is_empty() {
1504 let exclude_patterns = excluded_paths
1505 .into_iter()
1506 .map(|path| path.to_string_lossy().to_string())
1507 .collect::<Vec<_>>()
1508 .join("\n");
1509 excludes.add_excludes(&exclude_patterns).await?;
1510 }
1511
1512 Ok(excludes)
1513}
1514
1515struct GitBinary {
1516 git_binary_path: PathBuf,
1517 working_directory: PathBuf,
1518 executor: BackgroundExecutor,
1519 index_file_path: Option<PathBuf>,
1520 envs: HashMap<String, String>,
1521}
1522
1523impl GitBinary {
1524 fn new(
1525 git_binary_path: PathBuf,
1526 working_directory: PathBuf,
1527 executor: BackgroundExecutor,
1528 ) -> Self {
1529 Self {
1530 git_binary_path,
1531 working_directory,
1532 executor,
1533 index_file_path: None,
1534 envs: HashMap::default(),
1535 }
1536 }
1537
1538 async fn list_untracked_files(&self) -> Result<Vec<PathBuf>> {
1539 let status_output = self
1540 .run(&["status", "--porcelain=v1", "--untracked-files=all", "-z"])
1541 .await?;
1542
1543 let paths = status_output
1544 .split('\0')
1545 .filter(|entry| entry.len() >= 3 && entry.starts_with("?? "))
1546 .map(|entry| PathBuf::from(&entry[3..]))
1547 .collect::<Vec<_>>();
1548 Ok(paths)
1549 }
1550
1551 fn envs(mut self, envs: HashMap<String, String>) -> Self {
1552 self.envs = envs;
1553 self
1554 }
1555
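    /// Runs `f` with `GIT_INDEX_FILE` pointed at a throwaway copy of the repository's index,
    /// so that staging operations performed inside the closure (e.g. `add --all` followed by
    /// `write-tree`) don't disturb the user's real index. The temporary index file is removed
    /// once the closure completes, with a deferred cleanup as a fallback.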
1556 pub async fn with_temp_index<R>(
1557 &mut self,
1558 f: impl AsyncFnOnce(&Self) -> Result<R>,
1559 ) -> Result<R> {
1560 let index_file_path = self.path_for_index_id(Uuid::new_v4());
1561
1562 let delete_temp_index = util::defer({
1563 let index_file_path = index_file_path.clone();
1564 let executor = self.executor.clone();
1565 move || {
1566 executor
1567 .spawn(async move {
1568 smol::fs::remove_file(index_file_path).await.log_err();
1569 })
1570 .detach();
1571 }
1572 });
1573
1574 // Copy the default index file so that Git doesn't have to rebuild the
1575 // whole index from scratch. This might fail if this is an empty repository.
1576 smol::fs::copy(
1577 self.working_directory.join(".git").join("index"),
1578 &index_file_path,
1579 )
1580 .await
1581 .ok();
1582
1583 self.index_file_path = Some(index_file_path.clone());
1584 let result = f(self).await;
1585 self.index_file_path = None;
1586 let result = result?;
1587
1588 smol::fs::remove_file(index_file_path).await.ok();
1589 delete_temp_index.abort();
1590
1591 Ok(result)
1592 }
1593
1594 pub async fn with_exclude_overrides(&self) -> Result<GitExcludeOverride> {
1595 let path = self
1596 .working_directory
1597 .join(".git")
1598 .join("info")
1599 .join("exclude");
1600
1601 GitExcludeOverride::new(path).await
1602 }
1603
1604 fn path_for_index_id(&self, id: Uuid) -> PathBuf {
1605 self.working_directory
1606 .join(".git")
1607 .join(format!("index-{}.tmp", id))
1608 }
1609
1610 pub async fn run<S>(&self, args: impl IntoIterator<Item = S>) -> Result<String>
1611 where
1612 S: AsRef<OsStr>,
1613 {
1614 let mut stdout = self.run_raw(args).await?;
        if stdout.ends_with('\n') {
1616 stdout.pop();
1617 }
1618 Ok(stdout)
1619 }
1620
1621 /// Returns the result of the command without trimming the trailing newline.
1622 pub async fn run_raw<S>(&self, args: impl IntoIterator<Item = S>) -> Result<String>
1623 where
1624 S: AsRef<OsStr>,
1625 {
1626 let mut command = self.build_command(args);
1627 let output = command.output().await?;
1628 anyhow::ensure!(
1629 output.status.success(),
1630 GitBinaryCommandError {
1631 stdout: String::from_utf8_lossy(&output.stdout).to_string(),
1632 status: output.status,
1633 }
1634 );
1635 Ok(String::from_utf8(output.stdout)?)
1636 }
1637
1638 fn build_command<S>(&self, args: impl IntoIterator<Item = S>) -> smol::process::Command
1639 where
1640 S: AsRef<OsStr>,
1641 {
1642 let mut command = new_smol_command(&self.git_binary_path);
1643 command.current_dir(&self.working_directory);
1644 command.args(args);
1645 if let Some(index_file_path) = self.index_file_path.as_ref() {
1646 command.env("GIT_INDEX_FILE", index_file_path);
1647 }
1648 command.envs(&self.envs);
1649 command
1650 }
1651}
1652
1653#[derive(Error, Debug)]
1654#[error("Git command failed: {stdout}")]
1655struct GitBinaryCommandError {
1656 stdout: String,
1657 status: ExitStatus,
1658}
1659
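/// Runs a remote Git command (push, pull, or fetch). If the environment doesn't already
/// provide `GIT_ASKPASS`, an [`AskPassSession`] is set up so that credential prompts from
/// git/ssh can be handled through the provided [`AskPassDelegate`].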
1660async fn run_git_command(
1661 env: Arc<HashMap<String, String>>,
1662 ask_pass: AskPassDelegate,
1663 mut command: smol::process::Command,
1664 executor: &BackgroundExecutor,
1665) -> Result<RemoteCommandOutput> {
1666 if env.contains_key("GIT_ASKPASS") {
1667 let git_process = command.spawn()?;
1668 let output = git_process.output().await?;
1669 anyhow::ensure!(
1670 output.status.success(),
1671 "{}",
1672 String::from_utf8_lossy(&output.stderr)
1673 );
1674 Ok(RemoteCommandOutput {
1675 stdout: String::from_utf8_lossy(&output.stdout).to_string(),
1676 stderr: String::from_utf8_lossy(&output.stderr).to_string(),
1677 })
1678 } else {
1679 let ask_pass = AskPassSession::new(executor, ask_pass).await?;
1680 command
1681 .env("GIT_ASKPASS", ask_pass.script_path())
1682 .env("SSH_ASKPASS", ask_pass.script_path())
1683 .env("SSH_ASKPASS_REQUIRE", "force");
1684 let git_process = command.spawn()?;
1685
1686 run_askpass_command(ask_pass, git_process).await
1687 }
1688}
1689
1690async fn run_askpass_command(
1691 mut ask_pass: AskPassSession,
1692 git_process: smol::process::Child,
1693) -> anyhow::Result<RemoteCommandOutput> {
1694 select_biased! {
1695 result = ask_pass.run().fuse() => {
1696 match result {
1697 AskPassResult::CancelledByUser => {
1698 Err(anyhow!(REMOTE_CANCELLED_BY_USER))?
1699 }
1700 AskPassResult::Timedout => {
1701 Err(anyhow!("Connecting to host timed out"))?
1702 }
1703 }
1704 }
1705 output = git_process.output().fuse() => {
1706 let output = output?;
1707 anyhow::ensure!(
1708 output.status.success(),
1709 "{}",
1710 String::from_utf8_lossy(&output.stderr)
1711 );
1712 Ok(RemoteCommandOutput {
1713 stdout: String::from_utf8_lossy(&output.stdout).to_string(),
1714 stderr: String::from_utf8_lossy(&output.stderr).to_string(),
1715 })
1716 }
1717 }
1718}
1719
1720pub static WORK_DIRECTORY_REPO_PATH: LazyLock<RepoPath> =
1721 LazyLock::new(|| RepoPath(Path::new("").into()));
1722
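/// A path to an entry in a Git repository, relative to the repository's working-tree root.
///
/// Illustrative sketch (the file name is a made-up example):
///
/// ```ignore
/// let path = RepoPath::from_str("src/main.rs");
/// // On Windows, backslashes are rewritten to forward slashes for use in git commands.
/// assert_eq!(path.to_unix_style().to_string_lossy(), "src/main.rs");
/// ```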
1723#[derive(Clone, Debug, Ord, Hash, PartialOrd, Eq, PartialEq)]
1724pub struct RepoPath(pub Arc<Path>);
1725
1726impl RepoPath {
1727 pub fn new(path: PathBuf) -> Self {
1728 debug_assert!(path.is_relative(), "Repo paths must be relative");
1729
1730 RepoPath(path.into())
1731 }
1732
1733 pub fn from_str(path: &str) -> Self {
1734 let path = Path::new(path);
1735 debug_assert!(path.is_relative(), "Repo paths must be relative");
1736
1737 RepoPath(path.into())
1738 }
1739
1740 pub fn to_unix_style(&self) -> Cow<'_, OsStr> {
1741 #[cfg(target_os = "windows")]
1742 {
1743 use std::ffi::OsString;
1744
1745 let path = self.0.as_os_str().to_string_lossy().replace("\\", "/");
1746 Cow::Owned(OsString::from(path))
1747 }
1748 #[cfg(not(target_os = "windows"))]
1749 {
1750 Cow::Borrowed(self.0.as_os_str())
1751 }
1752 }
1753}
1754
1755impl std::fmt::Display for RepoPath {
1756 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1757 self.0.to_string_lossy().fmt(f)
1758 }
1759}
1760
1761impl From<&Path> for RepoPath {
1762 fn from(value: &Path) -> Self {
1763 RepoPath::new(value.into())
1764 }
1765}
1766
1767impl From<Arc<Path>> for RepoPath {
1768 fn from(value: Arc<Path>) -> Self {
1769 RepoPath(value)
1770 }
1771}
1772
1773impl From<PathBuf> for RepoPath {
1774 fn from(value: PathBuf) -> Self {
1775 RepoPath::new(value)
1776 }
1777}
1778
1779impl From<&str> for RepoPath {
1780 fn from(value: &str) -> Self {
1781 Self::from_str(value)
1782 }
1783}
1784
1785impl Default for RepoPath {
1786 fn default() -> Self {
1787 RepoPath(Path::new("").into())
1788 }
1789}
1790
1791impl AsRef<Path> for RepoPath {
1792 fn as_ref(&self) -> &Path {
1793 self.0.as_ref()
1794 }
1795}
1796
1797impl std::ops::Deref for RepoPath {
1798 type Target = Path;
1799
1800 fn deref(&self) -> &Self::Target {
1801 &self.0
1802 }
1803}
1804
1805impl Borrow<Path> for RepoPath {
1806 fn borrow(&self) -> &Path {
1807 self.0.as_ref()
1808 }
1809}
1810
1811#[derive(Debug)]
1812pub struct RepoPathDescendants<'a>(pub &'a Path);
1813
1814impl MapSeekTarget<RepoPath> for RepoPathDescendants<'_> {
1815 fn cmp_cursor(&self, key: &RepoPath) -> Ordering {
1816 if key.starts_with(self.0) {
1817 Ordering::Greater
1818 } else {
1819 self.0.cmp(key)
1820 }
1821 }
1822}
1823
1824fn parse_branch_input(input: &str) -> Result<Vec<Branch>> {
1825 let mut branches = Vec::new();
1826 for line in input.split('\n') {
1827 if line.is_empty() {
1828 continue;
1829 }
1830 let mut fields = line.split('\x00');
1831 let is_current_branch = fields.next().context("no HEAD")? == "*";
1832 let head_sha: SharedString = fields.next().context("no objectname")?.to_string().into();
1833 let parent_sha: SharedString = fields.next().context("no parent")?.to_string().into();
1834 let ref_name = fields.next().context("no refname")?.to_string().into();
1835 let upstream_name = fields.next().context("no upstream")?.to_string();
1836 let upstream_tracking = parse_upstream_track(fields.next().context("no upstream:track")?)?;
        let committer_date = fields.next().context("no committerdate")?.parse::<i64>()?;
1838 let subject: SharedString = fields
1839 .next()
1840 .context("no contents:subject")?
1841 .to_string()
1842 .into();
1843
1844 branches.push(Branch {
1845 is_head: is_current_branch,
            ref_name,
1847 most_recent_commit: Some(CommitSummary {
1848 sha: head_sha,
1849 subject,
                commit_timestamp: committer_date,
1851 has_parent: !parent_sha.is_empty(),
1852 }),
1853 upstream: if upstream_name.is_empty() {
1854 None
1855 } else {
1856 Some(Upstream {
1857 ref_name: upstream_name.into(),
1858 tracking: upstream_tracking,
1859 })
1860 },
1861 })
1862 }
1863
1864 Ok(branches)
1865}
1866
1867fn parse_upstream_track(upstream_track: &str) -> Result<UpstreamTracking> {
1868 if upstream_track == "" {
1869 return Ok(UpstreamTracking::Tracked(UpstreamTrackingStatus {
1870 ahead: 0,
1871 behind: 0,
1872 }));
1873 }
1874
1875 let upstream_track = upstream_track.strip_prefix("[").context("missing [")?;
    let upstream_track = upstream_track.strip_suffix("]").context("missing ]")?;
1877 let mut ahead: u32 = 0;
1878 let mut behind: u32 = 0;
1879 for component in upstream_track.split(", ") {
1880 if component == "gone" {
1881 return Ok(UpstreamTracking::Gone);
1882 }
1883 if let Some(ahead_num) = component.strip_prefix("ahead ") {
1884 ahead = ahead_num.parse::<u32>()?;
1885 }
1886 if let Some(behind_num) = component.strip_prefix("behind ") {
1887 behind = behind_num.parse::<u32>()?;
1888 }
1889 }
1890 Ok(UpstreamTracking::Tracked(UpstreamTrackingStatus {
1891 ahead,
1892 behind,
1893 }))
1894}
1895
1896fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> {
1897 match relative_file_path.components().next() {
1898 None => anyhow::bail!("repo path should not be empty"),
1899 Some(Component::Prefix(_)) => anyhow::bail!(
1900 "repo path `{}` should be relative, not a windows prefix",
1901 relative_file_path.to_string_lossy()
1902 ),
1903 Some(Component::RootDir) => {
1904 anyhow::bail!(
1905 "repo path `{}` should be relative",
1906 relative_file_path.to_string_lossy()
1907 )
1908 }
1909 Some(Component::CurDir) => {
1910 anyhow::bail!(
1911 "repo path `{}` should not start with `.`",
1912 relative_file_path.to_string_lossy()
1913 )
1914 }
1915 Some(Component::ParentDir) => {
1916 anyhow::bail!(
1917 "repo path `{}` should not start with `..`",
1918 relative_file_path.to_string_lossy()
1919 )
1920 }
1921 _ => Ok(()),
1922 }
1923}
1924
1925fn checkpoint_author_envs() -> HashMap<String, String> {
1926 HashMap::from_iter([
1927 ("GIT_AUTHOR_NAME".to_string(), "Zed".to_string()),
1928 ("GIT_AUTHOR_EMAIL".to_string(), "hi@zed.dev".to_string()),
1929 ("GIT_COMMITTER_NAME".to_string(), "Zed".to_string()),
1930 ("GIT_COMMITTER_EMAIL".to_string(), "hi@zed.dev".to_string()),
1931 ])
1932}
1933
1934#[cfg(test)]
1935mod tests {
1936 use super::*;
1937 use gpui::TestAppContext;
1938
1939 #[gpui::test]
1940 async fn test_checkpoint_basic(cx: &mut TestAppContext) {
1941 cx.executor().allow_parking();
1942
1943 let repo_dir = tempfile::tempdir().unwrap();
1944
1945 git2::Repository::init(repo_dir.path()).unwrap();
1946 let file_path = repo_dir.path().join("file");
1947 smol::fs::write(&file_path, "initial").await.unwrap();
1948
1949 let repo =
1950 RealGitRepository::new(&repo_dir.path().join(".git"), None, cx.executor()).unwrap();
1951 repo.stage_paths(
1952 vec![RepoPath::from_str("file")],
1953 Arc::new(HashMap::default()),
1954 )
1955 .await
1956 .unwrap();
1957 repo.commit(
1958 "Initial commit".into(),
1959 None,
1960 CommitOptions::default(),
1961 Arc::new(checkpoint_author_envs()),
1962 )
1963 .await
1964 .unwrap();
1965
1966 smol::fs::write(&file_path, "modified before checkpoint")
1967 .await
1968 .unwrap();
1969 smol::fs::write(repo_dir.path().join("new_file_before_checkpoint"), "1")
1970 .await
1971 .unwrap();
1972 let checkpoint = repo.checkpoint().await.unwrap();
1973
        // Ensure creating a checkpoint doesn't add any new branches visible to the user.
1975 assert_eq!(repo.branches().await.unwrap().len(), 1);
1976
1977 smol::fs::write(&file_path, "modified after checkpoint")
1978 .await
1979 .unwrap();
1980 repo.stage_paths(
1981 vec![RepoPath::from_str("file")],
1982 Arc::new(HashMap::default()),
1983 )
1984 .await
1985 .unwrap();
1986 repo.commit(
1987 "Commit after checkpoint".into(),
1988 None,
1989 CommitOptions::default(),
1990 Arc::new(checkpoint_author_envs()),
1991 )
1992 .await
1993 .unwrap();
1994
1995 smol::fs::remove_file(repo_dir.path().join("new_file_before_checkpoint"))
1996 .await
1997 .unwrap();
1998 smol::fs::write(repo_dir.path().join("new_file_after_checkpoint"), "2")
1999 .await
2000 .unwrap();
2001
2002 // Ensure checkpoint stays alive even after a Git GC.
2003 repo.gc().await.unwrap();
2004 repo.restore_checkpoint(checkpoint.clone()).await.unwrap();
2005
2006 assert_eq!(
2007 smol::fs::read_to_string(&file_path).await.unwrap(),
2008 "modified before checkpoint"
2009 );
2010 assert_eq!(
2011 smol::fs::read_to_string(repo_dir.path().join("new_file_before_checkpoint"))
2012 .await
2013 .unwrap(),
2014 "1"
2015 );
2016 // See TODO above
2017 // assert_eq!(
2018 // smol::fs::read_to_string(repo_dir.path().join("new_file_after_checkpoint"))
2019 // .await
2020 // .ok(),
2021 // None
2022 // );
2023 }
2024
2025 #[gpui::test]
2026 async fn test_checkpoint_empty_repo(cx: &mut TestAppContext) {
2027 cx.executor().allow_parking();
2028
2029 let repo_dir = tempfile::tempdir().unwrap();
2030 git2::Repository::init(repo_dir.path()).unwrap();
2031 let repo =
2032 RealGitRepository::new(&repo_dir.path().join(".git"), None, cx.executor()).unwrap();
2033
2034 smol::fs::write(repo_dir.path().join("foo"), "foo")
2035 .await
2036 .unwrap();
2037 let checkpoint_sha = repo.checkpoint().await.unwrap();
2038
        // Ensure creating a checkpoint doesn't add any new branches visible to the user.
2040 assert_eq!(repo.branches().await.unwrap().len(), 1);
2041
2042 smol::fs::write(repo_dir.path().join("foo"), "bar")
2043 .await
2044 .unwrap();
2045 smol::fs::write(repo_dir.path().join("baz"), "qux")
2046 .await
2047 .unwrap();
2048 repo.restore_checkpoint(checkpoint_sha).await.unwrap();
2049 assert_eq!(
2050 smol::fs::read_to_string(repo_dir.path().join("foo"))
2051 .await
2052 .unwrap(),
2053 "foo"
2054 );
2055 // See TODOs above
2056 // assert_eq!(
2057 // smol::fs::read_to_string(repo_dir.path().join("baz"))
2058 // .await
2059 // .ok(),
2060 // None
2061 // );
2062 }
2063
2064 #[gpui::test]
2065 async fn test_compare_checkpoints(cx: &mut TestAppContext) {
2066 cx.executor().allow_parking();
2067
2068 let repo_dir = tempfile::tempdir().unwrap();
2069 git2::Repository::init(repo_dir.path()).unwrap();
2070 let repo =
2071 RealGitRepository::new(&repo_dir.path().join(".git"), None, cx.executor()).unwrap();
2072
2073 smol::fs::write(repo_dir.path().join("file1"), "content1")
2074 .await
2075 .unwrap();
2076 let checkpoint1 = repo.checkpoint().await.unwrap();
2077
2078 smol::fs::write(repo_dir.path().join("file2"), "content2")
2079 .await
2080 .unwrap();
2081 let checkpoint2 = repo.checkpoint().await.unwrap();
2082
2083 assert!(
2084 !repo
2085 .compare_checkpoints(checkpoint1, checkpoint2.clone())
2086 .await
2087 .unwrap()
2088 );
2089
2090 let checkpoint3 = repo.checkpoint().await.unwrap();
2091 assert!(
2092 repo.compare_checkpoints(checkpoint2, checkpoint3)
2093 .await
2094 .unwrap()
2095 );
2096 }
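
    // Unit check of `parse_upstream_track` against the bracketed strings that
    // `%(upstream:track)` produces; the ahead/behind counts are arbitrary examples.
    #[test]
    fn test_parse_upstream_track() {
        assert_eq!(
            parse_upstream_track("").unwrap(),
            UpstreamTracking::Tracked(UpstreamTrackingStatus { ahead: 0, behind: 0 })
        );
        assert_eq!(
            parse_upstream_track("[ahead 3, behind 2]").unwrap(),
            UpstreamTracking::Tracked(UpstreamTrackingStatus { ahead: 3, behind: 2 })
        );
        assert_eq!(parse_upstream_track("[gone]").unwrap(), UpstreamTracking::Gone);
    }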
2097
2098 #[gpui::test]
2099 async fn test_checkpoint_exclude_binary_files(cx: &mut TestAppContext) {
2100 cx.executor().allow_parking();
2101
2102 let repo_dir = tempfile::tempdir().unwrap();
2103 let text_path = repo_dir.path().join("main.rs");
2104 let bin_path = repo_dir.path().join("binary.o");
2105
2106 git2::Repository::init(repo_dir.path()).unwrap();
2107
2108 smol::fs::write(&text_path, "fn main() {}").await.unwrap();
2109
2110 smol::fs::write(&bin_path, "some binary file here")
2111 .await
2112 .unwrap();
2113
2114 let repo =
2115 RealGitRepository::new(&repo_dir.path().join(".git"), None, cx.executor()).unwrap();
2116
2117 // initial commit
2118 repo.stage_paths(
2119 vec![RepoPath::from_str("main.rs")],
2120 Arc::new(HashMap::default()),
2121 )
2122 .await
2123 .unwrap();
2124 repo.commit(
2125 "Initial commit".into(),
2126 None,
2127 CommitOptions::default(),
2128 Arc::new(checkpoint_author_envs()),
2129 )
2130 .await
2131 .unwrap();
2132
2133 let checkpoint = repo.checkpoint().await.unwrap();
2134
2135 smol::fs::write(&text_path, "fn main() { println!(\"Modified\"); }")
2136 .await
2137 .unwrap();
2138 smol::fs::write(&bin_path, "Modified binary file")
2139 .await
2140 .unwrap();
2141
2142 repo.restore_checkpoint(checkpoint).await.unwrap();
2143
2144 // Text files should be restored to checkpoint state,
2145 // but binaries should not (they aren't tracked)
2146 assert_eq!(
2147 smol::fs::read_to_string(&text_path).await.unwrap(),
2148 "fn main() {}"
2149 );
2150
2151 assert_eq!(
2152 smol::fs::read_to_string(&bin_path).await.unwrap(),
2153 "Modified binary file"
2154 );
2155 }
2156
2157 #[test]
2158 fn test_branches_parsing() {
2159 // suppress "help: octal escapes are not supported, `\0` is always null"
2160 #[allow(clippy::octal_escapes)]
2161 let input = "*\0060964da10574cd9bf06463a53bf6e0769c5c45e\0\0refs/heads/zed-patches\0refs/remotes/origin/zed-patches\0\01733187470\0generated protobuf\n";
2162 assert_eq!(
2163 parse_branch_input(&input).unwrap(),
2164 vec![Branch {
2165 is_head: true,
2166 ref_name: "refs/heads/zed-patches".into(),
2167 upstream: Some(Upstream {
2168 ref_name: "refs/remotes/origin/zed-patches".into(),
2169 tracking: UpstreamTracking::Tracked(UpstreamTrackingStatus {
2170 ahead: 0,
2171 behind: 0
2172 })
2173 }),
2174 most_recent_commit: Some(CommitSummary {
2175 sha: "060964da10574cd9bf06463a53bf6e0769c5c45e".into(),
2176 subject: "generated protobuf".into(),
2177 commit_timestamp: 1733187470,
2178 has_parent: false,
2179 })
2180 }]
2181 )
2182 }
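
    // Illustrative check of `Branch::name` and `Branch::is_remote`; the ref names are
    // made-up examples.
    #[test]
    fn test_branch_name_strips_ref_prefix() {
        let local = Branch {
            is_head: false,
            ref_name: "refs/heads/feature/parser".into(),
            upstream: None,
            most_recent_commit: None,
        };
        assert_eq!(local.name(), "feature/parser");
        assert!(!local.is_remote());

        let remote = Branch {
            is_head: false,
            ref_name: "refs/remotes/origin/main".into(),
            upstream: None,
            most_recent_commit: None,
        };
        assert_eq!(remote.name(), "origin/main");
        assert!(remote.is_remote());
    }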
2183
2184 impl RealGitRepository {
2185 /// Force a Git garbage collection on the repository.
2186 fn gc(&self) -> BoxFuture<Result<()>> {
2187 let working_directory = self.working_directory();
2188 let git_binary_path = self.git_binary_path.clone();
2189 let executor = self.executor.clone();
2190 self.executor
2191 .spawn(async move {
2192 let git_binary_path = git_binary_path.clone();
2193 let working_directory = working_directory?;
2194 let git = GitBinary::new(git_binary_path, working_directory, executor);
2195 git.run(&["gc", "--prune"]).await?;
2196 Ok(())
2197 })
2198 .boxed()
2199 }
2200 }
2201}