1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4
5use crate::{
6 ProjectEnvironment, ProjectItem, ProjectPath,
7 buffer_store::{BufferStore, BufferStoreEvent},
8 worktree_store::{WorktreeStore, WorktreeStoreEvent},
9};
10use anyhow::{Context as _, Result, anyhow, bail};
11use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
12use buffer_diff::{BufferDiff, BufferDiffEvent};
13use client::ProjectId;
14use collections::HashMap;
15pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
16use fs::Fs;
17use futures::{
18 FutureExt, StreamExt,
19 channel::{mpsc, oneshot},
20 future::{self, Shared},
21 stream::FuturesOrdered,
22};
23use git::{
24 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
25 blame::Blame,
26 parse_git_remote_url,
27 repository::{
28 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
29 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
30 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
31 },
32 stash::{GitStash, StashEntry},
33 status::{
34 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
35 UnmergedStatus, UnmergedStatusCode,
36 },
37};
38use gpui::{
39 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
40 WeakEntity,
41};
42use language::{
43 Buffer, BufferEvent, Language, LanguageRegistry,
44 proto::{deserialize_version, serialize_version},
45};
46use parking_lot::Mutex;
47use postage::stream::Stream as _;
48use rpc::{
49 AnyProtoClient, TypedEnvelope,
50 proto::{self, git_reset, split_repository_update},
51};
52use serde::Deserialize;
53use std::{
54 cmp::Ordering,
55 collections::{BTreeSet, VecDeque},
56 future::Future,
57 mem,
58 ops::Range,
59 path::{Path, PathBuf},
60 str::FromStr,
61 sync::{
62 Arc,
63 atomic::{self, AtomicU64},
64 },
65 time::Instant,
66};
67use sum_tree::{Edit, SumTree, TreeSet};
68use task::Shell;
69use text::{Bias, BufferId};
70use util::{
71 ResultExt, debug_panic,
72 paths::{PathStyle, SanitizedPath},
73 post_inc,
74 rel_path::RelPath,
75};
76use worktree::{
77 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
78 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
79};
80use zeroize::Zeroize;
81
82pub struct GitStore {
83 state: GitStoreState,
84 buffer_store: Entity<BufferStore>,
85 worktree_store: Entity<WorktreeStore>,
86 repositories: HashMap<RepositoryId, Entity<Repository>>,
87 active_repo_id: Option<RepositoryId>,
88 #[allow(clippy::type_complexity)]
89 loading_diffs:
90 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
91 diffs: HashMap<BufferId, Entity<BufferGitState>>,
92 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
93 _subscriptions: Vec<Subscription>,
94}
95
96#[derive(Default)]
97struct SharedDiffs {
98 unstaged: Option<Entity<BufferDiff>>,
99 uncommitted: Option<Entity<BufferDiff>>,
100}
101
102struct BufferGitState {
103 unstaged_diff: Option<WeakEntity<BufferDiff>>,
104 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
105 conflict_set: Option<WeakEntity<ConflictSet>>,
106 recalculate_diff_task: Option<Task<Result<()>>>,
107 reparse_conflict_markers_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 language_registry: Option<Arc<LanguageRegistry>>,
110 conflict_updated_futures: Vec<oneshot::Sender<()>>,
111 recalculating_tx: postage::watch::Sender<bool>,
112
113 /// These operation counts are used to ensure that head and index text
114 /// values read from the git repository are up-to-date with any hunk staging
115 /// operations that have been performed on the BufferDiff.
116 ///
117 /// The operation count is incremented immediately when the user initiates a
118 /// hunk stage/unstage operation. Then, upon finishing writing the new index
119 /// text do disk, the `operation count as of write` is updated to reflect
120 /// the operation count that prompted the write.
121 hunk_staging_operation_count: usize,
122 hunk_staging_operation_count_as_of_write: usize,
123
124 head_text: Option<Arc<String>>,
125 index_text: Option<Arc<String>>,
126 head_changed: bool,
127 index_changed: bool,
128 language_changed: bool,
129}
130
131#[derive(Clone, Debug)]
132enum DiffBasesChange {
133 SetIndex(Option<String>),
134 SetHead(Option<String>),
135 SetEach {
136 index: Option<String>,
137 head: Option<String>,
138 },
139 SetBoth(Option<String>),
140}
141
142#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
143enum DiffKind {
144 Unstaged,
145 Uncommitted,
146}
147
148enum GitStoreState {
149 Local {
150 next_repository_id: Arc<AtomicU64>,
151 downstream: Option<LocalDownstreamState>,
152 project_environment: Entity<ProjectEnvironment>,
153 fs: Arc<dyn Fs>,
154 },
155 Remote {
156 upstream_client: AnyProtoClient,
157 upstream_project_id: u64,
158 downstream: Option<(AnyProtoClient, ProjectId)>,
159 },
160}
161
162enum DownstreamUpdate {
163 UpdateRepository(RepositorySnapshot),
164 RemoveRepository(RepositoryId),
165}
166
167struct LocalDownstreamState {
168 client: AnyProtoClient,
169 project_id: ProjectId,
170 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
171 _task: Task<Result<()>>,
172}
173
174#[derive(Clone, Debug)]
175pub struct GitStoreCheckpoint {
176 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
177}
178
179#[derive(Clone, Debug, PartialEq, Eq)]
180pub struct StatusEntry {
181 pub repo_path: RepoPath,
182 pub status: FileStatus,
183}
184
185impl StatusEntry {
186 fn to_proto(&self) -> proto::StatusEntry {
187 let simple_status = match self.status {
188 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
189 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
190 FileStatus::Tracked(TrackedStatus {
191 index_status,
192 worktree_status,
193 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
194 worktree_status
195 } else {
196 index_status
197 }),
198 };
199
200 proto::StatusEntry {
201 repo_path: self.repo_path.to_proto(),
202 simple_status,
203 status: Some(status_to_proto(self.status)),
204 }
205 }
206}
207
208impl TryFrom<proto::StatusEntry> for StatusEntry {
209 type Error = anyhow::Error;
210
211 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
212 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
213 let status = status_from_proto(value.simple_status, value.status)?;
214 Ok(Self { repo_path, status })
215 }
216}
217
218impl sum_tree::Item for StatusEntry {
219 type Summary = PathSummary<GitSummary>;
220
221 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
222 PathSummary {
223 max_path: self.repo_path.0.clone(),
224 item_summary: self.status.summary(),
225 }
226 }
227}
228
229impl sum_tree::KeyedItem for StatusEntry {
230 type Key = PathKey;
231
232 fn key(&self) -> Self::Key {
233 PathKey(self.repo_path.0.clone())
234 }
235}
236
237#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
238pub struct RepositoryId(pub u64);
239
240#[derive(Clone, Debug, Default, PartialEq, Eq)]
241pub struct MergeDetails {
242 pub conflicted_paths: TreeSet<RepoPath>,
243 pub message: Option<SharedString>,
244 pub heads: Vec<Option<SharedString>>,
245}
246
247#[derive(Clone, Debug, PartialEq, Eq)]
248pub struct RepositorySnapshot {
249 pub id: RepositoryId,
250 pub statuses_by_path: SumTree<StatusEntry>,
251 pub work_directory_abs_path: Arc<Path>,
252 pub path_style: PathStyle,
253 pub branch: Option<Branch>,
254 pub head_commit: Option<CommitDetails>,
255 pub scan_id: u64,
256 pub merge: MergeDetails,
257 pub remote_origin_url: Option<String>,
258 pub remote_upstream_url: Option<String>,
259 pub stash_entries: GitStash,
260}
261
262type JobId = u64;
263
264#[derive(Clone, Debug, PartialEq, Eq)]
265pub struct JobInfo {
266 pub start: Instant,
267 pub message: SharedString,
268}
269
270pub struct Repository {
271 this: WeakEntity<Self>,
272 snapshot: RepositorySnapshot,
273 commit_message_buffer: Option<Entity<Buffer>>,
274 git_store: WeakEntity<GitStore>,
275 // For a local repository, holds paths that have had worktree events since the last status scan completed,
276 // and that should be examined during the next status scan.
277 paths_needing_status_update: BTreeSet<RepoPath>,
278 job_sender: mpsc::UnboundedSender<GitJob>,
279 active_jobs: HashMap<JobId, JobInfo>,
280 job_id: JobId,
281 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
282 latest_askpass_id: u64,
283}
284
285impl std::ops::Deref for Repository {
286 type Target = RepositorySnapshot;
287
288 fn deref(&self) -> &Self::Target {
289 &self.snapshot
290 }
291}
292
293#[derive(Clone)]
294pub enum RepositoryState {
295 Local {
296 backend: Arc<dyn GitRepository>,
297 environment: Arc<HashMap<String, String>>,
298 },
299 Remote {
300 project_id: ProjectId,
301 client: AnyProtoClient,
302 },
303}
304
305#[derive(Clone, Debug, PartialEq, Eq)]
306pub enum RepositoryEvent {
307 StatusesChanged {
308 // TODO could report which statuses changed here
309 full_scan: bool,
310 },
311 MergeHeadsChanged,
312 BranchChanged,
313 StashEntriesChanged,
314}
315
316#[derive(Clone, Debug)]
317pub struct JobsUpdated;
318
319#[derive(Debug)]
320pub enum GitStoreEvent {
321 ActiveRepositoryChanged(Option<RepositoryId>),
322 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
323 RepositoryAdded,
324 RepositoryRemoved(RepositoryId),
325 IndexWriteError(anyhow::Error),
326 JobsUpdated,
327 ConflictsUpdated,
328}
329
330impl EventEmitter<RepositoryEvent> for Repository {}
331impl EventEmitter<JobsUpdated> for Repository {}
332impl EventEmitter<GitStoreEvent> for GitStore {}
333
334pub struct GitJob {
335 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
336 key: Option<GitJobKey>,
337}
338
339#[derive(PartialEq, Eq)]
340enum GitJobKey {
341 WriteIndex(RepoPath),
342 ReloadBufferDiffBases,
343 RefreshStatuses,
344 ReloadGitState,
345}
346
347impl GitStore {
348 pub fn local(
349 worktree_store: &Entity<WorktreeStore>,
350 buffer_store: Entity<BufferStore>,
351 environment: Entity<ProjectEnvironment>,
352 fs: Arc<dyn Fs>,
353 cx: &mut Context<Self>,
354 ) -> Self {
355 Self::new(
356 worktree_store.clone(),
357 buffer_store,
358 GitStoreState::Local {
359 next_repository_id: Arc::new(AtomicU64::new(1)),
360 downstream: None,
361 project_environment: environment,
362 fs,
363 },
364 cx,
365 )
366 }
367
368 pub fn remote(
369 worktree_store: &Entity<WorktreeStore>,
370 buffer_store: Entity<BufferStore>,
371 upstream_client: AnyProtoClient,
372 project_id: u64,
373 cx: &mut Context<Self>,
374 ) -> Self {
375 Self::new(
376 worktree_store.clone(),
377 buffer_store,
378 GitStoreState::Remote {
379 upstream_client,
380 upstream_project_id: project_id,
381 downstream: None,
382 },
383 cx,
384 )
385 }
386
387 fn new(
388 worktree_store: Entity<WorktreeStore>,
389 buffer_store: Entity<BufferStore>,
390 state: GitStoreState,
391 cx: &mut Context<Self>,
392 ) -> Self {
393 let _subscriptions = vec![
394 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
395 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
396 ];
397
398 GitStore {
399 state,
400 buffer_store,
401 worktree_store,
402 repositories: HashMap::default(),
403 active_repo_id: None,
404 _subscriptions,
405 loading_diffs: HashMap::default(),
406 shared_diffs: HashMap::default(),
407 diffs: HashMap::default(),
408 }
409 }
410
411 pub fn init(client: &AnyProtoClient) {
412 client.add_entity_request_handler(Self::handle_get_remotes);
413 client.add_entity_request_handler(Self::handle_get_branches);
414 client.add_entity_request_handler(Self::handle_get_default_branch);
415 client.add_entity_request_handler(Self::handle_change_branch);
416 client.add_entity_request_handler(Self::handle_create_branch);
417 client.add_entity_request_handler(Self::handle_rename_branch);
418 client.add_entity_request_handler(Self::handle_git_init);
419 client.add_entity_request_handler(Self::handle_push);
420 client.add_entity_request_handler(Self::handle_pull);
421 client.add_entity_request_handler(Self::handle_fetch);
422 client.add_entity_request_handler(Self::handle_stage);
423 client.add_entity_request_handler(Self::handle_unstage);
424 client.add_entity_request_handler(Self::handle_stash);
425 client.add_entity_request_handler(Self::handle_stash_pop);
426 client.add_entity_request_handler(Self::handle_stash_apply);
427 client.add_entity_request_handler(Self::handle_stash_drop);
428 client.add_entity_request_handler(Self::handle_commit);
429 client.add_entity_request_handler(Self::handle_reset);
430 client.add_entity_request_handler(Self::handle_show);
431 client.add_entity_request_handler(Self::handle_load_commit_diff);
432 client.add_entity_request_handler(Self::handle_checkout_files);
433 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
434 client.add_entity_request_handler(Self::handle_set_index_text);
435 client.add_entity_request_handler(Self::handle_askpass);
436 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
437 client.add_entity_request_handler(Self::handle_git_diff);
438 client.add_entity_request_handler(Self::handle_tree_diff);
439 client.add_entity_request_handler(Self::handle_get_blob_content);
440 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
441 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
442 client.add_entity_message_handler(Self::handle_update_diff_bases);
443 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
444 client.add_entity_request_handler(Self::handle_blame_buffer);
445 client.add_entity_message_handler(Self::handle_update_repository);
446 client.add_entity_message_handler(Self::handle_remove_repository);
447 client.add_entity_request_handler(Self::handle_git_clone);
448 client.add_entity_request_handler(Self::handle_get_worktrees);
449 client.add_entity_request_handler(Self::handle_create_worktree);
450 }
451
452 pub fn is_local(&self) -> bool {
453 matches!(self.state, GitStoreState::Local { .. })
454 }
455 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
456 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
457 let id = repo.read(cx).id;
458 if self.active_repo_id != Some(id) {
459 self.active_repo_id = Some(id);
460 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
461 }
462 }
463 }
464
465 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
466 match &mut self.state {
467 GitStoreState::Remote {
468 downstream: downstream_client,
469 ..
470 } => {
471 for repo in self.repositories.values() {
472 let update = repo.read(cx).snapshot.initial_update(project_id);
473 for update in split_repository_update(update) {
474 client.send(update).log_err();
475 }
476 }
477 *downstream_client = Some((client, ProjectId(project_id)));
478 }
479 GitStoreState::Local {
480 downstream: downstream_client,
481 ..
482 } => {
483 let mut snapshots = HashMap::default();
484 let (updates_tx, mut updates_rx) = mpsc::unbounded();
485 for repo in self.repositories.values() {
486 updates_tx
487 .unbounded_send(DownstreamUpdate::UpdateRepository(
488 repo.read(cx).snapshot.clone(),
489 ))
490 .ok();
491 }
492 *downstream_client = Some(LocalDownstreamState {
493 client: client.clone(),
494 project_id: ProjectId(project_id),
495 updates_tx,
496 _task: cx.spawn(async move |this, cx| {
497 cx.background_spawn(async move {
498 while let Some(update) = updates_rx.next().await {
499 match update {
500 DownstreamUpdate::UpdateRepository(snapshot) => {
501 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
502 {
503 let update =
504 snapshot.build_update(old_snapshot, project_id);
505 *old_snapshot = snapshot;
506 for update in split_repository_update(update) {
507 client.send(update)?;
508 }
509 } else {
510 let update = snapshot.initial_update(project_id);
511 for update in split_repository_update(update) {
512 client.send(update)?;
513 }
514 snapshots.insert(snapshot.id, snapshot);
515 }
516 }
517 DownstreamUpdate::RemoveRepository(id) => {
518 client.send(proto::RemoveRepository {
519 project_id,
520 id: id.to_proto(),
521 })?;
522 }
523 }
524 }
525 anyhow::Ok(())
526 })
527 .await
528 .ok();
529 this.update(cx, |this, _| {
530 if let GitStoreState::Local {
531 downstream: downstream_client,
532 ..
533 } = &mut this.state
534 {
535 downstream_client.take();
536 } else {
537 unreachable!("unshared called on remote store");
538 }
539 })
540 }),
541 });
542 }
543 }
544 }
545
546 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
547 match &mut self.state {
548 GitStoreState::Local {
549 downstream: downstream_client,
550 ..
551 } => {
552 downstream_client.take();
553 }
554 GitStoreState::Remote {
555 downstream: downstream_client,
556 ..
557 } => {
558 downstream_client.take();
559 }
560 }
561 self.shared_diffs.clear();
562 }
563
564 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
565 self.shared_diffs.remove(peer_id);
566 }
567
568 pub fn active_repository(&self) -> Option<Entity<Repository>> {
569 self.active_repo_id
570 .as_ref()
571 .map(|id| self.repositories[id].clone())
572 }
573
574 pub fn open_unstaged_diff(
575 &mut self,
576 buffer: Entity<Buffer>,
577 cx: &mut Context<Self>,
578 ) -> Task<Result<Entity<BufferDiff>>> {
579 let buffer_id = buffer.read(cx).remote_id();
580 if let Some(diff_state) = self.diffs.get(&buffer_id)
581 && let Some(unstaged_diff) = diff_state
582 .read(cx)
583 .unstaged_diff
584 .as_ref()
585 .and_then(|weak| weak.upgrade())
586 {
587 if let Some(task) =
588 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
589 {
590 return cx.background_executor().spawn(async move {
591 task.await;
592 Ok(unstaged_diff)
593 });
594 }
595 return Task::ready(Ok(unstaged_diff));
596 }
597
598 let Some((repo, repo_path)) =
599 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
600 else {
601 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
602 };
603
604 let task = self
605 .loading_diffs
606 .entry((buffer_id, DiffKind::Unstaged))
607 .or_insert_with(|| {
608 let staged_text = repo.update(cx, |repo, cx| {
609 repo.load_staged_text(buffer_id, repo_path, cx)
610 });
611 cx.spawn(async move |this, cx| {
612 Self::open_diff_internal(
613 this,
614 DiffKind::Unstaged,
615 staged_text.await.map(DiffBasesChange::SetIndex),
616 buffer,
617 cx,
618 )
619 .await
620 .map_err(Arc::new)
621 })
622 .shared()
623 })
624 .clone();
625
626 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
627 }
628
629 pub fn open_diff_since(
630 &mut self,
631 oid: Option<git::Oid>,
632 buffer: Entity<Buffer>,
633 repo: Entity<Repository>,
634 languages: Arc<LanguageRegistry>,
635 cx: &mut Context<Self>,
636 ) -> Task<Result<Entity<BufferDiff>>> {
637 cx.spawn(async move |this, cx| {
638 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
639 let content = match oid {
640 None => None,
641 Some(oid) => Some(
642 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
643 .await?,
644 ),
645 };
646 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
647
648 buffer_diff
649 .update(cx, |buffer_diff, cx| {
650 buffer_diff.set_base_text(
651 content.map(Arc::new),
652 buffer_snapshot.language().cloned(),
653 Some(languages.clone()),
654 buffer_snapshot.text,
655 cx,
656 )
657 })?
658 .await?;
659 let unstaged_diff = this
660 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
661 .await?;
662 buffer_diff.update(cx, |buffer_diff, _| {
663 buffer_diff.set_secondary_diff(unstaged_diff);
664 })?;
665
666 this.update(cx, |_, cx| {
667 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
668 .detach();
669 })?;
670
671 Ok(buffer_diff)
672 })
673 }
674
675 pub fn open_uncommitted_diff(
676 &mut self,
677 buffer: Entity<Buffer>,
678 cx: &mut Context<Self>,
679 ) -> Task<Result<Entity<BufferDiff>>> {
680 let buffer_id = buffer.read(cx).remote_id();
681
682 if let Some(diff_state) = self.diffs.get(&buffer_id)
683 && let Some(uncommitted_diff) = diff_state
684 .read(cx)
685 .uncommitted_diff
686 .as_ref()
687 .and_then(|weak| weak.upgrade())
688 {
689 if let Some(task) =
690 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
691 {
692 return cx.background_executor().spawn(async move {
693 task.await;
694 Ok(uncommitted_diff)
695 });
696 }
697 return Task::ready(Ok(uncommitted_diff));
698 }
699
700 let Some((repo, repo_path)) =
701 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
702 else {
703 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
704 };
705
706 let task = self
707 .loading_diffs
708 .entry((buffer_id, DiffKind::Uncommitted))
709 .or_insert_with(|| {
710 let changes = repo.update(cx, |repo, cx| {
711 repo.load_committed_text(buffer_id, repo_path, cx)
712 });
713
714 // todo(lw): hot foreground spawn
715 cx.spawn(async move |this, cx| {
716 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
717 .await
718 .map_err(Arc::new)
719 })
720 .shared()
721 })
722 .clone();
723
724 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
725 }
726
727 async fn open_diff_internal(
728 this: WeakEntity<Self>,
729 kind: DiffKind,
730 texts: Result<DiffBasesChange>,
731 buffer_entity: Entity<Buffer>,
732 cx: &mut AsyncApp,
733 ) -> Result<Entity<BufferDiff>> {
734 let diff_bases_change = match texts {
735 Err(e) => {
736 this.update(cx, |this, cx| {
737 let buffer = buffer_entity.read(cx);
738 let buffer_id = buffer.remote_id();
739 this.loading_diffs.remove(&(buffer_id, kind));
740 })?;
741 return Err(e);
742 }
743 Ok(change) => change,
744 };
745
746 this.update(cx, |this, cx| {
747 let buffer = buffer_entity.read(cx);
748 let buffer_id = buffer.remote_id();
749 let language = buffer.language().cloned();
750 let language_registry = buffer.language_registry();
751 let text_snapshot = buffer.text_snapshot();
752 this.loading_diffs.remove(&(buffer_id, kind));
753
754 let git_store = cx.weak_entity();
755 let diff_state = this
756 .diffs
757 .entry(buffer_id)
758 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
759
760 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
761
762 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
763 diff_state.update(cx, |diff_state, cx| {
764 diff_state.language = language;
765 diff_state.language_registry = language_registry;
766
767 match kind {
768 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
769 DiffKind::Uncommitted => {
770 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
771 diff
772 } else {
773 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
774 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
775 unstaged_diff
776 };
777
778 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
779 diff_state.uncommitted_diff = Some(diff.downgrade())
780 }
781 }
782
783 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
784 let rx = diff_state.wait_for_recalculation();
785
786 anyhow::Ok(async move {
787 if let Some(rx) = rx {
788 rx.await;
789 }
790 Ok(diff)
791 })
792 })
793 })??
794 .await
795 }
796
797 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
798 let diff_state = self.diffs.get(&buffer_id)?;
799 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
800 }
801
802 pub fn get_uncommitted_diff(
803 &self,
804 buffer_id: BufferId,
805 cx: &App,
806 ) -> Option<Entity<BufferDiff>> {
807 let diff_state = self.diffs.get(&buffer_id)?;
808 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
809 }
810
811 pub fn open_conflict_set(
812 &mut self,
813 buffer: Entity<Buffer>,
814 cx: &mut Context<Self>,
815 ) -> Entity<ConflictSet> {
816 log::debug!("open conflict set");
817 let buffer_id = buffer.read(cx).remote_id();
818
819 if let Some(git_state) = self.diffs.get(&buffer_id)
820 && let Some(conflict_set) = git_state
821 .read(cx)
822 .conflict_set
823 .as_ref()
824 .and_then(|weak| weak.upgrade())
825 {
826 let conflict_set = conflict_set;
827 let buffer_snapshot = buffer.read(cx).text_snapshot();
828
829 git_state.update(cx, |state, cx| {
830 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
831 });
832
833 return conflict_set;
834 }
835
836 let is_unmerged = self
837 .repository_and_path_for_buffer_id(buffer_id, cx)
838 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
839 let git_store = cx.weak_entity();
840 let buffer_git_state = self
841 .diffs
842 .entry(buffer_id)
843 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
844 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
845
846 self._subscriptions
847 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
848 cx.emit(GitStoreEvent::ConflictsUpdated);
849 }));
850
851 buffer_git_state.update(cx, |state, cx| {
852 state.conflict_set = Some(conflict_set.downgrade());
853 let buffer_snapshot = buffer.read(cx).text_snapshot();
854 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
855 });
856
857 conflict_set
858 }
859
860 pub fn project_path_git_status(
861 &self,
862 project_path: &ProjectPath,
863 cx: &App,
864 ) -> Option<FileStatus> {
865 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
866 Some(repo.read(cx).status_for_path(&repo_path)?.status)
867 }
868
869 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
870 let mut work_directory_abs_paths = Vec::new();
871 let mut checkpoints = Vec::new();
872 for repository in self.repositories.values() {
873 repository.update(cx, |repository, _| {
874 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
875 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
876 });
877 }
878
879 cx.background_executor().spawn(async move {
880 let checkpoints = future::try_join_all(checkpoints).await?;
881 Ok(GitStoreCheckpoint {
882 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
883 .into_iter()
884 .zip(checkpoints)
885 .collect(),
886 })
887 })
888 }
889
890 pub fn restore_checkpoint(
891 &self,
892 checkpoint: GitStoreCheckpoint,
893 cx: &mut App,
894 ) -> Task<Result<()>> {
895 let repositories_by_work_dir_abs_path = self
896 .repositories
897 .values()
898 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
899 .collect::<HashMap<_, _>>();
900
901 let mut tasks = Vec::new();
902 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
903 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
904 let restore = repository.update(cx, |repository, _| {
905 repository.restore_checkpoint(checkpoint)
906 });
907 tasks.push(async move { restore.await? });
908 }
909 }
910 cx.background_spawn(async move {
911 future::try_join_all(tasks).await?;
912 Ok(())
913 })
914 }
915
916 /// Compares two checkpoints, returning true if they are equal.
917 pub fn compare_checkpoints(
918 &self,
919 left: GitStoreCheckpoint,
920 mut right: GitStoreCheckpoint,
921 cx: &mut App,
922 ) -> Task<Result<bool>> {
923 let repositories_by_work_dir_abs_path = self
924 .repositories
925 .values()
926 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
927 .collect::<HashMap<_, _>>();
928
929 let mut tasks = Vec::new();
930 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
931 if let Some(right_checkpoint) = right
932 .checkpoints_by_work_dir_abs_path
933 .remove(&work_dir_abs_path)
934 {
935 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
936 {
937 let compare = repository.update(cx, |repository, _| {
938 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
939 });
940
941 tasks.push(async move { compare.await? });
942 }
943 } else {
944 return Task::ready(Ok(false));
945 }
946 }
947 cx.background_spawn(async move {
948 Ok(future::try_join_all(tasks)
949 .await?
950 .into_iter()
951 .all(|result| result))
952 })
953 }
954
955 /// Blames a buffer.
956 pub fn blame_buffer(
957 &self,
958 buffer: &Entity<Buffer>,
959 version: Option<clock::Global>,
960 cx: &mut App,
961 ) -> Task<Result<Option<Blame>>> {
962 let buffer = buffer.read(cx);
963 let Some((repo, repo_path)) =
964 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
965 else {
966 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
967 };
968 let content = match &version {
969 Some(version) => buffer.rope_for_version(version),
970 None => buffer.as_rope().clone(),
971 };
972 let version = version.unwrap_or(buffer.version());
973 let buffer_id = buffer.remote_id();
974
975 let rx = repo.update(cx, |repo, _| {
976 repo.send_job(None, move |state, _| async move {
977 match state {
978 RepositoryState::Local { backend, .. } => backend
979 .blame(repo_path.clone(), content)
980 .await
981 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
982 .map(Some),
983 RepositoryState::Remote { project_id, client } => {
984 let response = client
985 .request(proto::BlameBuffer {
986 project_id: project_id.to_proto(),
987 buffer_id: buffer_id.into(),
988 version: serialize_version(&version),
989 })
990 .await?;
991 Ok(deserialize_blame_buffer_response(response))
992 }
993 }
994 })
995 });
996
997 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
998 }
999
1000 pub fn get_permalink_to_line(
1001 &self,
1002 buffer: &Entity<Buffer>,
1003 selection: Range<u32>,
1004 cx: &mut App,
1005 ) -> Task<Result<url::Url>> {
1006 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1007 return Task::ready(Err(anyhow!("buffer has no file")));
1008 };
1009
1010 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1011 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1012 cx,
1013 ) else {
1014 // If we're not in a Git repo, check whether this is a Rust source
1015 // file in the Cargo registry (presumably opened with go-to-definition
1016 // from a normal Rust file). If so, we can put together a permalink
1017 // using crate metadata.
1018 if buffer
1019 .read(cx)
1020 .language()
1021 .is_none_or(|lang| lang.name() != "Rust".into())
1022 {
1023 return Task::ready(Err(anyhow!("no permalink available")));
1024 }
1025 let file_path = file.worktree.read(cx).absolutize(&file.path);
1026 return cx.spawn(async move |cx| {
1027 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1028 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1029 .context("no permalink available")
1030 });
1031 };
1032
1033 let buffer_id = buffer.read(cx).remote_id();
1034 let branch = repo.read(cx).branch.clone();
1035 let remote = branch
1036 .as_ref()
1037 .and_then(|b| b.upstream.as_ref())
1038 .and_then(|b| b.remote_name())
1039 .unwrap_or("origin")
1040 .to_string();
1041
1042 let rx = repo.update(cx, |repo, _| {
1043 repo.send_job(None, move |state, cx| async move {
1044 match state {
1045 RepositoryState::Local { backend, .. } => {
1046 let origin_url = backend
1047 .remote_url(&remote)
1048 .with_context(|| format!("remote \"{remote}\" not found"))?;
1049
1050 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1051
1052 let provider_registry =
1053 cx.update(GitHostingProviderRegistry::default_global)?;
1054
1055 let (provider, remote) =
1056 parse_git_remote_url(provider_registry, &origin_url)
1057 .context("parsing Git remote URL")?;
1058
1059 Ok(provider.build_permalink(
1060 remote,
1061 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1062 ))
1063 }
1064 RepositoryState::Remote { project_id, client } => {
1065 let response = client
1066 .request(proto::GetPermalinkToLine {
1067 project_id: project_id.to_proto(),
1068 buffer_id: buffer_id.into(),
1069 selection: Some(proto::Range {
1070 start: selection.start as u64,
1071 end: selection.end as u64,
1072 }),
1073 })
1074 .await?;
1075
1076 url::Url::parse(&response.permalink).context("failed to parse permalink")
1077 }
1078 }
1079 })
1080 });
1081 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1082 }
1083
1084 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1085 match &self.state {
1086 GitStoreState::Local {
1087 downstream: downstream_client,
1088 ..
1089 } => downstream_client
1090 .as_ref()
1091 .map(|state| (state.client.clone(), state.project_id)),
1092 GitStoreState::Remote {
1093 downstream: downstream_client,
1094 ..
1095 } => downstream_client.clone(),
1096 }
1097 }
1098
1099 fn upstream_client(&self) -> Option<AnyProtoClient> {
1100 match &self.state {
1101 GitStoreState::Local { .. } => None,
1102 GitStoreState::Remote {
1103 upstream_client, ..
1104 } => Some(upstream_client.clone()),
1105 }
1106 }
1107
1108 fn on_worktree_store_event(
1109 &mut self,
1110 worktree_store: Entity<WorktreeStore>,
1111 event: &WorktreeStoreEvent,
1112 cx: &mut Context<Self>,
1113 ) {
1114 let GitStoreState::Local {
1115 project_environment,
1116 downstream,
1117 next_repository_id,
1118 fs,
1119 } = &self.state
1120 else {
1121 return;
1122 };
1123
1124 match event {
1125 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1126 if let Some(worktree) = self
1127 .worktree_store
1128 .read(cx)
1129 .worktree_for_id(*worktree_id, cx)
1130 {
1131 let paths_by_git_repo =
1132 self.process_updated_entries(&worktree, updated_entries, cx);
1133 let downstream = downstream
1134 .as_ref()
1135 .map(|downstream| downstream.updates_tx.clone());
1136 cx.spawn(async move |_, cx| {
1137 let paths_by_git_repo = paths_by_git_repo.await;
1138 for (repo, paths) in paths_by_git_repo {
1139 repo.update(cx, |repo, cx| {
1140 repo.paths_changed(paths, downstream.clone(), cx);
1141 })
1142 .ok();
1143 }
1144 })
1145 .detach();
1146 }
1147 }
1148 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1149 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1150 else {
1151 return;
1152 };
1153 if !worktree.read(cx).is_visible() {
1154 log::debug!(
1155 "not adding repositories for local worktree {:?} because it's not visible",
1156 worktree.read(cx).abs_path()
1157 );
1158 return;
1159 }
1160 self.update_repositories_from_worktree(
1161 project_environment.clone(),
1162 next_repository_id.clone(),
1163 downstream
1164 .as_ref()
1165 .map(|downstream| downstream.updates_tx.clone()),
1166 changed_repos.clone(),
1167 fs.clone(),
1168 cx,
1169 );
1170 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1171 }
1172 _ => {}
1173 }
1174 }
1175 fn on_repository_event(
1176 &mut self,
1177 repo: Entity<Repository>,
1178 event: &RepositoryEvent,
1179 cx: &mut Context<Self>,
1180 ) {
1181 let id = repo.read(cx).id;
1182 let repo_snapshot = repo.read(cx).snapshot.clone();
1183 for (buffer_id, diff) in self.diffs.iter() {
1184 if let Some((buffer_repo, repo_path)) =
1185 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1186 && buffer_repo == repo
1187 {
1188 diff.update(cx, |diff, cx| {
1189 if let Some(conflict_set) = &diff.conflict_set {
1190 let conflict_status_changed =
1191 conflict_set.update(cx, |conflict_set, cx| {
1192 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1193 conflict_set.set_has_conflict(has_conflict, cx)
1194 })?;
1195 if conflict_status_changed {
1196 let buffer_store = self.buffer_store.read(cx);
1197 if let Some(buffer) = buffer_store.get(*buffer_id) {
1198 let _ = diff
1199 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1200 }
1201 }
1202 }
1203 anyhow::Ok(())
1204 })
1205 .ok();
1206 }
1207 }
1208 cx.emit(GitStoreEvent::RepositoryUpdated(
1209 id,
1210 event.clone(),
1211 self.active_repo_id == Some(id),
1212 ))
1213 }
1214
1215 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1216 cx.emit(GitStoreEvent::JobsUpdated)
1217 }
1218
1219 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1220 fn update_repositories_from_worktree(
1221 &mut self,
1222 project_environment: Entity<ProjectEnvironment>,
1223 next_repository_id: Arc<AtomicU64>,
1224 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1225 updated_git_repositories: UpdatedGitRepositoriesSet,
1226 fs: Arc<dyn Fs>,
1227 cx: &mut Context<Self>,
1228 ) {
1229 let mut removed_ids = Vec::new();
1230 for update in updated_git_repositories.iter() {
1231 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1232 let existing_work_directory_abs_path =
1233 repo.read(cx).work_directory_abs_path.clone();
1234 Some(&existing_work_directory_abs_path)
1235 == update.old_work_directory_abs_path.as_ref()
1236 || Some(&existing_work_directory_abs_path)
1237 == update.new_work_directory_abs_path.as_ref()
1238 }) {
1239 if let Some(new_work_directory_abs_path) =
1240 update.new_work_directory_abs_path.clone()
1241 {
1242 existing.update(cx, |existing, cx| {
1243 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1244 existing.schedule_scan(updates_tx.clone(), cx);
1245 });
1246 } else {
1247 removed_ids.push(*id);
1248 }
1249 } else if let UpdatedGitRepository {
1250 new_work_directory_abs_path: Some(work_directory_abs_path),
1251 dot_git_abs_path: Some(dot_git_abs_path),
1252 repository_dir_abs_path: Some(repository_dir_abs_path),
1253 common_dir_abs_path: Some(common_dir_abs_path),
1254 ..
1255 } = update
1256 {
1257 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1258 let git_store = cx.weak_entity();
1259 let repo = cx.new(|cx| {
1260 let mut repo = Repository::local(
1261 id,
1262 work_directory_abs_path.clone(),
1263 dot_git_abs_path.clone(),
1264 repository_dir_abs_path.clone(),
1265 common_dir_abs_path.clone(),
1266 project_environment.downgrade(),
1267 fs.clone(),
1268 git_store,
1269 cx,
1270 );
1271 repo.schedule_scan(updates_tx.clone(), cx);
1272 repo
1273 });
1274 self._subscriptions
1275 .push(cx.subscribe(&repo, Self::on_repository_event));
1276 self._subscriptions
1277 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1278 self.repositories.insert(id, repo);
1279 cx.emit(GitStoreEvent::RepositoryAdded);
1280 self.active_repo_id.get_or_insert_with(|| {
1281 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1282 id
1283 });
1284 }
1285 }
1286
1287 for id in removed_ids {
1288 if self.active_repo_id == Some(id) {
1289 self.active_repo_id = None;
1290 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1291 }
1292 self.repositories.remove(&id);
1293 if let Some(updates_tx) = updates_tx.as_ref() {
1294 updates_tx
1295 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1296 .ok();
1297 }
1298 }
1299 }
1300
1301 fn on_buffer_store_event(
1302 &mut self,
1303 _: Entity<BufferStore>,
1304 event: &BufferStoreEvent,
1305 cx: &mut Context<Self>,
1306 ) {
1307 match event {
1308 BufferStoreEvent::BufferAdded(buffer) => {
1309 cx.subscribe(buffer, |this, buffer, event, cx| {
1310 if let BufferEvent::LanguageChanged = event {
1311 let buffer_id = buffer.read(cx).remote_id();
1312 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1313 diff_state.update(cx, |diff_state, cx| {
1314 diff_state.buffer_language_changed(buffer, cx);
1315 });
1316 }
1317 }
1318 })
1319 .detach();
1320 }
1321 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1322 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1323 diffs.remove(buffer_id);
1324 }
1325 }
1326 BufferStoreEvent::BufferDropped(buffer_id) => {
1327 self.diffs.remove(buffer_id);
1328 for diffs in self.shared_diffs.values_mut() {
1329 diffs.remove(buffer_id);
1330 }
1331 }
1332
1333 _ => {}
1334 }
1335 }
1336
1337 pub fn recalculate_buffer_diffs(
1338 &mut self,
1339 buffers: Vec<Entity<Buffer>>,
1340 cx: &mut Context<Self>,
1341 ) -> impl Future<Output = ()> + use<> {
1342 let mut futures = Vec::new();
1343 for buffer in buffers {
1344 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1345 let buffer = buffer.read(cx).text_snapshot();
1346 diff_state.update(cx, |diff_state, cx| {
1347 diff_state.recalculate_diffs(buffer.clone(), cx);
1348 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1349 });
1350 futures.push(diff_state.update(cx, |diff_state, cx| {
1351 diff_state
1352 .reparse_conflict_markers(buffer, cx)
1353 .map(|_| {})
1354 .boxed()
1355 }));
1356 }
1357 }
1358 async move {
1359 futures::future::join_all(futures).await;
1360 }
1361 }
1362
1363 fn on_buffer_diff_event(
1364 &mut self,
1365 diff: Entity<buffer_diff::BufferDiff>,
1366 event: &BufferDiffEvent,
1367 cx: &mut Context<Self>,
1368 ) {
1369 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1370 let buffer_id = diff.read(cx).buffer_id;
1371 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1372 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1373 diff_state.hunk_staging_operation_count += 1;
1374 diff_state.hunk_staging_operation_count
1375 });
1376 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1377 let recv = repo.update(cx, |repo, cx| {
1378 log::debug!("hunks changed for {}", path.as_unix_str());
1379 repo.spawn_set_index_text_job(
1380 path,
1381 new_index_text.as_ref().map(|rope| rope.to_string()),
1382 Some(hunk_staging_operation_count),
1383 cx,
1384 )
1385 });
1386 let diff = diff.downgrade();
1387 cx.spawn(async move |this, cx| {
1388 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1389 diff.update(cx, |diff, cx| {
1390 diff.clear_pending_hunks(cx);
1391 })
1392 .ok();
1393 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1394 .ok();
1395 }
1396 })
1397 .detach();
1398 }
1399 }
1400 }
1401 }
1402
1403 fn local_worktree_git_repos_changed(
1404 &mut self,
1405 worktree: Entity<Worktree>,
1406 changed_repos: &UpdatedGitRepositoriesSet,
1407 cx: &mut Context<Self>,
1408 ) {
1409 log::debug!("local worktree repos changed");
1410 debug_assert!(worktree.read(cx).is_local());
1411
1412 for repository in self.repositories.values() {
1413 repository.update(cx, |repository, cx| {
1414 let repo_abs_path = &repository.work_directory_abs_path;
1415 if changed_repos.iter().any(|update| {
1416 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1417 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1418 }) {
1419 repository.reload_buffer_diff_bases(cx);
1420 }
1421 });
1422 }
1423 }
1424
1425 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1426 &self.repositories
1427 }
1428
1429 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1430 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1431 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1432 Some(status.status)
1433 }
1434
1435 pub fn repository_and_path_for_buffer_id(
1436 &self,
1437 buffer_id: BufferId,
1438 cx: &App,
1439 ) -> Option<(Entity<Repository>, RepoPath)> {
1440 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1441 let project_path = buffer.read(cx).project_path(cx)?;
1442 self.repository_and_path_for_project_path(&project_path, cx)
1443 }
1444
1445 pub fn repository_and_path_for_project_path(
1446 &self,
1447 path: &ProjectPath,
1448 cx: &App,
1449 ) -> Option<(Entity<Repository>, RepoPath)> {
1450 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1451 self.repositories
1452 .values()
1453 .filter_map(|repo| {
1454 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1455 Some((repo.clone(), repo_path))
1456 })
1457 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1458 }
1459
1460 pub fn git_init(
1461 &self,
1462 path: Arc<Path>,
1463 fallback_branch_name: String,
1464 cx: &App,
1465 ) -> Task<Result<()>> {
1466 match &self.state {
1467 GitStoreState::Local { fs, .. } => {
1468 let fs = fs.clone();
1469 cx.background_executor()
1470 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1471 }
1472 GitStoreState::Remote {
1473 upstream_client,
1474 upstream_project_id: project_id,
1475 ..
1476 } => {
1477 let client = upstream_client.clone();
1478 let project_id = *project_id;
1479 cx.background_executor().spawn(async move {
1480 client
1481 .request(proto::GitInit {
1482 project_id: project_id,
1483 abs_path: path.to_string_lossy().into_owned(),
1484 fallback_branch_name,
1485 })
1486 .await?;
1487 Ok(())
1488 })
1489 }
1490 }
1491 }
1492
1493 pub fn git_clone(
1494 &self,
1495 repo: String,
1496 path: impl Into<Arc<std::path::Path>>,
1497 cx: &App,
1498 ) -> Task<Result<()>> {
1499 let path = path.into();
1500 match &self.state {
1501 GitStoreState::Local { fs, .. } => {
1502 let fs = fs.clone();
1503 cx.background_executor()
1504 .spawn(async move { fs.git_clone(&repo, &path).await })
1505 }
1506 GitStoreState::Remote {
1507 upstream_client,
1508 upstream_project_id,
1509 ..
1510 } => {
1511 if upstream_client.is_via_collab() {
1512 return Task::ready(Err(anyhow!(
1513 "Git Clone isn't supported for project guests"
1514 )));
1515 }
1516 let request = upstream_client.request(proto::GitClone {
1517 project_id: *upstream_project_id,
1518 abs_path: path.to_string_lossy().into_owned(),
1519 remote_repo: repo,
1520 });
1521
1522 cx.background_spawn(async move {
1523 let result = request.await?;
1524
1525 match result.success {
1526 true => Ok(()),
1527 false => Err(anyhow!("Git Clone failed")),
1528 }
1529 })
1530 }
1531 }
1532 }
1533
1534 async fn handle_update_repository(
1535 this: Entity<Self>,
1536 envelope: TypedEnvelope<proto::UpdateRepository>,
1537 mut cx: AsyncApp,
1538 ) -> Result<()> {
1539 this.update(&mut cx, |this, cx| {
1540 let path_style = this.worktree_store.read(cx).path_style();
1541 let mut update = envelope.payload;
1542
1543 let id = RepositoryId::from_proto(update.id);
1544 let client = this.upstream_client().context("no upstream client")?;
1545
1546 let mut repo_subscription = None;
1547 let repo = this.repositories.entry(id).or_insert_with(|| {
1548 let git_store = cx.weak_entity();
1549 let repo = cx.new(|cx| {
1550 Repository::remote(
1551 id,
1552 Path::new(&update.abs_path).into(),
1553 path_style,
1554 ProjectId(update.project_id),
1555 client,
1556 git_store,
1557 cx,
1558 )
1559 });
1560 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1561 cx.emit(GitStoreEvent::RepositoryAdded);
1562 repo
1563 });
1564 this._subscriptions.extend(repo_subscription);
1565
1566 repo.update(cx, {
1567 let update = update.clone();
1568 |repo, cx| repo.apply_remote_update(update, cx)
1569 })?;
1570
1571 this.active_repo_id.get_or_insert_with(|| {
1572 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1573 id
1574 });
1575
1576 if let Some((client, project_id)) = this.downstream_client() {
1577 update.project_id = project_id.to_proto();
1578 client.send(update).log_err();
1579 }
1580 Ok(())
1581 })?
1582 }
1583
1584 async fn handle_remove_repository(
1585 this: Entity<Self>,
1586 envelope: TypedEnvelope<proto::RemoveRepository>,
1587 mut cx: AsyncApp,
1588 ) -> Result<()> {
1589 this.update(&mut cx, |this, cx| {
1590 let mut update = envelope.payload;
1591 let id = RepositoryId::from_proto(update.id);
1592 this.repositories.remove(&id);
1593 if let Some((client, project_id)) = this.downstream_client() {
1594 update.project_id = project_id.to_proto();
1595 client.send(update).log_err();
1596 }
1597 if this.active_repo_id == Some(id) {
1598 this.active_repo_id = None;
1599 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1600 }
1601 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1602 })
1603 }
1604
1605 async fn handle_git_init(
1606 this: Entity<Self>,
1607 envelope: TypedEnvelope<proto::GitInit>,
1608 cx: AsyncApp,
1609 ) -> Result<proto::Ack> {
1610 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1611 let name = envelope.payload.fallback_branch_name;
1612 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1613 .await?;
1614
1615 Ok(proto::Ack {})
1616 }
1617
1618 async fn handle_git_clone(
1619 this: Entity<Self>,
1620 envelope: TypedEnvelope<proto::GitClone>,
1621 cx: AsyncApp,
1622 ) -> Result<proto::GitCloneResponse> {
1623 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1624 let repo_name = envelope.payload.remote_repo;
1625 let result = cx
1626 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1627 .await;
1628
1629 Ok(proto::GitCloneResponse {
1630 success: result.is_ok(),
1631 })
1632 }
1633
1634 async fn handle_fetch(
1635 this: Entity<Self>,
1636 envelope: TypedEnvelope<proto::Fetch>,
1637 mut cx: AsyncApp,
1638 ) -> Result<proto::RemoteMessageResponse> {
1639 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1640 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1641 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1642 let askpass_id = envelope.payload.askpass_id;
1643
1644 let askpass = make_remote_delegate(
1645 this,
1646 envelope.payload.project_id,
1647 repository_id,
1648 askpass_id,
1649 &mut cx,
1650 );
1651
1652 let remote_output = repository_handle
1653 .update(&mut cx, |repository_handle, cx| {
1654 repository_handle.fetch(fetch_options, askpass, cx)
1655 })?
1656 .await??;
1657
1658 Ok(proto::RemoteMessageResponse {
1659 stdout: remote_output.stdout,
1660 stderr: remote_output.stderr,
1661 })
1662 }
1663
1664 async fn handle_push(
1665 this: Entity<Self>,
1666 envelope: TypedEnvelope<proto::Push>,
1667 mut cx: AsyncApp,
1668 ) -> Result<proto::RemoteMessageResponse> {
1669 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1670 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1671
1672 let askpass_id = envelope.payload.askpass_id;
1673 let askpass = make_remote_delegate(
1674 this,
1675 envelope.payload.project_id,
1676 repository_id,
1677 askpass_id,
1678 &mut cx,
1679 );
1680
1681 let options = envelope
1682 .payload
1683 .options
1684 .as_ref()
1685 .map(|_| match envelope.payload.options() {
1686 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1687 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1688 });
1689
1690 let branch_name = envelope.payload.branch_name.into();
1691 let remote_name = envelope.payload.remote_name.into();
1692
1693 let remote_output = repository_handle
1694 .update(&mut cx, |repository_handle, cx| {
1695 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1696 })?
1697 .await??;
1698 Ok(proto::RemoteMessageResponse {
1699 stdout: remote_output.stdout,
1700 stderr: remote_output.stderr,
1701 })
1702 }
1703
1704 async fn handle_pull(
1705 this: Entity<Self>,
1706 envelope: TypedEnvelope<proto::Pull>,
1707 mut cx: AsyncApp,
1708 ) -> Result<proto::RemoteMessageResponse> {
1709 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1710 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1711 let askpass_id = envelope.payload.askpass_id;
1712 let askpass = make_remote_delegate(
1713 this,
1714 envelope.payload.project_id,
1715 repository_id,
1716 askpass_id,
1717 &mut cx,
1718 );
1719
1720 let branch_name = envelope.payload.branch_name.into();
1721 let remote_name = envelope.payload.remote_name.into();
1722
1723 let remote_message = repository_handle
1724 .update(&mut cx, |repository_handle, cx| {
1725 repository_handle.pull(branch_name, remote_name, askpass, cx)
1726 })?
1727 .await??;
1728
1729 Ok(proto::RemoteMessageResponse {
1730 stdout: remote_message.stdout,
1731 stderr: remote_message.stderr,
1732 })
1733 }
1734
1735 async fn handle_stage(
1736 this: Entity<Self>,
1737 envelope: TypedEnvelope<proto::Stage>,
1738 mut cx: AsyncApp,
1739 ) -> Result<proto::Ack> {
1740 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1741 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1742
1743 let entries = envelope
1744 .payload
1745 .paths
1746 .into_iter()
1747 .map(|path| RepoPath::new(&path))
1748 .collect::<Result<Vec<_>>>()?;
1749
1750 repository_handle
1751 .update(&mut cx, |repository_handle, cx| {
1752 repository_handle.stage_entries(entries, cx)
1753 })?
1754 .await?;
1755 Ok(proto::Ack {})
1756 }
1757
1758 async fn handle_unstage(
1759 this: Entity<Self>,
1760 envelope: TypedEnvelope<proto::Unstage>,
1761 mut cx: AsyncApp,
1762 ) -> Result<proto::Ack> {
1763 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1764 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1765
1766 let entries = envelope
1767 .payload
1768 .paths
1769 .into_iter()
1770 .map(|path| RepoPath::new(&path))
1771 .collect::<Result<Vec<_>>>()?;
1772
1773 repository_handle
1774 .update(&mut cx, |repository_handle, cx| {
1775 repository_handle.unstage_entries(entries, cx)
1776 })?
1777 .await?;
1778
1779 Ok(proto::Ack {})
1780 }
1781
1782 async fn handle_stash(
1783 this: Entity<Self>,
1784 envelope: TypedEnvelope<proto::Stash>,
1785 mut cx: AsyncApp,
1786 ) -> Result<proto::Ack> {
1787 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1788 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1789
1790 let entries = envelope
1791 .payload
1792 .paths
1793 .into_iter()
1794 .map(|path| RepoPath::new(&path))
1795 .collect::<Result<Vec<_>>>()?;
1796
1797 repository_handle
1798 .update(&mut cx, |repository_handle, cx| {
1799 repository_handle.stash_entries(entries, cx)
1800 })?
1801 .await?;
1802
1803 Ok(proto::Ack {})
1804 }
1805
1806 async fn handle_stash_pop(
1807 this: Entity<Self>,
1808 envelope: TypedEnvelope<proto::StashPop>,
1809 mut cx: AsyncApp,
1810 ) -> Result<proto::Ack> {
1811 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1812 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1813 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1814
1815 repository_handle
1816 .update(&mut cx, |repository_handle, cx| {
1817 repository_handle.stash_pop(stash_index, cx)
1818 })?
1819 .await?;
1820
1821 Ok(proto::Ack {})
1822 }
1823
1824 async fn handle_stash_apply(
1825 this: Entity<Self>,
1826 envelope: TypedEnvelope<proto::StashApply>,
1827 mut cx: AsyncApp,
1828 ) -> Result<proto::Ack> {
1829 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1830 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1831 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1832
1833 repository_handle
1834 .update(&mut cx, |repository_handle, cx| {
1835 repository_handle.stash_apply(stash_index, cx)
1836 })?
1837 .await?;
1838
1839 Ok(proto::Ack {})
1840 }
1841
1842 async fn handle_stash_drop(
1843 this: Entity<Self>,
1844 envelope: TypedEnvelope<proto::StashDrop>,
1845 mut cx: AsyncApp,
1846 ) -> Result<proto::Ack> {
1847 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1848 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1849 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1850
1851 repository_handle
1852 .update(&mut cx, |repository_handle, cx| {
1853 repository_handle.stash_drop(stash_index, cx)
1854 })?
1855 .await??;
1856
1857 Ok(proto::Ack {})
1858 }
1859
1860 async fn handle_set_index_text(
1861 this: Entity<Self>,
1862 envelope: TypedEnvelope<proto::SetIndexText>,
1863 mut cx: AsyncApp,
1864 ) -> Result<proto::Ack> {
1865 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1866 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1867 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1868
1869 repository_handle
1870 .update(&mut cx, |repository_handle, cx| {
1871 repository_handle.spawn_set_index_text_job(
1872 repo_path,
1873 envelope.payload.text,
1874 None,
1875 cx,
1876 )
1877 })?
1878 .await??;
1879 Ok(proto::Ack {})
1880 }
1881
1882 async fn handle_commit(
1883 this: Entity<Self>,
1884 envelope: TypedEnvelope<proto::Commit>,
1885 mut cx: AsyncApp,
1886 ) -> Result<proto::Ack> {
1887 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1888 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1889
1890 let message = SharedString::from(envelope.payload.message);
1891 let name = envelope.payload.name.map(SharedString::from);
1892 let email = envelope.payload.email.map(SharedString::from);
1893 let options = envelope.payload.options.unwrap_or_default();
1894
1895 repository_handle
1896 .update(&mut cx, |repository_handle, cx| {
1897 repository_handle.commit(
1898 message,
1899 name.zip(email),
1900 CommitOptions {
1901 amend: options.amend,
1902 signoff: options.signoff,
1903 },
1904 cx,
1905 )
1906 })?
1907 .await??;
1908 Ok(proto::Ack {})
1909 }
1910
1911 async fn handle_get_remotes(
1912 this: Entity<Self>,
1913 envelope: TypedEnvelope<proto::GetRemotes>,
1914 mut cx: AsyncApp,
1915 ) -> Result<proto::GetRemotesResponse> {
1916 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1917 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1918
1919 let branch_name = envelope.payload.branch_name;
1920
1921 let remotes = repository_handle
1922 .update(&mut cx, |repository_handle, _| {
1923 repository_handle.get_remotes(branch_name)
1924 })?
1925 .await??;
1926
1927 Ok(proto::GetRemotesResponse {
1928 remotes: remotes
1929 .into_iter()
1930 .map(|remotes| proto::get_remotes_response::Remote {
1931 name: remotes.name.to_string(),
1932 })
1933 .collect::<Vec<_>>(),
1934 })
1935 }
1936
1937 async fn handle_get_worktrees(
1938 this: Entity<Self>,
1939 envelope: TypedEnvelope<proto::GitGetWorktrees>,
1940 mut cx: AsyncApp,
1941 ) -> Result<proto::GitWorktreesResponse> {
1942 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1943 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1944
1945 let worktrees = repository_handle
1946 .update(&mut cx, |repository_handle, _| {
1947 repository_handle.worktrees()
1948 })?
1949 .await??;
1950
1951 Ok(proto::GitWorktreesResponse {
1952 worktrees: worktrees
1953 .into_iter()
1954 .map(|worktree| worktree_to_proto(&worktree))
1955 .collect::<Vec<_>>(),
1956 })
1957 }
1958
1959 async fn handle_create_worktree(
1960 this: Entity<Self>,
1961 envelope: TypedEnvelope<proto::GitCreateWorktree>,
1962 mut cx: AsyncApp,
1963 ) -> Result<proto::Ack> {
1964 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1965 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1966 let directory = PathBuf::from(envelope.payload.directory);
1967 let name = envelope.payload.name;
1968 let commit = envelope.payload.commit;
1969
1970 repository_handle
1971 .update(&mut cx, |repository_handle, _| {
1972 repository_handle.create_worktree(name, directory, commit)
1973 })?
1974 .await??;
1975
1976 Ok(proto::Ack {})
1977 }
1978
1979 async fn handle_get_branches(
1980 this: Entity<Self>,
1981 envelope: TypedEnvelope<proto::GitGetBranches>,
1982 mut cx: AsyncApp,
1983 ) -> Result<proto::GitBranchesResponse> {
1984 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1985 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1986
1987 let branches = repository_handle
1988 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1989 .await??;
1990
1991 Ok(proto::GitBranchesResponse {
1992 branches: branches
1993 .into_iter()
1994 .map(|branch| branch_to_proto(&branch))
1995 .collect::<Vec<_>>(),
1996 })
1997 }
1998 async fn handle_get_default_branch(
1999 this: Entity<Self>,
2000 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2001 mut cx: AsyncApp,
2002 ) -> Result<proto::GetDefaultBranchResponse> {
2003 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2004 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2005
2006 let branch = repository_handle
2007 .update(&mut cx, |repository_handle, _| {
2008 repository_handle.default_branch()
2009 })?
2010 .await??
2011 .map(Into::into);
2012
2013 Ok(proto::GetDefaultBranchResponse { branch })
2014 }
2015 async fn handle_create_branch(
2016 this: Entity<Self>,
2017 envelope: TypedEnvelope<proto::GitCreateBranch>,
2018 mut cx: AsyncApp,
2019 ) -> Result<proto::Ack> {
2020 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2021 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2022 let branch_name = envelope.payload.branch_name;
2023
2024 repository_handle
2025 .update(&mut cx, |repository_handle, _| {
2026 repository_handle.create_branch(branch_name)
2027 })?
2028 .await??;
2029
2030 Ok(proto::Ack {})
2031 }
2032
2033 async fn handle_change_branch(
2034 this: Entity<Self>,
2035 envelope: TypedEnvelope<proto::GitChangeBranch>,
2036 mut cx: AsyncApp,
2037 ) -> Result<proto::Ack> {
2038 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2039 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2040 let branch_name = envelope.payload.branch_name;
2041
2042 repository_handle
2043 .update(&mut cx, |repository_handle, _| {
2044 repository_handle.change_branch(branch_name)
2045 })?
2046 .await??;
2047
2048 Ok(proto::Ack {})
2049 }
2050
2051 async fn handle_rename_branch(
2052 this: Entity<Self>,
2053 envelope: TypedEnvelope<proto::GitRenameBranch>,
2054 mut cx: AsyncApp,
2055 ) -> Result<proto::Ack> {
2056 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2057 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2058 let branch = envelope.payload.branch;
2059 let new_name = envelope.payload.new_name;
2060
2061 repository_handle
2062 .update(&mut cx, |repository_handle, _| {
2063 repository_handle.rename_branch(branch, new_name)
2064 })?
2065 .await??;
2066
2067 Ok(proto::Ack {})
2068 }
2069
2070 async fn handle_show(
2071 this: Entity<Self>,
2072 envelope: TypedEnvelope<proto::GitShow>,
2073 mut cx: AsyncApp,
2074 ) -> Result<proto::GitCommitDetails> {
2075 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2076 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2077
2078 let commit = repository_handle
2079 .update(&mut cx, |repository_handle, _| {
2080 repository_handle.show(envelope.payload.commit)
2081 })?
2082 .await??;
2083 Ok(proto::GitCommitDetails {
2084 sha: commit.sha.into(),
2085 message: commit.message.into(),
2086 commit_timestamp: commit.commit_timestamp,
2087 author_email: commit.author_email.into(),
2088 author_name: commit.author_name.into(),
2089 })
2090 }
2091
2092 async fn handle_load_commit_diff(
2093 this: Entity<Self>,
2094 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2095 mut cx: AsyncApp,
2096 ) -> Result<proto::LoadCommitDiffResponse> {
2097 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2098 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2099
2100 let commit_diff = repository_handle
2101 .update(&mut cx, |repository_handle, _| {
2102 repository_handle.load_commit_diff(envelope.payload.commit)
2103 })?
2104 .await??;
2105 Ok(proto::LoadCommitDiffResponse {
2106 files: commit_diff
2107 .files
2108 .into_iter()
2109 .map(|file| proto::CommitFile {
2110 path: file.path.to_proto(),
2111 old_text: file.old_text,
2112 new_text: file.new_text,
2113 })
2114 .collect(),
2115 })
2116 }
2117
2118 async fn handle_reset(
2119 this: Entity<Self>,
2120 envelope: TypedEnvelope<proto::GitReset>,
2121 mut cx: AsyncApp,
2122 ) -> Result<proto::Ack> {
2123 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2124 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2125
2126 let mode = match envelope.payload.mode() {
2127 git_reset::ResetMode::Soft => ResetMode::Soft,
2128 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2129 };
2130
2131 repository_handle
2132 .update(&mut cx, |repository_handle, cx| {
2133 repository_handle.reset(envelope.payload.commit, mode, cx)
2134 })?
2135 .await??;
2136 Ok(proto::Ack {})
2137 }
2138
2139 async fn handle_checkout_files(
2140 this: Entity<Self>,
2141 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2142 mut cx: AsyncApp,
2143 ) -> Result<proto::Ack> {
2144 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2145 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2146 let paths = envelope
2147 .payload
2148 .paths
2149 .iter()
2150 .map(|s| RepoPath::from_proto(s))
2151 .collect::<Result<Vec<_>>>()?;
2152
2153 repository_handle
2154 .update(&mut cx, |repository_handle, cx| {
2155 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2156 })?
2157 .await??;
2158 Ok(proto::Ack {})
2159 }
2160
2161 async fn handle_open_commit_message_buffer(
2162 this: Entity<Self>,
2163 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2164 mut cx: AsyncApp,
2165 ) -> Result<proto::OpenBufferResponse> {
2166 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2167 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2168 let buffer = repository
2169 .update(&mut cx, |repository, cx| {
2170 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2171 })?
2172 .await?;
2173
2174 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2175 this.update(&mut cx, |this, cx| {
2176 this.buffer_store.update(cx, |buffer_store, cx| {
2177 buffer_store
2178 .create_buffer_for_peer(
2179 &buffer,
2180 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2181 cx,
2182 )
2183 .detach_and_log_err(cx);
2184 })
2185 })?;
2186
2187 Ok(proto::OpenBufferResponse {
2188 buffer_id: buffer_id.to_proto(),
2189 })
2190 }
2191
2192 async fn handle_askpass(
2193 this: Entity<Self>,
2194 envelope: TypedEnvelope<proto::AskPassRequest>,
2195 mut cx: AsyncApp,
2196 ) -> Result<proto::AskPassResponse> {
2197 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2198 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2199
2200 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2201 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2202 debug_panic!("no askpass found");
2203 anyhow::bail!("no askpass found");
2204 };
2205
2206 let response = askpass
2207 .ask_password(envelope.payload.prompt)
2208 .await
2209 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2210
2211 delegates
2212 .lock()
2213 .insert(envelope.payload.askpass_id, askpass);
2214
2215 // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
2216 Ok(proto::AskPassResponse {
2217 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2218 })
2219 }
2220
2221 async fn handle_check_for_pushed_commits(
2222 this: Entity<Self>,
2223 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2224 mut cx: AsyncApp,
2225 ) -> Result<proto::CheckForPushedCommitsResponse> {
2226 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2227 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2228
2229 let branches = repository_handle
2230 .update(&mut cx, |repository_handle, _| {
2231 repository_handle.check_for_pushed_commits()
2232 })?
2233 .await??;
2234 Ok(proto::CheckForPushedCommitsResponse {
2235 pushed_to: branches
2236 .into_iter()
2237 .map(|commit| commit.to_string())
2238 .collect(),
2239 })
2240 }
2241
2242 async fn handle_git_diff(
2243 this: Entity<Self>,
2244 envelope: TypedEnvelope<proto::GitDiff>,
2245 mut cx: AsyncApp,
2246 ) -> Result<proto::GitDiffResponse> {
2247 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2248 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2249 let diff_type = match envelope.payload.diff_type() {
2250 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2251 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2252 };
2253
2254 let mut diff = repository_handle
2255 .update(&mut cx, |repository_handle, cx| {
2256 repository_handle.diff(diff_type, cx)
2257 })?
2258 .await??;
2259 const ONE_MB: usize = 1_000_000;
2260 if diff.len() > ONE_MB {
2261 diff = diff.chars().take(ONE_MB).collect()
2262 }
2263
2264 Ok(proto::GitDiffResponse { diff })
2265 }
2266
2267 async fn handle_tree_diff(
2268 this: Entity<Self>,
2269 request: TypedEnvelope<proto::GetTreeDiff>,
2270 mut cx: AsyncApp,
2271 ) -> Result<proto::GetTreeDiffResponse> {
2272 let repository_id = RepositoryId(request.payload.repository_id);
2273 let diff_type = if request.payload.is_merge {
2274 DiffTreeType::MergeBase {
2275 base: request.payload.base.into(),
2276 head: request.payload.head.into(),
2277 }
2278 } else {
2279 DiffTreeType::Since {
2280 base: request.payload.base.into(),
2281 head: request.payload.head.into(),
2282 }
2283 };
2284
2285 let diff = this
2286 .update(&mut cx, |this, cx| {
2287 let repository = this.repositories().get(&repository_id)?;
2288 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2289 })?
2290 .context("missing repository")?
2291 .await??;
2292
2293 Ok(proto::GetTreeDiffResponse {
2294 entries: diff
2295 .entries
2296 .into_iter()
2297 .map(|(path, status)| proto::TreeDiffStatus {
2298 path: path.0.to_proto(),
2299 status: match status {
2300 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2301 TreeDiffStatus::Modified { .. } => {
2302 proto::tree_diff_status::Status::Modified.into()
2303 }
2304 TreeDiffStatus::Deleted { .. } => {
2305 proto::tree_diff_status::Status::Deleted.into()
2306 }
2307 },
2308 oid: match status {
2309 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2310 Some(old.to_string())
2311 }
2312 TreeDiffStatus::Added => None,
2313 },
2314 })
2315 .collect(),
2316 })
2317 }
2318
2319 async fn handle_get_blob_content(
2320 this: Entity<Self>,
2321 request: TypedEnvelope<proto::GetBlobContent>,
2322 mut cx: AsyncApp,
2323 ) -> Result<proto::GetBlobContentResponse> {
2324 let oid = git::Oid::from_str(&request.payload.oid)?;
2325 let repository_id = RepositoryId(request.payload.repository_id);
2326 let content = this
2327 .update(&mut cx, |this, cx| {
2328 let repository = this.repositories().get(&repository_id)?;
2329 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2330 })?
2331 .context("missing repository")?
2332 .await?;
2333 Ok(proto::GetBlobContentResponse { content })
2334 }
2335
2336 async fn handle_open_unstaged_diff(
2337 this: Entity<Self>,
2338 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2339 mut cx: AsyncApp,
2340 ) -> Result<proto::OpenUnstagedDiffResponse> {
2341 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2342 let diff = this
2343 .update(&mut cx, |this, cx| {
2344 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2345 Some(this.open_unstaged_diff(buffer, cx))
2346 })?
2347 .context("missing buffer")?
2348 .await?;
2349 this.update(&mut cx, |this, _| {
2350 let shared_diffs = this
2351 .shared_diffs
2352 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2353 .or_default();
2354 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2355 })?;
2356 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2357 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2358 }
2359
2360 async fn handle_open_uncommitted_diff(
2361 this: Entity<Self>,
2362 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2363 mut cx: AsyncApp,
2364 ) -> Result<proto::OpenUncommittedDiffResponse> {
2365 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2366 let diff = this
2367 .update(&mut cx, |this, cx| {
2368 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2369 Some(this.open_uncommitted_diff(buffer, cx))
2370 })?
2371 .context("missing buffer")?
2372 .await?;
2373 this.update(&mut cx, |this, _| {
2374 let shared_diffs = this
2375 .shared_diffs
2376 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2377 .or_default();
2378 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2379 })?;
2380 diff.read_with(&cx, |diff, cx| {
2381 use proto::open_uncommitted_diff_response::Mode;
2382
2383 let unstaged_diff = diff.secondary_diff();
2384 let index_snapshot = unstaged_diff.and_then(|diff| {
2385 let diff = diff.read(cx);
2386 diff.base_text_exists().then(|| diff.base_text())
2387 });
2388
2389 let mode;
2390 let staged_text;
2391 let committed_text;
2392 if diff.base_text_exists() {
2393 let committed_snapshot = diff.base_text();
2394 committed_text = Some(committed_snapshot.text());
2395 if let Some(index_text) = index_snapshot {
2396 if index_text.remote_id() == committed_snapshot.remote_id() {
2397 mode = Mode::IndexMatchesHead;
2398 staged_text = None;
2399 } else {
2400 mode = Mode::IndexAndHead;
2401 staged_text = Some(index_text.text());
2402 }
2403 } else {
2404 mode = Mode::IndexAndHead;
2405 staged_text = None;
2406 }
2407 } else {
2408 mode = Mode::IndexAndHead;
2409 committed_text = None;
2410 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2411 }
2412
2413 proto::OpenUncommittedDiffResponse {
2414 committed_text,
2415 staged_text,
2416 mode: mode.into(),
2417 }
2418 })
2419 }
2420
2421 async fn handle_update_diff_bases(
2422 this: Entity<Self>,
2423 request: TypedEnvelope<proto::UpdateDiffBases>,
2424 mut cx: AsyncApp,
2425 ) -> Result<()> {
2426 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2427 this.update(&mut cx, |this, cx| {
2428 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2429 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2430 {
2431 let buffer = buffer.read(cx).text_snapshot();
2432 diff_state.update(cx, |diff_state, cx| {
2433 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2434 })
2435 }
2436 })
2437 }
2438
2439 async fn handle_blame_buffer(
2440 this: Entity<Self>,
2441 envelope: TypedEnvelope<proto::BlameBuffer>,
2442 mut cx: AsyncApp,
2443 ) -> Result<proto::BlameBufferResponse> {
2444 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2445 let version = deserialize_version(&envelope.payload.version);
2446 let buffer = this.read_with(&cx, |this, cx| {
2447 this.buffer_store.read(cx).get_existing(buffer_id)
2448 })??;
2449 buffer
2450 .update(&mut cx, |buffer, _| {
2451 buffer.wait_for_version(version.clone())
2452 })?
2453 .await?;
2454 let blame = this
2455 .update(&mut cx, |this, cx| {
2456 this.blame_buffer(&buffer, Some(version), cx)
2457 })?
2458 .await?;
2459 Ok(serialize_blame_buffer_response(blame))
2460 }
2461
2462 async fn handle_get_permalink_to_line(
2463 this: Entity<Self>,
2464 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2465 mut cx: AsyncApp,
2466 ) -> Result<proto::GetPermalinkToLineResponse> {
2467 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2468 // let version = deserialize_version(&envelope.payload.version);
2469 let selection = {
2470 let proto_selection = envelope
2471 .payload
2472 .selection
2473 .context("no selection to get permalink for defined")?;
2474 proto_selection.start as u32..proto_selection.end as u32
2475 };
2476 let buffer = this.read_with(&cx, |this, cx| {
2477 this.buffer_store.read(cx).get_existing(buffer_id)
2478 })??;
2479 let permalink = this
2480 .update(&mut cx, |this, cx| {
2481 this.get_permalink_to_line(&buffer, selection, cx)
2482 })?
2483 .await?;
2484 Ok(proto::GetPermalinkToLineResponse {
2485 permalink: permalink.to_string(),
2486 })
2487 }
2488
2489 fn repository_for_request(
2490 this: &Entity<Self>,
2491 id: RepositoryId,
2492 cx: &mut AsyncApp,
2493 ) -> Result<Entity<Repository>> {
2494 this.read_with(cx, |this, _| {
2495 this.repositories
2496 .get(&id)
2497 .context("missing repository handle")
2498 .cloned()
2499 })?
2500 }
2501
2502 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2503 self.repositories
2504 .iter()
2505 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2506 .collect()
2507 }
2508
2509 fn process_updated_entries(
2510 &self,
2511 worktree: &Entity<Worktree>,
2512 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2513 cx: &mut App,
2514 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2515 let path_style = worktree.read(cx).path_style();
2516 let mut repo_paths = self
2517 .repositories
2518 .values()
2519 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2520 .collect::<Vec<_>>();
2521 let mut entries: Vec<_> = updated_entries
2522 .iter()
2523 .map(|(path, _, _)| path.clone())
2524 .collect();
2525 entries.sort();
2526 let worktree = worktree.read(cx);
2527
2528 let entries = entries
2529 .into_iter()
2530 .map(|path| worktree.absolutize(&path))
2531 .collect::<Arc<[_]>>();
2532
2533 let executor = cx.background_executor().clone();
2534 cx.background_executor().spawn(async move {
2535 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2536 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2537 let mut tasks = FuturesOrdered::new();
2538 for (repo_path, repo) in repo_paths.into_iter().rev() {
2539 let entries = entries.clone();
2540 let task = executor.spawn(async move {
2541 // Find all repository paths that belong to this repo
2542 let mut ix = entries.partition_point(|path| path < &*repo_path);
2543 if ix == entries.len() {
2544 return None;
2545 };
2546
2547 let mut paths = Vec::new();
2548 // All paths prefixed by a given repo will constitute a continuous range.
2549 while let Some(path) = entries.get(ix)
2550 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2551 &repo_path, path, path_style,
2552 )
2553 {
2554 paths.push((repo_path, ix));
2555 ix += 1;
2556 }
2557 if paths.is_empty() {
2558 None
2559 } else {
2560 Some((repo, paths))
2561 }
2562 });
2563 tasks.push_back(task);
2564 }
2565
2566 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2567 let mut path_was_used = vec![false; entries.len()];
2568 let tasks = tasks.collect::<Vec<_>>().await;
2569 // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
2570 // We always want to assign a path to it's innermost repository.
2571 for t in tasks {
2572 let Some((repo, paths)) = t else {
2573 continue;
2574 };
2575 let entry = paths_by_git_repo.entry(repo).or_default();
2576 for (repo_path, ix) in paths {
2577 if path_was_used[ix] {
2578 continue;
2579 }
2580 path_was_used[ix] = true;
2581 entry.push(repo_path);
2582 }
2583 }
2584
2585 paths_by_git_repo
2586 })
2587 }
2588}
2589
2590impl BufferGitState {
2591 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2592 Self {
2593 unstaged_diff: Default::default(),
2594 uncommitted_diff: Default::default(),
2595 recalculate_diff_task: Default::default(),
2596 language: Default::default(),
2597 language_registry: Default::default(),
2598 recalculating_tx: postage::watch::channel_with(false).0,
2599 hunk_staging_operation_count: 0,
2600 hunk_staging_operation_count_as_of_write: 0,
2601 head_text: Default::default(),
2602 index_text: Default::default(),
2603 head_changed: Default::default(),
2604 index_changed: Default::default(),
2605 language_changed: Default::default(),
2606 conflict_updated_futures: Default::default(),
2607 conflict_set: Default::default(),
2608 reparse_conflict_markers_task: Default::default(),
2609 }
2610 }
2611
2612 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2613 self.language = buffer.read(cx).language().cloned();
2614 self.language_changed = true;
2615 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2616 }
2617
2618 fn reparse_conflict_markers(
2619 &mut self,
2620 buffer: text::BufferSnapshot,
2621 cx: &mut Context<Self>,
2622 ) -> oneshot::Receiver<()> {
2623 let (tx, rx) = oneshot::channel();
2624
2625 let Some(conflict_set) = self
2626 .conflict_set
2627 .as_ref()
2628 .and_then(|conflict_set| conflict_set.upgrade())
2629 else {
2630 return rx;
2631 };
2632
2633 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2634 if conflict_set.has_conflict {
2635 Some(conflict_set.snapshot())
2636 } else {
2637 None
2638 }
2639 });
2640
2641 if let Some(old_snapshot) = old_snapshot {
2642 self.conflict_updated_futures.push(tx);
2643 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2644 let (snapshot, changed_range) = cx
2645 .background_spawn(async move {
2646 let new_snapshot = ConflictSet::parse(&buffer);
2647 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2648 (new_snapshot, changed_range)
2649 })
2650 .await;
2651 this.update(cx, |this, cx| {
2652 if let Some(conflict_set) = &this.conflict_set {
2653 conflict_set
2654 .update(cx, |conflict_set, cx| {
2655 conflict_set.set_snapshot(snapshot, changed_range, cx);
2656 })
2657 .ok();
2658 }
2659 let futures = std::mem::take(&mut this.conflict_updated_futures);
2660 for tx in futures {
2661 tx.send(()).ok();
2662 }
2663 })
2664 }))
2665 }
2666
2667 rx
2668 }
2669
2670 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2671 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2672 }
2673
2674 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2675 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2676 }
2677
2678 fn handle_base_texts_updated(
2679 &mut self,
2680 buffer: text::BufferSnapshot,
2681 message: proto::UpdateDiffBases,
2682 cx: &mut Context<Self>,
2683 ) {
2684 use proto::update_diff_bases::Mode;
2685
2686 let Some(mode) = Mode::from_i32(message.mode) else {
2687 return;
2688 };
2689
2690 let diff_bases_change = match mode {
2691 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2692 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2693 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2694 Mode::IndexAndHead => DiffBasesChange::SetEach {
2695 index: message.staged_text,
2696 head: message.committed_text,
2697 },
2698 };
2699
2700 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2701 }
2702
2703 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2704 if *self.recalculating_tx.borrow() {
2705 let mut rx = self.recalculating_tx.subscribe();
2706 Some(async move {
2707 loop {
2708 let is_recalculating = rx.recv().await;
2709 if is_recalculating != Some(true) {
2710 break;
2711 }
2712 }
2713 })
2714 } else {
2715 None
2716 }
2717 }
2718
2719 fn diff_bases_changed(
2720 &mut self,
2721 buffer: text::BufferSnapshot,
2722 diff_bases_change: Option<DiffBasesChange>,
2723 cx: &mut Context<Self>,
2724 ) {
2725 match diff_bases_change {
2726 Some(DiffBasesChange::SetIndex(index)) => {
2727 self.index_text = index.map(|mut index| {
2728 text::LineEnding::normalize(&mut index);
2729 Arc::new(index)
2730 });
2731 self.index_changed = true;
2732 }
2733 Some(DiffBasesChange::SetHead(head)) => {
2734 self.head_text = head.map(|mut head| {
2735 text::LineEnding::normalize(&mut head);
2736 Arc::new(head)
2737 });
2738 self.head_changed = true;
2739 }
2740 Some(DiffBasesChange::SetBoth(text)) => {
2741 let text = text.map(|mut text| {
2742 text::LineEnding::normalize(&mut text);
2743 Arc::new(text)
2744 });
2745 self.head_text = text.clone();
2746 self.index_text = text;
2747 self.head_changed = true;
2748 self.index_changed = true;
2749 }
2750 Some(DiffBasesChange::SetEach { index, head }) => {
2751 self.index_text = index.map(|mut index| {
2752 text::LineEnding::normalize(&mut index);
2753 Arc::new(index)
2754 });
2755 self.index_changed = true;
2756 self.head_text = head.map(|mut head| {
2757 text::LineEnding::normalize(&mut head);
2758 Arc::new(head)
2759 });
2760 self.head_changed = true;
2761 }
2762 None => {}
2763 }
2764
2765 self.recalculate_diffs(buffer, cx)
2766 }
2767
2768 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2769 *self.recalculating_tx.borrow_mut() = true;
2770
2771 let language = self.language.clone();
2772 let language_registry = self.language_registry.clone();
2773 let unstaged_diff = self.unstaged_diff();
2774 let uncommitted_diff = self.uncommitted_diff();
2775 let head = self.head_text.clone();
2776 let index = self.index_text.clone();
2777 let index_changed = self.index_changed;
2778 let head_changed = self.head_changed;
2779 let language_changed = self.language_changed;
2780 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2781 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2782 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2783 (None, None) => true,
2784 _ => false,
2785 };
2786 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2787 log::debug!(
2788 "start recalculating diffs for buffer {}",
2789 buffer.remote_id()
2790 );
2791
2792 let mut new_unstaged_diff = None;
2793 if let Some(unstaged_diff) = &unstaged_diff {
2794 new_unstaged_diff = Some(
2795 BufferDiff::update_diff(
2796 unstaged_diff.clone(),
2797 buffer.clone(),
2798 index,
2799 index_changed,
2800 language_changed,
2801 language.clone(),
2802 language_registry.clone(),
2803 cx,
2804 )
2805 .await?,
2806 );
2807 }
2808
2809 let mut new_uncommitted_diff = None;
2810 if let Some(uncommitted_diff) = &uncommitted_diff {
2811 new_uncommitted_diff = if index_matches_head {
2812 new_unstaged_diff.clone()
2813 } else {
2814 Some(
2815 BufferDiff::update_diff(
2816 uncommitted_diff.clone(),
2817 buffer.clone(),
2818 head,
2819 head_changed,
2820 language_changed,
2821 language.clone(),
2822 language_registry.clone(),
2823 cx,
2824 )
2825 .await?,
2826 )
2827 }
2828 }
2829
2830 let cancel = this.update(cx, |this, _| {
2831 // This checks whether all pending stage/unstage operations
2832 // have quiesced (i.e. both the corresponding write and the
2833 // read of that write have completed). If not, then we cancel
2834 // this recalculation attempt to avoid invalidating pending
2835 // state too quickly; another recalculation will come along
2836 // later and clear the pending state once the state of the index has settled.
2837 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2838 *this.recalculating_tx.borrow_mut() = false;
2839 true
2840 } else {
2841 false
2842 }
2843 })?;
2844 if cancel {
2845 log::debug!(
2846 concat!(
2847 "aborting recalculating diffs for buffer {}",
2848 "due to subsequent hunk operations",
2849 ),
2850 buffer.remote_id()
2851 );
2852 return Ok(());
2853 }
2854
2855 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2856 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2857 {
2858 unstaged_diff.update(cx, |diff, cx| {
2859 if language_changed {
2860 diff.language_changed(cx);
2861 }
2862 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2863 })?
2864 } else {
2865 None
2866 };
2867
2868 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2869 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2870 {
2871 uncommitted_diff.update(cx, |diff, cx| {
2872 if language_changed {
2873 diff.language_changed(cx);
2874 }
2875 diff.set_snapshot_with_secondary(
2876 new_uncommitted_diff,
2877 &buffer,
2878 unstaged_changed_range,
2879 true,
2880 cx,
2881 );
2882 })?;
2883 }
2884
2885 log::debug!(
2886 "finished recalculating diffs for buffer {}",
2887 buffer.remote_id()
2888 );
2889
2890 if let Some(this) = this.upgrade() {
2891 this.update(cx, |this, _| {
2892 this.index_changed = false;
2893 this.head_changed = false;
2894 this.language_changed = false;
2895 *this.recalculating_tx.borrow_mut() = false;
2896 })?;
2897 }
2898
2899 Ok(())
2900 }));
2901 }
2902}
2903
2904fn make_remote_delegate(
2905 this: Entity<GitStore>,
2906 project_id: u64,
2907 repository_id: RepositoryId,
2908 askpass_id: u64,
2909 cx: &mut AsyncApp,
2910) -> AskPassDelegate {
2911 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2912 this.update(cx, |this, cx| {
2913 let Some((client, _)) = this.downstream_client() else {
2914 return;
2915 };
2916 let response = client.request(proto::AskPassRequest {
2917 project_id,
2918 repository_id: repository_id.to_proto(),
2919 askpass_id,
2920 prompt,
2921 });
2922 cx.spawn(async move |_, _| {
2923 let mut response = response.await?.response;
2924 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2925 .ok();
2926 response.zeroize();
2927 anyhow::Ok(())
2928 })
2929 .detach_and_log_err(cx);
2930 })
2931 .log_err();
2932 })
2933}
2934
2935impl RepositoryId {
2936 pub fn to_proto(self) -> u64 {
2937 self.0
2938 }
2939
2940 pub fn from_proto(id: u64) -> Self {
2941 RepositoryId(id)
2942 }
2943}
2944
2945impl RepositorySnapshot {
2946 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2947 Self {
2948 id,
2949 statuses_by_path: Default::default(),
2950 work_directory_abs_path,
2951 branch: None,
2952 head_commit: None,
2953 scan_id: 0,
2954 merge: Default::default(),
2955 remote_origin_url: None,
2956 remote_upstream_url: None,
2957 stash_entries: Default::default(),
2958 path_style,
2959 }
2960 }
2961
2962 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2963 proto::UpdateRepository {
2964 branch_summary: self.branch.as_ref().map(branch_to_proto),
2965 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2966 updated_statuses: self
2967 .statuses_by_path
2968 .iter()
2969 .map(|entry| entry.to_proto())
2970 .collect(),
2971 removed_statuses: Default::default(),
2972 current_merge_conflicts: self
2973 .merge
2974 .conflicted_paths
2975 .iter()
2976 .map(|repo_path| repo_path.to_proto())
2977 .collect(),
2978 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2979 project_id,
2980 id: self.id.to_proto(),
2981 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2982 entry_ids: vec![self.id.to_proto()],
2983 scan_id: self.scan_id,
2984 is_last_update: true,
2985 stash_entries: self
2986 .stash_entries
2987 .entries
2988 .iter()
2989 .map(stash_to_proto)
2990 .collect(),
2991 }
2992 }
2993
2994 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2995 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2996 let mut removed_statuses: Vec<String> = Vec::new();
2997
2998 let mut new_statuses = self.statuses_by_path.iter().peekable();
2999 let mut old_statuses = old.statuses_by_path.iter().peekable();
3000
3001 let mut current_new_entry = new_statuses.next();
3002 let mut current_old_entry = old_statuses.next();
3003 loop {
3004 match (current_new_entry, current_old_entry) {
3005 (Some(new_entry), Some(old_entry)) => {
3006 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3007 Ordering::Less => {
3008 updated_statuses.push(new_entry.to_proto());
3009 current_new_entry = new_statuses.next();
3010 }
3011 Ordering::Equal => {
3012 if new_entry.status != old_entry.status {
3013 updated_statuses.push(new_entry.to_proto());
3014 }
3015 current_old_entry = old_statuses.next();
3016 current_new_entry = new_statuses.next();
3017 }
3018 Ordering::Greater => {
3019 removed_statuses.push(old_entry.repo_path.to_proto());
3020 current_old_entry = old_statuses.next();
3021 }
3022 }
3023 }
3024 (None, Some(old_entry)) => {
3025 removed_statuses.push(old_entry.repo_path.to_proto());
3026 current_old_entry = old_statuses.next();
3027 }
3028 (Some(new_entry), None) => {
3029 updated_statuses.push(new_entry.to_proto());
3030 current_new_entry = new_statuses.next();
3031 }
3032 (None, None) => break,
3033 }
3034 }
3035
3036 proto::UpdateRepository {
3037 branch_summary: self.branch.as_ref().map(branch_to_proto),
3038 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3039 updated_statuses,
3040 removed_statuses,
3041 current_merge_conflicts: self
3042 .merge
3043 .conflicted_paths
3044 .iter()
3045 .map(|path| path.to_proto())
3046 .collect(),
3047 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3048 project_id,
3049 id: self.id.to_proto(),
3050 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3051 entry_ids: vec![],
3052 scan_id: self.scan_id,
3053 is_last_update: true,
3054 stash_entries: self
3055 .stash_entries
3056 .entries
3057 .iter()
3058 .map(stash_to_proto)
3059 .collect(),
3060 }
3061 }
3062
3063 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3064 self.statuses_by_path.iter().cloned()
3065 }
3066
3067 pub fn status_summary(&self) -> GitSummary {
3068 self.statuses_by_path.summary().item_summary
3069 }
3070
3071 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3072 self.statuses_by_path
3073 .get(&PathKey(path.0.clone()), ())
3074 .cloned()
3075 }
3076
3077 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3078 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3079 }
3080
3081 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3082 self.path_style
3083 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3084 .unwrap()
3085 .into()
3086 }
3087
3088 #[inline]
3089 fn abs_path_to_repo_path_inner(
3090 work_directory_abs_path: &Path,
3091 abs_path: &Path,
3092 path_style: PathStyle,
3093 ) -> Option<RepoPath> {
3094 abs_path
3095 .strip_prefix(&work_directory_abs_path)
3096 .ok()
3097 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3098 }
3099
3100 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3101 self.merge.conflicted_paths.contains(repo_path)
3102 }
3103
3104 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3105 let had_conflict_on_last_merge_head_change =
3106 self.merge.conflicted_paths.contains(repo_path);
3107 let has_conflict_currently = self
3108 .status_for_path(repo_path)
3109 .is_some_and(|entry| entry.status.is_conflicted());
3110 had_conflict_on_last_merge_head_change || has_conflict_currently
3111 }
3112
3113 /// This is the name that will be displayed in the repository selector for this repository.
3114 pub fn display_name(&self) -> SharedString {
3115 self.work_directory_abs_path
3116 .file_name()
3117 .unwrap_or_default()
3118 .to_string_lossy()
3119 .to_string()
3120 .into()
3121 }
3122}
3123
3124pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3125 proto::StashEntry {
3126 oid: entry.oid.as_bytes().to_vec(),
3127 message: entry.message.clone(),
3128 branch: entry.branch.clone(),
3129 index: entry.index as u64,
3130 timestamp: entry.timestamp,
3131 }
3132}
3133
3134pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3135 Ok(StashEntry {
3136 oid: Oid::from_bytes(&entry.oid)?,
3137 message: entry.message.clone(),
3138 index: entry.index as usize,
3139 branch: entry.branch.clone(),
3140 timestamp: entry.timestamp,
3141 })
3142}
3143
3144impl MergeDetails {
3145 async fn load(
3146 backend: &Arc<dyn GitRepository>,
3147 status: &SumTree<StatusEntry>,
3148 prev_snapshot: &RepositorySnapshot,
3149 ) -> Result<(MergeDetails, bool)> {
3150 log::debug!("load merge details");
3151 let message = backend.merge_message().await;
3152 let heads = backend
3153 .revparse_batch(vec![
3154 "MERGE_HEAD".into(),
3155 "CHERRY_PICK_HEAD".into(),
3156 "REBASE_HEAD".into(),
3157 "REVERT_HEAD".into(),
3158 "APPLY_HEAD".into(),
3159 ])
3160 .await
3161 .log_err()
3162 .unwrap_or_default()
3163 .into_iter()
3164 .map(|opt| opt.map(SharedString::from))
3165 .collect::<Vec<_>>();
3166 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3167 let conflicted_paths = if merge_heads_changed {
3168 let current_conflicted_paths = TreeSet::from_ordered_entries(
3169 status
3170 .iter()
3171 .filter(|entry| entry.status.is_conflicted())
3172 .map(|entry| entry.repo_path.clone()),
3173 );
3174
3175 // It can happen that we run a scan while a lengthy merge is in progress
3176 // that will eventually result in conflicts, but before those conflicts
3177 // are reported by `git status`. Since for the moment we only care about
3178 // the merge heads state for the purposes of tracking conflicts, don't update
3179 // this state until we see some conflicts.
3180 if heads.iter().any(Option::is_some)
3181 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3182 && current_conflicted_paths.is_empty()
3183 {
3184 log::debug!("not updating merge heads because no conflicts found");
3185 return Ok((
3186 MergeDetails {
3187 message: message.map(SharedString::from),
3188 ..prev_snapshot.merge.clone()
3189 },
3190 false,
3191 ));
3192 }
3193
3194 current_conflicted_paths
3195 } else {
3196 prev_snapshot.merge.conflicted_paths.clone()
3197 };
3198 let details = MergeDetails {
3199 conflicted_paths,
3200 message: message.map(SharedString::from),
3201 heads,
3202 };
3203 Ok((details, merge_heads_changed))
3204 }
3205}
3206
3207impl Repository {
3208 pub fn snapshot(&self) -> RepositorySnapshot {
3209 self.snapshot.clone()
3210 }
3211
3212 fn local(
3213 id: RepositoryId,
3214 work_directory_abs_path: Arc<Path>,
3215 dot_git_abs_path: Arc<Path>,
3216 repository_dir_abs_path: Arc<Path>,
3217 common_dir_abs_path: Arc<Path>,
3218 project_environment: WeakEntity<ProjectEnvironment>,
3219 fs: Arc<dyn Fs>,
3220 git_store: WeakEntity<GitStore>,
3221 cx: &mut Context<Self>,
3222 ) -> Self {
3223 let snapshot =
3224 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3225 Repository {
3226 this: cx.weak_entity(),
3227 git_store,
3228 snapshot,
3229 commit_message_buffer: None,
3230 askpass_delegates: Default::default(),
3231 paths_needing_status_update: Default::default(),
3232 latest_askpass_id: 0,
3233 job_sender: Repository::spawn_local_git_worker(
3234 work_directory_abs_path,
3235 dot_git_abs_path,
3236 repository_dir_abs_path,
3237 common_dir_abs_path,
3238 project_environment,
3239 fs,
3240 cx,
3241 ),
3242 job_id: 0,
3243 active_jobs: Default::default(),
3244 }
3245 }
3246
3247 fn remote(
3248 id: RepositoryId,
3249 work_directory_abs_path: Arc<Path>,
3250 path_style: PathStyle,
3251 project_id: ProjectId,
3252 client: AnyProtoClient,
3253 git_store: WeakEntity<GitStore>,
3254 cx: &mut Context<Self>,
3255 ) -> Self {
3256 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3257 Self {
3258 this: cx.weak_entity(),
3259 snapshot,
3260 commit_message_buffer: None,
3261 git_store,
3262 paths_needing_status_update: Default::default(),
3263 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3264 askpass_delegates: Default::default(),
3265 latest_askpass_id: 0,
3266 active_jobs: Default::default(),
3267 job_id: 0,
3268 }
3269 }
3270
3271 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3272 self.git_store.upgrade()
3273 }
3274
3275 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3276 let this = cx.weak_entity();
3277 let git_store = self.git_store.clone();
3278 let _ = self.send_keyed_job(
3279 Some(GitJobKey::ReloadBufferDiffBases),
3280 None,
3281 |state, mut cx| async move {
3282 let RepositoryState::Local { backend, .. } = state else {
3283 log::error!("tried to recompute diffs for a non-local repository");
3284 return Ok(());
3285 };
3286
3287 let Some(this) = this.upgrade() else {
3288 return Ok(());
3289 };
3290
3291 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3292 git_store.update(cx, |git_store, cx| {
3293 git_store
3294 .diffs
3295 .iter()
3296 .filter_map(|(buffer_id, diff_state)| {
3297 let buffer_store = git_store.buffer_store.read(cx);
3298 let buffer = buffer_store.get(*buffer_id)?;
3299 let file = File::from_dyn(buffer.read(cx).file())?;
3300 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3301 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3302 log::debug!(
3303 "start reload diff bases for repo path {}",
3304 repo_path.as_unix_str()
3305 );
3306 diff_state.update(cx, |diff_state, _| {
3307 let has_unstaged_diff = diff_state
3308 .unstaged_diff
3309 .as_ref()
3310 .is_some_and(|diff| diff.is_upgradable());
3311 let has_uncommitted_diff = diff_state
3312 .uncommitted_diff
3313 .as_ref()
3314 .is_some_and(|set| set.is_upgradable());
3315
3316 Some((
3317 buffer,
3318 repo_path,
3319 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3320 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3321 ))
3322 })
3323 })
3324 .collect::<Vec<_>>()
3325 })
3326 })??;
3327
3328 let buffer_diff_base_changes = cx
3329 .background_spawn(async move {
3330 let mut changes = Vec::new();
3331 for (buffer, repo_path, current_index_text, current_head_text) in
3332 &repo_diff_state_updates
3333 {
3334 let index_text = if current_index_text.is_some() {
3335 backend.load_index_text(repo_path.clone()).await
3336 } else {
3337 None
3338 };
3339 let head_text = if current_head_text.is_some() {
3340 backend.load_committed_text(repo_path.clone()).await
3341 } else {
3342 None
3343 };
3344
3345 let change =
3346 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3347 (Some(current_index), Some(current_head)) => {
3348 let index_changed =
3349 index_text.as_ref() != current_index.as_deref();
3350 let head_changed =
3351 head_text.as_ref() != current_head.as_deref();
3352 if index_changed && head_changed {
3353 if index_text == head_text {
3354 Some(DiffBasesChange::SetBoth(head_text))
3355 } else {
3356 Some(DiffBasesChange::SetEach {
3357 index: index_text,
3358 head: head_text,
3359 })
3360 }
3361 } else if index_changed {
3362 Some(DiffBasesChange::SetIndex(index_text))
3363 } else if head_changed {
3364 Some(DiffBasesChange::SetHead(head_text))
3365 } else {
3366 None
3367 }
3368 }
3369 (Some(current_index), None) => {
3370 let index_changed =
3371 index_text.as_ref() != current_index.as_deref();
3372 index_changed
3373 .then_some(DiffBasesChange::SetIndex(index_text))
3374 }
3375 (None, Some(current_head)) => {
3376 let head_changed =
3377 head_text.as_ref() != current_head.as_deref();
3378 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3379 }
3380 (None, None) => None,
3381 };
3382
3383 changes.push((buffer.clone(), change))
3384 }
3385 changes
3386 })
3387 .await;
3388
3389 git_store.update(&mut cx, |git_store, cx| {
3390 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3391 let buffer_snapshot = buffer.read(cx).text_snapshot();
3392 let buffer_id = buffer_snapshot.remote_id();
3393 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3394 continue;
3395 };
3396
3397 let downstream_client = git_store.downstream_client();
3398 diff_state.update(cx, |diff_state, cx| {
3399 use proto::update_diff_bases::Mode;
3400
3401 if let Some((diff_bases_change, (client, project_id))) =
3402 diff_bases_change.clone().zip(downstream_client)
3403 {
3404 let (staged_text, committed_text, mode) = match diff_bases_change {
3405 DiffBasesChange::SetIndex(index) => {
3406 (index, None, Mode::IndexOnly)
3407 }
3408 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3409 DiffBasesChange::SetEach { index, head } => {
3410 (index, head, Mode::IndexAndHead)
3411 }
3412 DiffBasesChange::SetBoth(text) => {
3413 (None, text, Mode::IndexMatchesHead)
3414 }
3415 };
3416 client
3417 .send(proto::UpdateDiffBases {
3418 project_id: project_id.to_proto(),
3419 buffer_id: buffer_id.to_proto(),
3420 staged_text,
3421 committed_text,
3422 mode: mode as i32,
3423 })
3424 .log_err();
3425 }
3426
3427 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3428 });
3429 }
3430 })
3431 },
3432 );
3433 }
3434
3435 pub fn send_job<F, Fut, R>(
3436 &mut self,
3437 status: Option<SharedString>,
3438 job: F,
3439 ) -> oneshot::Receiver<R>
3440 where
3441 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3442 Fut: Future<Output = R> + 'static,
3443 R: Send + 'static,
3444 {
3445 self.send_keyed_job(None, status, job)
3446 }
3447
3448 fn send_keyed_job<F, Fut, R>(
3449 &mut self,
3450 key: Option<GitJobKey>,
3451 status: Option<SharedString>,
3452 job: F,
3453 ) -> oneshot::Receiver<R>
3454 where
3455 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3456 Fut: Future<Output = R> + 'static,
3457 R: Send + 'static,
3458 {
3459 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3460 let job_id = post_inc(&mut self.job_id);
3461 let this = self.this.clone();
3462 self.job_sender
3463 .unbounded_send(GitJob {
3464 key,
3465 job: Box::new(move |state, cx: &mut AsyncApp| {
3466 let job = job(state, cx.clone());
3467 cx.spawn(async move |cx| {
3468 if let Some(s) = status.clone() {
3469 this.update(cx, |this, cx| {
3470 this.active_jobs.insert(
3471 job_id,
3472 JobInfo {
3473 start: Instant::now(),
3474 message: s.clone(),
3475 },
3476 );
3477
3478 cx.notify();
3479 })
3480 .ok();
3481 }
3482 let result = job.await;
3483
3484 this.update(cx, |this, cx| {
3485 this.active_jobs.remove(&job_id);
3486 cx.notify();
3487 })
3488 .ok();
3489
3490 result_tx.send(result).ok();
3491 })
3492 }),
3493 })
3494 .ok();
3495 result_rx
3496 }
3497
3498 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3499 let Some(git_store) = self.git_store.upgrade() else {
3500 return;
3501 };
3502 let entity = cx.entity();
3503 git_store.update(cx, |git_store, cx| {
3504 let Some((&id, _)) = git_store
3505 .repositories
3506 .iter()
3507 .find(|(_, handle)| *handle == &entity)
3508 else {
3509 return;
3510 };
3511 git_store.active_repo_id = Some(id);
3512 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3513 });
3514 }
3515
3516 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3517 self.snapshot.status()
3518 }
3519
3520 pub fn cached_stash(&self) -> GitStash {
3521 self.snapshot.stash_entries.clone()
3522 }
3523
3524 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3525 let git_store = self.git_store.upgrade()?;
3526 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3527 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3528 let abs_path = SanitizedPath::new(&abs_path);
3529 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3530 Some(ProjectPath {
3531 worktree_id: worktree.read(cx).id(),
3532 path: relative_path,
3533 })
3534 }
3535
3536 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3537 let git_store = self.git_store.upgrade()?;
3538 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3539 let abs_path = worktree_store.absolutize(path, cx)?;
3540 self.snapshot.abs_path_to_repo_path(&abs_path)
3541 }
3542
3543 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3544 other
3545 .read(cx)
3546 .snapshot
3547 .work_directory_abs_path
3548 .starts_with(&self.snapshot.work_directory_abs_path)
3549 }
3550
3551 pub fn open_commit_buffer(
3552 &mut self,
3553 languages: Option<Arc<LanguageRegistry>>,
3554 buffer_store: Entity<BufferStore>,
3555 cx: &mut Context<Self>,
3556 ) -> Task<Result<Entity<Buffer>>> {
3557 let id = self.id;
3558 if let Some(buffer) = self.commit_message_buffer.clone() {
3559 return Task::ready(Ok(buffer));
3560 }
3561 let this = cx.weak_entity();
3562
3563 let rx = self.send_job(None, move |state, mut cx| async move {
3564 let Some(this) = this.upgrade() else {
3565 bail!("git store was dropped");
3566 };
3567 match state {
3568 RepositoryState::Local { .. } => {
3569 this.update(&mut cx, |_, cx| {
3570 Self::open_local_commit_buffer(languages, buffer_store, cx)
3571 })?
3572 .await
3573 }
3574 RepositoryState::Remote { project_id, client } => {
3575 let request = client.request(proto::OpenCommitMessageBuffer {
3576 project_id: project_id.0,
3577 repository_id: id.to_proto(),
3578 });
3579 let response = request.await.context("requesting to open commit buffer")?;
3580 let buffer_id = BufferId::new(response.buffer_id)?;
3581 let buffer = buffer_store
3582 .update(&mut cx, |buffer_store, cx| {
3583 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3584 })?
3585 .await?;
3586 if let Some(language_registry) = languages {
3587 let git_commit_language =
3588 language_registry.language_for_name("Git Commit").await?;
3589 buffer.update(&mut cx, |buffer, cx| {
3590 buffer.set_language(Some(git_commit_language), cx);
3591 })?;
3592 }
3593 this.update(&mut cx, |this, _| {
3594 this.commit_message_buffer = Some(buffer.clone());
3595 })?;
3596 Ok(buffer)
3597 }
3598 }
3599 });
3600
3601 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3602 }
3603
3604 fn open_local_commit_buffer(
3605 language_registry: Option<Arc<LanguageRegistry>>,
3606 buffer_store: Entity<BufferStore>,
3607 cx: &mut Context<Self>,
3608 ) -> Task<Result<Entity<Buffer>>> {
3609 cx.spawn(async move |repository, cx| {
3610 let buffer = buffer_store
3611 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3612 .await?;
3613
3614 if let Some(language_registry) = language_registry {
3615 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3616 buffer.update(cx, |buffer, cx| {
3617 buffer.set_language(Some(git_commit_language), cx);
3618 })?;
3619 }
3620
3621 repository.update(cx, |repository, _| {
3622 repository.commit_message_buffer = Some(buffer.clone());
3623 })?;
3624 Ok(buffer)
3625 })
3626 }
3627
3628 pub fn checkout_files(
3629 &mut self,
3630 commit: &str,
3631 paths: Vec<RepoPath>,
3632 _cx: &mut App,
3633 ) -> oneshot::Receiver<Result<()>> {
3634 let commit = commit.to_string();
3635 let id = self.id;
3636
3637 self.send_job(
3638 Some(format!("git checkout {}", commit).into()),
3639 move |git_repo, _| async move {
3640 match git_repo {
3641 RepositoryState::Local {
3642 backend,
3643 environment,
3644 ..
3645 } => {
3646 backend
3647 .checkout_files(commit, paths, environment.clone())
3648 .await
3649 }
3650 RepositoryState::Remote { project_id, client } => {
3651 client
3652 .request(proto::GitCheckoutFiles {
3653 project_id: project_id.0,
3654 repository_id: id.to_proto(),
3655 commit,
3656 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3657 })
3658 .await?;
3659
3660 Ok(())
3661 }
3662 }
3663 },
3664 )
3665 }
3666
3667 pub fn reset(
3668 &mut self,
3669 commit: String,
3670 reset_mode: ResetMode,
3671 _cx: &mut App,
3672 ) -> oneshot::Receiver<Result<()>> {
3673 let id = self.id;
3674
3675 self.send_job(None, move |git_repo, _| async move {
3676 match git_repo {
3677 RepositoryState::Local {
3678 backend,
3679 environment,
3680 ..
3681 } => backend.reset(commit, reset_mode, environment).await,
3682 RepositoryState::Remote { project_id, client } => {
3683 client
3684 .request(proto::GitReset {
3685 project_id: project_id.0,
3686 repository_id: id.to_proto(),
3687 commit,
3688 mode: match reset_mode {
3689 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3690 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3691 },
3692 })
3693 .await?;
3694
3695 Ok(())
3696 }
3697 }
3698 })
3699 }
3700
3701 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3702 let id = self.id;
3703 self.send_job(None, move |git_repo, _cx| async move {
3704 match git_repo {
3705 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3706 RepositoryState::Remote { project_id, client } => {
3707 let resp = client
3708 .request(proto::GitShow {
3709 project_id: project_id.0,
3710 repository_id: id.to_proto(),
3711 commit,
3712 })
3713 .await?;
3714
3715 Ok(CommitDetails {
3716 sha: resp.sha.into(),
3717 message: resp.message.into(),
3718 commit_timestamp: resp.commit_timestamp,
3719 author_email: resp.author_email.into(),
3720 author_name: resp.author_name.into(),
3721 })
3722 }
3723 }
3724 })
3725 }
3726
3727 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3728 let id = self.id;
3729 self.send_job(None, move |git_repo, cx| async move {
3730 match git_repo {
3731 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3732 RepositoryState::Remote {
3733 client, project_id, ..
3734 } => {
3735 let response = client
3736 .request(proto::LoadCommitDiff {
3737 project_id: project_id.0,
3738 repository_id: id.to_proto(),
3739 commit,
3740 })
3741 .await?;
3742 Ok(CommitDiff {
3743 files: response
3744 .files
3745 .into_iter()
3746 .map(|file| {
3747 Ok(CommitFile {
3748 path: RepoPath::from_proto(&file.path)?,
3749 old_text: file.old_text,
3750 new_text: file.new_text,
3751 })
3752 })
3753 .collect::<Result<Vec<_>>>()?,
3754 })
3755 }
3756 }
3757 })
3758 }
3759
3760 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3761 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3762 }
3763
3764 fn save_buffers<'a>(
3765 &self,
3766 entries: impl IntoIterator<Item = &'a RepoPath>,
3767 cx: &mut Context<Self>,
3768 ) -> Vec<Task<anyhow::Result<()>>> {
3769 let mut save_futures = Vec::new();
3770 if let Some(buffer_store) = self.buffer_store(cx) {
3771 buffer_store.update(cx, |buffer_store, cx| {
3772 for path in entries {
3773 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3774 continue;
3775 };
3776 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3777 && buffer
3778 .read(cx)
3779 .file()
3780 .is_some_and(|file| file.disk_state().exists())
3781 && buffer.read(cx).has_unsaved_edits()
3782 {
3783 save_futures.push(buffer_store.save_buffer(buffer, cx));
3784 }
3785 }
3786 })
3787 }
3788 save_futures
3789 }
3790
3791 pub fn stage_entries(
3792 &self,
3793 entries: Vec<RepoPath>,
3794 cx: &mut Context<Self>,
3795 ) -> Task<anyhow::Result<()>> {
3796 if entries.is_empty() {
3797 return Task::ready(Ok(()));
3798 }
3799 let id = self.id;
3800 let save_tasks = self.save_buffers(&entries, cx);
3801 let paths = entries
3802 .iter()
3803 .map(|p| p.as_unix_str())
3804 .collect::<Vec<_>>()
3805 .join(" ");
3806 let status = format!("git add {paths}");
3807 let job_key = match entries.len() {
3808 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3809 _ => None,
3810 };
3811
3812 cx.spawn(async move |this, cx| {
3813 for save_task in save_tasks {
3814 save_task.await?;
3815 }
3816
3817 this.update(cx, |this, _| {
3818 this.send_keyed_job(
3819 job_key,
3820 Some(status.into()),
3821 move |git_repo, _cx| async move {
3822 match git_repo {
3823 RepositoryState::Local {
3824 backend,
3825 environment,
3826 ..
3827 } => backend.stage_paths(entries, environment.clone()).await,
3828 RepositoryState::Remote { project_id, client } => {
3829 client
3830 .request(proto::Stage {
3831 project_id: project_id.0,
3832 repository_id: id.to_proto(),
3833 paths: entries
3834 .into_iter()
3835 .map(|repo_path| repo_path.to_proto())
3836 .collect(),
3837 })
3838 .await
3839 .context("sending stage request")?;
3840
3841 Ok(())
3842 }
3843 }
3844 },
3845 )
3846 })?
3847 .await??;
3848
3849 Ok(())
3850 })
3851 }
3852
3853 pub fn unstage_entries(
3854 &self,
3855 entries: Vec<RepoPath>,
3856 cx: &mut Context<Self>,
3857 ) -> Task<anyhow::Result<()>> {
3858 if entries.is_empty() {
3859 return Task::ready(Ok(()));
3860 }
3861 let id = self.id;
3862 let save_tasks = self.save_buffers(&entries, cx);
3863 let paths = entries
3864 .iter()
3865 .map(|p| p.as_unix_str())
3866 .collect::<Vec<_>>()
3867 .join(" ");
3868 let status = format!("git reset {paths}");
3869 let job_key = match entries.len() {
3870 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3871 _ => None,
3872 };
3873
3874 cx.spawn(async move |this, cx| {
3875 for save_task in save_tasks {
3876 save_task.await?;
3877 }
3878
3879 this.update(cx, |this, _| {
3880 this.send_keyed_job(
3881 job_key,
3882 Some(status.into()),
3883 move |git_repo, _cx| async move {
3884 match git_repo {
3885 RepositoryState::Local {
3886 backend,
3887 environment,
3888 ..
3889 } => backend.unstage_paths(entries, environment).await,
3890 RepositoryState::Remote { project_id, client } => {
3891 client
3892 .request(proto::Unstage {
3893 project_id: project_id.0,
3894 repository_id: id.to_proto(),
3895 paths: entries
3896 .into_iter()
3897 .map(|repo_path| repo_path.to_proto())
3898 .collect(),
3899 })
3900 .await
3901 .context("sending unstage request")?;
3902
3903 Ok(())
3904 }
3905 }
3906 },
3907 )
3908 })?
3909 .await??;
3910
3911 Ok(())
3912 })
3913 }
3914
3915 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3916 let to_stage = self
3917 .cached_status()
3918 .filter(|entry| !entry.status.staging().is_fully_staged())
3919 .map(|entry| entry.repo_path)
3920 .collect();
3921 self.stage_entries(to_stage, cx)
3922 }
3923
3924 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3925 let to_unstage = self
3926 .cached_status()
3927 .filter(|entry| entry.status.staging().has_staged())
3928 .map(|entry| entry.repo_path)
3929 .collect();
3930 self.unstage_entries(to_unstage, cx)
3931 }
3932
3933 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3934 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3935
3936 self.stash_entries(to_stash, cx)
3937 }
3938
3939 pub fn stash_entries(
3940 &mut self,
3941 entries: Vec<RepoPath>,
3942 cx: &mut Context<Self>,
3943 ) -> Task<anyhow::Result<()>> {
3944 let id = self.id;
3945
3946 cx.spawn(async move |this, cx| {
3947 this.update(cx, |this, _| {
3948 this.send_job(None, move |git_repo, _cx| async move {
3949 match git_repo {
3950 RepositoryState::Local {
3951 backend,
3952 environment,
3953 ..
3954 } => backend.stash_paths(entries, environment).await,
3955 RepositoryState::Remote { project_id, client } => {
3956 client
3957 .request(proto::Stash {
3958 project_id: project_id.0,
3959 repository_id: id.to_proto(),
3960 paths: entries
3961 .into_iter()
3962 .map(|repo_path| repo_path.to_proto())
3963 .collect(),
3964 })
3965 .await
3966 .context("sending stash request")?;
3967 Ok(())
3968 }
3969 }
3970 })
3971 })?
3972 .await??;
3973 Ok(())
3974 })
3975 }
3976
3977 pub fn stash_pop(
3978 &mut self,
3979 index: Option<usize>,
3980 cx: &mut Context<Self>,
3981 ) -> Task<anyhow::Result<()>> {
3982 let id = self.id;
3983 cx.spawn(async move |this, cx| {
3984 this.update(cx, |this, _| {
3985 this.send_job(None, move |git_repo, _cx| async move {
3986 match git_repo {
3987 RepositoryState::Local {
3988 backend,
3989 environment,
3990 ..
3991 } => backend.stash_pop(index, environment).await,
3992 RepositoryState::Remote { project_id, client } => {
3993 client
3994 .request(proto::StashPop {
3995 project_id: project_id.0,
3996 repository_id: id.to_proto(),
3997 stash_index: index.map(|i| i as u64),
3998 })
3999 .await
4000 .context("sending stash pop request")?;
4001 Ok(())
4002 }
4003 }
4004 })
4005 })?
4006 .await??;
4007 Ok(())
4008 })
4009 }
4010
4011 pub fn stash_apply(
4012 &mut self,
4013 index: Option<usize>,
4014 cx: &mut Context<Self>,
4015 ) -> Task<anyhow::Result<()>> {
4016 let id = self.id;
4017 cx.spawn(async move |this, cx| {
4018 this.update(cx, |this, _| {
4019 this.send_job(None, move |git_repo, _cx| async move {
4020 match git_repo {
4021 RepositoryState::Local {
4022 backend,
4023 environment,
4024 ..
4025 } => backend.stash_apply(index, environment).await,
4026 RepositoryState::Remote { project_id, client } => {
4027 client
4028 .request(proto::StashApply {
4029 project_id: project_id.0,
4030 repository_id: id.to_proto(),
4031 stash_index: index.map(|i| i as u64),
4032 })
4033 .await
4034 .context("sending stash apply request")?;
4035 Ok(())
4036 }
4037 }
4038 })
4039 })?
4040 .await??;
4041 Ok(())
4042 })
4043 }
4044
4045 pub fn stash_drop(
4046 &mut self,
4047 index: Option<usize>,
4048 cx: &mut Context<Self>,
4049 ) -> oneshot::Receiver<anyhow::Result<()>> {
4050 let id = self.id;
4051 let updates_tx = self
4052 .git_store()
4053 .and_then(|git_store| match &git_store.read(cx).state {
4054 GitStoreState::Local { downstream, .. } => downstream
4055 .as_ref()
4056 .map(|downstream| downstream.updates_tx.clone()),
4057 _ => None,
4058 });
4059 let this = cx.weak_entity();
4060 self.send_job(None, move |git_repo, mut cx| async move {
4061 match git_repo {
4062 RepositoryState::Local {
4063 backend,
4064 environment,
4065 ..
4066 } => {
4067 // TODO would be nice to not have to do this manually
4068 let result = backend.stash_drop(index, environment).await;
4069 if result.is_ok()
4070 && let Ok(stash_entries) = backend.stash_entries().await
4071 {
4072 let snapshot = this.update(&mut cx, |this, cx| {
4073 this.snapshot.stash_entries = stash_entries;
4074 cx.emit(RepositoryEvent::StashEntriesChanged);
4075 this.snapshot.clone()
4076 })?;
4077 if let Some(updates_tx) = updates_tx {
4078 updates_tx
4079 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4080 .ok();
4081 }
4082 }
4083
4084 result
4085 }
4086 RepositoryState::Remote { project_id, client } => {
4087 client
4088 .request(proto::StashDrop {
4089 project_id: project_id.0,
4090 repository_id: id.to_proto(),
4091 stash_index: index.map(|i| i as u64),
4092 })
4093 .await
4094 .context("sending stash pop request")?;
4095 Ok(())
4096 }
4097 }
4098 })
4099 }
4100
4101 pub fn commit(
4102 &mut self,
4103 message: SharedString,
4104 name_and_email: Option<(SharedString, SharedString)>,
4105 options: CommitOptions,
4106 _cx: &mut App,
4107 ) -> oneshot::Receiver<Result<()>> {
4108 let id = self.id;
4109
4110 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4111 match git_repo {
4112 RepositoryState::Local {
4113 backend,
4114 environment,
4115 ..
4116 } => {
4117 backend
4118 .commit(message, name_and_email, options, environment)
4119 .await
4120 }
4121 RepositoryState::Remote { project_id, client } => {
4122 let (name, email) = name_and_email.unzip();
4123 client
4124 .request(proto::Commit {
4125 project_id: project_id.0,
4126 repository_id: id.to_proto(),
4127 message: String::from(message),
4128 name: name.map(String::from),
4129 email: email.map(String::from),
4130 options: Some(proto::commit::CommitOptions {
4131 amend: options.amend,
4132 signoff: options.signoff,
4133 }),
4134 })
4135 .await
4136 .context("sending commit request")?;
4137
4138 Ok(())
4139 }
4140 }
4141 })
4142 }
4143
4144 pub fn fetch(
4145 &mut self,
4146 fetch_options: FetchOptions,
4147 askpass: AskPassDelegate,
4148 _cx: &mut App,
4149 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4150 let askpass_delegates = self.askpass_delegates.clone();
4151 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4152 let id = self.id;
4153
4154 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4155 match git_repo {
4156 RepositoryState::Local {
4157 backend,
4158 environment,
4159 ..
4160 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4161 RepositoryState::Remote { project_id, client } => {
4162 askpass_delegates.lock().insert(askpass_id, askpass);
4163 let _defer = util::defer(|| {
4164 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4165 debug_assert!(askpass_delegate.is_some());
4166 });
4167
4168 let response = client
4169 .request(proto::Fetch {
4170 project_id: project_id.0,
4171 repository_id: id.to_proto(),
4172 askpass_id,
4173 remote: fetch_options.to_proto(),
4174 })
4175 .await
4176 .context("sending fetch request")?;
4177
4178 Ok(RemoteCommandOutput {
4179 stdout: response.stdout,
4180 stderr: response.stderr,
4181 })
4182 }
4183 }
4184 })
4185 }
4186
4187 pub fn push(
4188 &mut self,
4189 branch: SharedString,
4190 remote: SharedString,
4191 options: Option<PushOptions>,
4192 askpass: AskPassDelegate,
4193 cx: &mut Context<Self>,
4194 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4195 let askpass_delegates = self.askpass_delegates.clone();
4196 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4197 let id = self.id;
4198
4199 let args = options
4200 .map(|option| match option {
4201 PushOptions::SetUpstream => " --set-upstream",
4202 PushOptions::Force => " --force-with-lease",
4203 })
4204 .unwrap_or("");
4205
4206 let updates_tx = self
4207 .git_store()
4208 .and_then(|git_store| match &git_store.read(cx).state {
4209 GitStoreState::Local { downstream, .. } => downstream
4210 .as_ref()
4211 .map(|downstream| downstream.updates_tx.clone()),
4212 _ => None,
4213 });
4214
4215 let this = cx.weak_entity();
4216 self.send_job(
4217 Some(format!("git push {} {} {}", args, remote, branch).into()),
4218 move |git_repo, mut cx| async move {
4219 match git_repo {
4220 RepositoryState::Local {
4221 backend,
4222 environment,
4223 ..
4224 } => {
4225 let result = backend
4226 .push(
4227 branch.to_string(),
4228 remote.to_string(),
4229 options,
4230 askpass,
4231 environment.clone(),
4232 cx.clone(),
4233 )
4234 .await;
4235 // TODO would be nice to not have to do this manually
4236 if result.is_ok() {
4237 let branches = backend.branches().await?;
4238 let branch = branches.into_iter().find(|branch| branch.is_head);
4239 log::info!("head branch after scan is {branch:?}");
4240 let snapshot = this.update(&mut cx, |this, cx| {
4241 this.snapshot.branch = branch;
4242 cx.emit(RepositoryEvent::BranchChanged);
4243 this.snapshot.clone()
4244 })?;
4245 if let Some(updates_tx) = updates_tx {
4246 updates_tx
4247 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4248 .ok();
4249 }
4250 }
4251 result
4252 }
4253 RepositoryState::Remote { project_id, client } => {
4254 askpass_delegates.lock().insert(askpass_id, askpass);
4255 let _defer = util::defer(|| {
4256 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4257 debug_assert!(askpass_delegate.is_some());
4258 });
4259 let response = client
4260 .request(proto::Push {
4261 project_id: project_id.0,
4262 repository_id: id.to_proto(),
4263 askpass_id,
4264 branch_name: branch.to_string(),
4265 remote_name: remote.to_string(),
4266 options: options.map(|options| match options {
4267 PushOptions::Force => proto::push::PushOptions::Force,
4268 PushOptions::SetUpstream => {
4269 proto::push::PushOptions::SetUpstream
4270 }
4271 }
4272 as i32),
4273 })
4274 .await
4275 .context("sending push request")?;
4276
4277 Ok(RemoteCommandOutput {
4278 stdout: response.stdout,
4279 stderr: response.stderr,
4280 })
4281 }
4282 }
4283 },
4284 )
4285 }
4286
4287 pub fn pull(
4288 &mut self,
4289 branch: SharedString,
4290 remote: SharedString,
4291 askpass: AskPassDelegate,
4292 _cx: &mut App,
4293 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4294 let askpass_delegates = self.askpass_delegates.clone();
4295 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4296 let id = self.id;
4297
4298 self.send_job(
4299 Some(format!("git pull {} {}", remote, branch).into()),
4300 move |git_repo, cx| async move {
4301 match git_repo {
4302 RepositoryState::Local {
4303 backend,
4304 environment,
4305 ..
4306 } => {
4307 backend
4308 .pull(
4309 branch.to_string(),
4310 remote.to_string(),
4311 askpass,
4312 environment.clone(),
4313 cx,
4314 )
4315 .await
4316 }
4317 RepositoryState::Remote { project_id, client } => {
4318 askpass_delegates.lock().insert(askpass_id, askpass);
4319 let _defer = util::defer(|| {
4320 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4321 debug_assert!(askpass_delegate.is_some());
4322 });
4323 let response = client
4324 .request(proto::Pull {
4325 project_id: project_id.0,
4326 repository_id: id.to_proto(),
4327 askpass_id,
4328 branch_name: branch.to_string(),
4329 remote_name: remote.to_string(),
4330 })
4331 .await
4332 .context("sending pull request")?;
4333
4334 Ok(RemoteCommandOutput {
4335 stdout: response.stdout,
4336 stderr: response.stderr,
4337 })
4338 }
4339 }
4340 },
4341 )
4342 }
4343
4344 fn spawn_set_index_text_job(
4345 &mut self,
4346 path: RepoPath,
4347 content: Option<String>,
4348 hunk_staging_operation_count: Option<usize>,
4349 cx: &mut Context<Self>,
4350 ) -> oneshot::Receiver<anyhow::Result<()>> {
4351 let id = self.id;
4352 let this = cx.weak_entity();
4353 let git_store = self.git_store.clone();
4354 self.send_keyed_job(
4355 Some(GitJobKey::WriteIndex(path.clone())),
4356 None,
4357 move |git_repo, mut cx| async move {
4358 log::debug!(
4359 "start updating index text for buffer {}",
4360 path.as_unix_str()
4361 );
4362 match git_repo {
4363 RepositoryState::Local {
4364 backend,
4365 environment,
4366 ..
4367 } => {
4368 backend
4369 .set_index_text(path.clone(), content, environment.clone())
4370 .await?;
4371 }
4372 RepositoryState::Remote { project_id, client } => {
4373 client
4374 .request(proto::SetIndexText {
4375 project_id: project_id.0,
4376 repository_id: id.to_proto(),
4377 path: path.to_proto(),
4378 text: content,
4379 })
4380 .await?;
4381 }
4382 }
4383 log::debug!(
4384 "finish updating index text for buffer {}",
4385 path.as_unix_str()
4386 );
4387
4388 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4389 let project_path = this
4390 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4391 .ok()
4392 .flatten();
4393 git_store.update(&mut cx, |git_store, cx| {
4394 let buffer_id = git_store
4395 .buffer_store
4396 .read(cx)
4397 .get_by_path(&project_path?)?
4398 .read(cx)
4399 .remote_id();
4400 let diff_state = git_store.diffs.get(&buffer_id)?;
4401 diff_state.update(cx, |diff_state, _| {
4402 diff_state.hunk_staging_operation_count_as_of_write =
4403 hunk_staging_operation_count;
4404 });
4405 Some(())
4406 })?;
4407 }
4408 Ok(())
4409 },
4410 )
4411 }
4412
4413 pub fn get_remotes(
4414 &mut self,
4415 branch_name: Option<String>,
4416 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4417 let id = self.id;
4418 self.send_job(None, move |repo, _cx| async move {
4419 match repo {
4420 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4421 RepositoryState::Remote { project_id, client } => {
4422 let response = client
4423 .request(proto::GetRemotes {
4424 project_id: project_id.0,
4425 repository_id: id.to_proto(),
4426 branch_name,
4427 })
4428 .await?;
4429
4430 let remotes = response
4431 .remotes
4432 .into_iter()
4433 .map(|remotes| git::repository::Remote {
4434 name: remotes.name.into(),
4435 })
4436 .collect();
4437
4438 Ok(remotes)
4439 }
4440 }
4441 })
4442 }
4443
4444 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4445 let id = self.id;
4446 self.send_job(None, move |repo, _| async move {
4447 match repo {
4448 RepositoryState::Local { backend, .. } => backend.branches().await,
4449 RepositoryState::Remote { project_id, client } => {
4450 let response = client
4451 .request(proto::GitGetBranches {
4452 project_id: project_id.0,
4453 repository_id: id.to_proto(),
4454 })
4455 .await?;
4456
4457 let branches = response
4458 .branches
4459 .into_iter()
4460 .map(|branch| proto_to_branch(&branch))
4461 .collect();
4462
4463 Ok(branches)
4464 }
4465 }
4466 })
4467 }
4468
4469 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4470 let id = self.id;
4471 self.send_job(None, move |repo, _| async move {
4472 match repo {
4473 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4474 RepositoryState::Remote { project_id, client } => {
4475 let response = client
4476 .request(proto::GitGetWorktrees {
4477 project_id: project_id.0,
4478 repository_id: id.to_proto(),
4479 })
4480 .await?;
4481
4482 let worktrees = response
4483 .worktrees
4484 .into_iter()
4485 .map(|worktree| proto_to_worktree(&worktree))
4486 .collect();
4487
4488 Ok(worktrees)
4489 }
4490 }
4491 })
4492 }
4493
4494 pub fn create_worktree(
4495 &mut self,
4496 name: String,
4497 path: PathBuf,
4498 commit: Option<String>,
4499 ) -> oneshot::Receiver<Result<()>> {
4500 let id = self.id;
4501 self.send_job(
4502 Some("git worktree add".into()),
4503 move |repo, _cx| async move {
4504 match repo {
4505 RepositoryState::Local { backend, .. } => {
4506 backend.create_worktree(name, path, commit).await
4507 }
4508 RepositoryState::Remote { project_id, client } => {
4509 client
4510 .request(proto::GitCreateWorktree {
4511 project_id: project_id.0,
4512 repository_id: id.to_proto(),
4513 name,
4514 directory: path.to_string_lossy().to_string(),
4515 commit,
4516 })
4517 .await?;
4518
4519 Ok(())
4520 }
4521 }
4522 },
4523 )
4524 }
4525
4526 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4527 let id = self.id;
4528 self.send_job(None, move |repo, _| async move {
4529 match repo {
4530 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4531 RepositoryState::Remote { project_id, client } => {
4532 let response = client
4533 .request(proto::GetDefaultBranch {
4534 project_id: project_id.0,
4535 repository_id: id.to_proto(),
4536 })
4537 .await?;
4538
4539 anyhow::Ok(response.branch.map(SharedString::from))
4540 }
4541 }
4542 })
4543 }
4544
4545 pub fn diff_tree(
4546 &mut self,
4547 diff_type: DiffTreeType,
4548 _cx: &App,
4549 ) -> oneshot::Receiver<Result<TreeDiff>> {
4550 let repository_id = self.snapshot.id;
4551 self.send_job(None, move |repo, _cx| async move {
4552 match repo {
4553 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4554 RepositoryState::Remote { client, project_id } => {
4555 let response = client
4556 .request(proto::GetTreeDiff {
4557 project_id: project_id.0,
4558 repository_id: repository_id.0,
4559 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4560 base: diff_type.base().to_string(),
4561 head: diff_type.head().to_string(),
4562 })
4563 .await?;
4564
4565 let entries = response
4566 .entries
4567 .into_iter()
4568 .filter_map(|entry| {
4569 let status = match entry.status() {
4570 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4571 proto::tree_diff_status::Status::Modified => {
4572 TreeDiffStatus::Modified {
4573 old: git::Oid::from_str(
4574 &entry.oid.context("missing oid").log_err()?,
4575 )
4576 .log_err()?,
4577 }
4578 }
4579 proto::tree_diff_status::Status::Deleted => {
4580 TreeDiffStatus::Deleted {
4581 old: git::Oid::from_str(
4582 &entry.oid.context("missing oid").log_err()?,
4583 )
4584 .log_err()?,
4585 }
4586 }
4587 };
4588 Some((
4589 RepoPath(RelPath::from_proto(&entry.path).log_err()?),
4590 status,
4591 ))
4592 })
4593 .collect();
4594
4595 Ok(TreeDiff { entries })
4596 }
4597 }
4598 })
4599 }
4600
4601 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4602 let id = self.id;
4603 self.send_job(None, move |repo, _cx| async move {
4604 match repo {
4605 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4606 RepositoryState::Remote { project_id, client } => {
4607 let response = client
4608 .request(proto::GitDiff {
4609 project_id: project_id.0,
4610 repository_id: id.to_proto(),
4611 diff_type: match diff_type {
4612 DiffType::HeadToIndex => {
4613 proto::git_diff::DiffType::HeadToIndex.into()
4614 }
4615 DiffType::HeadToWorktree => {
4616 proto::git_diff::DiffType::HeadToWorktree.into()
4617 }
4618 },
4619 })
4620 .await?;
4621
4622 Ok(response.diff)
4623 }
4624 }
4625 })
4626 }
4627
4628 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4629 let id = self.id;
4630 self.send_job(
4631 Some(format!("git switch -c {branch_name}").into()),
4632 move |repo, _cx| async move {
4633 match repo {
4634 RepositoryState::Local { backend, .. } => {
4635 backend.create_branch(branch_name).await
4636 }
4637 RepositoryState::Remote { project_id, client } => {
4638 client
4639 .request(proto::GitCreateBranch {
4640 project_id: project_id.0,
4641 repository_id: id.to_proto(),
4642 branch_name,
4643 })
4644 .await?;
4645
4646 Ok(())
4647 }
4648 }
4649 },
4650 )
4651 }
4652
4653 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4654 let id = self.id;
4655 self.send_job(
4656 Some(format!("git switch {branch_name}").into()),
4657 move |repo, _cx| async move {
4658 match repo {
4659 RepositoryState::Local { backend, .. } => {
4660 backend.change_branch(branch_name).await
4661 }
4662 RepositoryState::Remote { project_id, client } => {
4663 client
4664 .request(proto::GitChangeBranch {
4665 project_id: project_id.0,
4666 repository_id: id.to_proto(),
4667 branch_name,
4668 })
4669 .await?;
4670
4671 Ok(())
4672 }
4673 }
4674 },
4675 )
4676 }
4677
4678 pub fn rename_branch(
4679 &mut self,
4680 branch: String,
4681 new_name: String,
4682 ) -> oneshot::Receiver<Result<()>> {
4683 let id = self.id;
4684 self.send_job(
4685 Some(format!("git branch -m {branch} {new_name}").into()),
4686 move |repo, _cx| async move {
4687 match repo {
4688 RepositoryState::Local { backend, .. } => {
4689 backend.rename_branch(branch, new_name).await
4690 }
4691 RepositoryState::Remote { project_id, client } => {
4692 client
4693 .request(proto::GitRenameBranch {
4694 project_id: project_id.0,
4695 repository_id: id.to_proto(),
4696 branch,
4697 new_name,
4698 })
4699 .await?;
4700
4701 Ok(())
4702 }
4703 }
4704 },
4705 )
4706 }
4707
4708 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4709 let id = self.id;
4710 self.send_job(None, move |repo, _cx| async move {
4711 match repo {
4712 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4713 RepositoryState::Remote { project_id, client } => {
4714 let response = client
4715 .request(proto::CheckForPushedCommits {
4716 project_id: project_id.0,
4717 repository_id: id.to_proto(),
4718 })
4719 .await?;
4720
4721 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4722
4723 Ok(branches)
4724 }
4725 }
4726 })
4727 }
4728
4729 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4730 self.send_job(None, |repo, _cx| async move {
4731 match repo {
4732 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4733 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4734 }
4735 })
4736 }
4737
4738 pub fn restore_checkpoint(
4739 &mut self,
4740 checkpoint: GitRepositoryCheckpoint,
4741 ) -> oneshot::Receiver<Result<()>> {
4742 self.send_job(None, move |repo, _cx| async move {
4743 match repo {
4744 RepositoryState::Local { backend, .. } => {
4745 backend.restore_checkpoint(checkpoint).await
4746 }
4747 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4748 }
4749 })
4750 }
4751
4752 pub(crate) fn apply_remote_update(
4753 &mut self,
4754 update: proto::UpdateRepository,
4755 cx: &mut Context<Self>,
4756 ) -> Result<()> {
4757 let conflicted_paths = TreeSet::from_ordered_entries(
4758 update
4759 .current_merge_conflicts
4760 .into_iter()
4761 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4762 );
4763 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4764 let new_head_commit = update
4765 .head_commit_details
4766 .as_ref()
4767 .map(proto_to_commit_details);
4768 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4769 cx.emit(RepositoryEvent::BranchChanged)
4770 }
4771 self.snapshot.branch = new_branch;
4772 self.snapshot.head_commit = new_head_commit;
4773
4774 self.snapshot.merge.conflicted_paths = conflicted_paths;
4775 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4776 let new_stash_entries = GitStash {
4777 entries: update
4778 .stash_entries
4779 .iter()
4780 .filter_map(|entry| proto_to_stash(entry).ok())
4781 .collect(),
4782 };
4783 if self.snapshot.stash_entries != new_stash_entries {
4784 cx.emit(RepositoryEvent::StashEntriesChanged)
4785 }
4786 self.snapshot.stash_entries = new_stash_entries;
4787
4788 let edits = update
4789 .removed_statuses
4790 .into_iter()
4791 .filter_map(|path| {
4792 Some(sum_tree::Edit::Remove(PathKey(
4793 RelPath::from_proto(&path).log_err()?,
4794 )))
4795 })
4796 .chain(
4797 update
4798 .updated_statuses
4799 .into_iter()
4800 .filter_map(|updated_status| {
4801 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4802 }),
4803 )
4804 .collect::<Vec<_>>();
4805 if !edits.is_empty() {
4806 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4807 }
4808 self.snapshot.statuses_by_path.edit(edits, ());
4809 if update.is_last_update {
4810 self.snapshot.scan_id = update.scan_id;
4811 }
4812 Ok(())
4813 }
4814
4815 pub fn compare_checkpoints(
4816 &mut self,
4817 left: GitRepositoryCheckpoint,
4818 right: GitRepositoryCheckpoint,
4819 ) -> oneshot::Receiver<Result<bool>> {
4820 self.send_job(None, move |repo, _cx| async move {
4821 match repo {
4822 RepositoryState::Local { backend, .. } => {
4823 backend.compare_checkpoints(left, right).await
4824 }
4825 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4826 }
4827 })
4828 }
4829
4830 pub fn diff_checkpoints(
4831 &mut self,
4832 base_checkpoint: GitRepositoryCheckpoint,
4833 target_checkpoint: GitRepositoryCheckpoint,
4834 ) -> oneshot::Receiver<Result<String>> {
4835 self.send_job(None, move |repo, _cx| async move {
4836 match repo {
4837 RepositoryState::Local { backend, .. } => {
4838 backend
4839 .diff_checkpoints(base_checkpoint, target_checkpoint)
4840 .await
4841 }
4842 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4843 }
4844 })
4845 }
4846
4847 fn schedule_scan(
4848 &mut self,
4849 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4850 cx: &mut Context<Self>,
4851 ) {
4852 let this = cx.weak_entity();
4853 let _ = self.send_keyed_job(
4854 Some(GitJobKey::ReloadGitState),
4855 None,
4856 |state, mut cx| async move {
4857 log::debug!("run scheduled git status scan");
4858
4859 let Some(this) = this.upgrade() else {
4860 return Ok(());
4861 };
4862 let RepositoryState::Local { backend, .. } = state else {
4863 bail!("not a local repository")
4864 };
4865 let (snapshot, events) = this
4866 .update(&mut cx, |this, _| {
4867 this.paths_needing_status_update.clear();
4868 compute_snapshot(
4869 this.id,
4870 this.work_directory_abs_path.clone(),
4871 this.snapshot.clone(),
4872 backend.clone(),
4873 )
4874 })?
4875 .await?;
4876 this.update(&mut cx, |this, cx| {
4877 this.snapshot = snapshot.clone();
4878 for event in events {
4879 cx.emit(event);
4880 }
4881 })?;
4882 if let Some(updates_tx) = updates_tx {
4883 updates_tx
4884 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4885 .ok();
4886 }
4887 Ok(())
4888 },
4889 );
4890 }
4891
4892 fn spawn_local_git_worker(
4893 work_directory_abs_path: Arc<Path>,
4894 dot_git_abs_path: Arc<Path>,
4895 _repository_dir_abs_path: Arc<Path>,
4896 _common_dir_abs_path: Arc<Path>,
4897 project_environment: WeakEntity<ProjectEnvironment>,
4898 fs: Arc<dyn Fs>,
4899 cx: &mut Context<Self>,
4900 ) -> mpsc::UnboundedSender<GitJob> {
4901 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4902
4903 cx.spawn(async move |_, cx| {
4904 let environment = project_environment
4905 .upgrade()
4906 .context("missing project environment")?
4907 .update(cx, |project_environment, cx| {
4908 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
4909 })?
4910 .await
4911 .unwrap_or_else(|| {
4912 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4913 HashMap::default()
4914 });
4915 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4916 let backend = cx
4917 .background_spawn(async move {
4918 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
4919 .or_else(|| which::which("git").ok());
4920 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4921 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4922 })
4923 .await?;
4924
4925 if let Some(git_hosting_provider_registry) =
4926 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4927 {
4928 git_hosting_providers::register_additional_providers(
4929 git_hosting_provider_registry,
4930 backend.clone(),
4931 );
4932 }
4933
4934 let state = RepositoryState::Local {
4935 backend,
4936 environment: Arc::new(environment),
4937 };
4938 let mut jobs = VecDeque::new();
4939 loop {
4940 while let Ok(Some(next_job)) = job_rx.try_next() {
4941 jobs.push_back(next_job);
4942 }
4943
4944 if let Some(job) = jobs.pop_front() {
4945 if let Some(current_key) = &job.key
4946 && jobs
4947 .iter()
4948 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4949 {
4950 continue;
4951 }
4952 (job.job)(state.clone(), cx).await;
4953 } else if let Some(job) = job_rx.next().await {
4954 jobs.push_back(job);
4955 } else {
4956 break;
4957 }
4958 }
4959 anyhow::Ok(())
4960 })
4961 .detach_and_log_err(cx);
4962
4963 job_tx
4964 }
4965
4966 fn spawn_remote_git_worker(
4967 project_id: ProjectId,
4968 client: AnyProtoClient,
4969 cx: &mut Context<Self>,
4970 ) -> mpsc::UnboundedSender<GitJob> {
4971 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4972
4973 cx.spawn(async move |_, cx| {
4974 let state = RepositoryState::Remote { project_id, client };
4975 let mut jobs = VecDeque::new();
4976 loop {
4977 while let Ok(Some(next_job)) = job_rx.try_next() {
4978 jobs.push_back(next_job);
4979 }
4980
4981 if let Some(job) = jobs.pop_front() {
4982 if let Some(current_key) = &job.key
4983 && jobs
4984 .iter()
4985 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4986 {
4987 continue;
4988 }
4989 (job.job)(state.clone(), cx).await;
4990 } else if let Some(job) = job_rx.next().await {
4991 jobs.push_back(job);
4992 } else {
4993 break;
4994 }
4995 }
4996 anyhow::Ok(())
4997 })
4998 .detach_and_log_err(cx);
4999
5000 job_tx
5001 }
5002
5003 fn load_staged_text(
5004 &mut self,
5005 buffer_id: BufferId,
5006 repo_path: RepoPath,
5007 cx: &App,
5008 ) -> Task<Result<Option<String>>> {
5009 let rx = self.send_job(None, move |state, _| async move {
5010 match state {
5011 RepositoryState::Local { backend, .. } => {
5012 anyhow::Ok(backend.load_index_text(repo_path).await)
5013 }
5014 RepositoryState::Remote { project_id, client } => {
5015 let response = client
5016 .request(proto::OpenUnstagedDiff {
5017 project_id: project_id.to_proto(),
5018 buffer_id: buffer_id.to_proto(),
5019 })
5020 .await?;
5021 Ok(response.staged_text)
5022 }
5023 }
5024 });
5025 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5026 }
5027
5028 fn load_committed_text(
5029 &mut self,
5030 buffer_id: BufferId,
5031 repo_path: RepoPath,
5032 cx: &App,
5033 ) -> Task<Result<DiffBasesChange>> {
5034 let rx = self.send_job(None, move |state, _| async move {
5035 match state {
5036 RepositoryState::Local { backend, .. } => {
5037 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5038 let staged_text = backend.load_index_text(repo_path).await;
5039 let diff_bases_change = if committed_text == staged_text {
5040 DiffBasesChange::SetBoth(committed_text)
5041 } else {
5042 DiffBasesChange::SetEach {
5043 index: staged_text,
5044 head: committed_text,
5045 }
5046 };
5047 anyhow::Ok(diff_bases_change)
5048 }
5049 RepositoryState::Remote { project_id, client } => {
5050 use proto::open_uncommitted_diff_response::Mode;
5051
5052 let response = client
5053 .request(proto::OpenUncommittedDiff {
5054 project_id: project_id.to_proto(),
5055 buffer_id: buffer_id.to_proto(),
5056 })
5057 .await?;
5058 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5059 let bases = match mode {
5060 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5061 Mode::IndexAndHead => DiffBasesChange::SetEach {
5062 head: response.committed_text,
5063 index: response.staged_text,
5064 },
5065 };
5066 Ok(bases)
5067 }
5068 }
5069 });
5070
5071 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5072 }
5073 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5074 let repository_id = self.snapshot.id;
5075 let rx = self.send_job(None, move |state, _| async move {
5076 match state {
5077 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5078 RepositoryState::Remote { client, project_id } => {
5079 let response = client
5080 .request(proto::GetBlobContent {
5081 project_id: project_id.to_proto(),
5082 repository_id: repository_id.0,
5083 oid: oid.to_string(),
5084 })
5085 .await?;
5086 Ok(response.content)
5087 }
5088 }
5089 });
5090 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5091 }
5092
5093 fn paths_changed(
5094 &mut self,
5095 paths: Vec<RepoPath>,
5096 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5097 cx: &mut Context<Self>,
5098 ) {
5099 self.paths_needing_status_update.extend(paths);
5100
5101 let this = cx.weak_entity();
5102 let _ = self.send_keyed_job(
5103 Some(GitJobKey::RefreshStatuses),
5104 None,
5105 |state, mut cx| async move {
5106 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5107 (
5108 this.snapshot.clone(),
5109 mem::take(&mut this.paths_needing_status_update),
5110 )
5111 })?;
5112 let RepositoryState::Local { backend, .. } = state else {
5113 bail!("not a local repository")
5114 };
5115
5116 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5117 if paths.is_empty() {
5118 return Ok(());
5119 }
5120 let statuses = backend.status(&paths).await?;
5121 let stash_entries = backend.stash_entries().await?;
5122
5123 let changed_path_statuses = cx
5124 .background_spawn(async move {
5125 let mut changed_path_statuses = Vec::new();
5126 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5127 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5128
5129 for (repo_path, status) in &*statuses.entries {
5130 changed_paths.remove(repo_path);
5131 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5132 && cursor.item().is_some_and(|entry| entry.status == *status)
5133 {
5134 continue;
5135 }
5136
5137 changed_path_statuses.push(Edit::Insert(StatusEntry {
5138 repo_path: repo_path.clone(),
5139 status: *status,
5140 }));
5141 }
5142 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5143 for path in changed_paths.into_iter() {
5144 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5145 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
5146 }
5147 }
5148 changed_path_statuses
5149 })
5150 .await;
5151
5152 this.update(&mut cx, |this, cx| {
5153 if this.snapshot.stash_entries != stash_entries {
5154 cx.emit(RepositoryEvent::StashEntriesChanged);
5155 this.snapshot.stash_entries = stash_entries;
5156 }
5157
5158 if !changed_path_statuses.is_empty() {
5159 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
5160 this.snapshot
5161 .statuses_by_path
5162 .edit(changed_path_statuses, ());
5163 this.snapshot.scan_id += 1;
5164 }
5165
5166 if let Some(updates_tx) = updates_tx {
5167 updates_tx
5168 .unbounded_send(DownstreamUpdate::UpdateRepository(
5169 this.snapshot.clone(),
5170 ))
5171 .ok();
5172 }
5173 })
5174 },
5175 );
5176 }
5177
5178 /// currently running git command and when it started
5179 pub fn current_job(&self) -> Option<JobInfo> {
5180 self.active_jobs.values().next().cloned()
5181 }
5182
5183 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5184 self.send_job(None, |_, _| async {})
5185 }
5186}
5187
5188fn get_permalink_in_rust_registry_src(
5189 provider_registry: Arc<GitHostingProviderRegistry>,
5190 path: PathBuf,
5191 selection: Range<u32>,
5192) -> Result<url::Url> {
5193 #[derive(Deserialize)]
5194 struct CargoVcsGit {
5195 sha1: String,
5196 }
5197
5198 #[derive(Deserialize)]
5199 struct CargoVcsInfo {
5200 git: CargoVcsGit,
5201 path_in_vcs: String,
5202 }
5203
5204 #[derive(Deserialize)]
5205 struct CargoPackage {
5206 repository: String,
5207 }
5208
5209 #[derive(Deserialize)]
5210 struct CargoToml {
5211 package: CargoPackage,
5212 }
5213
5214 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5215 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5216 Some((dir, json))
5217 }) else {
5218 bail!("No .cargo_vcs_info.json found in parent directories")
5219 };
5220 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5221 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5222 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5223 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5224 .context("parsing package.repository field of manifest")?;
5225 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5226 let permalink = provider.build_permalink(
5227 remote,
5228 BuildPermalinkParams::new(
5229 &cargo_vcs_info.git.sha1,
5230 &RepoPath(
5231 RelPath::new(&path, PathStyle::local())
5232 .context("invalid path")?
5233 .into_arc(),
5234 ),
5235 Some(selection),
5236 ),
5237 );
5238 Ok(permalink)
5239}
5240
5241fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5242 let Some(blame) = blame else {
5243 return proto::BlameBufferResponse {
5244 blame_response: None,
5245 };
5246 };
5247
5248 let entries = blame
5249 .entries
5250 .into_iter()
5251 .map(|entry| proto::BlameEntry {
5252 sha: entry.sha.as_bytes().into(),
5253 start_line: entry.range.start,
5254 end_line: entry.range.end,
5255 original_line_number: entry.original_line_number,
5256 author: entry.author,
5257 author_mail: entry.author_mail,
5258 author_time: entry.author_time,
5259 author_tz: entry.author_tz,
5260 committer: entry.committer_name,
5261 committer_mail: entry.committer_email,
5262 committer_time: entry.committer_time,
5263 committer_tz: entry.committer_tz,
5264 summary: entry.summary,
5265 previous: entry.previous,
5266 filename: entry.filename,
5267 })
5268 .collect::<Vec<_>>();
5269
5270 let messages = blame
5271 .messages
5272 .into_iter()
5273 .map(|(oid, message)| proto::CommitMessage {
5274 oid: oid.as_bytes().into(),
5275 message,
5276 })
5277 .collect::<Vec<_>>();
5278
5279 proto::BlameBufferResponse {
5280 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5281 entries,
5282 messages,
5283 remote_url: blame.remote_url,
5284 }),
5285 }
5286}
5287
5288fn deserialize_blame_buffer_response(
5289 response: proto::BlameBufferResponse,
5290) -> Option<git::blame::Blame> {
5291 let response = response.blame_response?;
5292 let entries = response
5293 .entries
5294 .into_iter()
5295 .filter_map(|entry| {
5296 Some(git::blame::BlameEntry {
5297 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5298 range: entry.start_line..entry.end_line,
5299 original_line_number: entry.original_line_number,
5300 committer_name: entry.committer,
5301 committer_time: entry.committer_time,
5302 committer_tz: entry.committer_tz,
5303 committer_email: entry.committer_mail,
5304 author: entry.author,
5305 author_mail: entry.author_mail,
5306 author_time: entry.author_time,
5307 author_tz: entry.author_tz,
5308 summary: entry.summary,
5309 previous: entry.previous,
5310 filename: entry.filename,
5311 })
5312 })
5313 .collect::<Vec<_>>();
5314
5315 let messages = response
5316 .messages
5317 .into_iter()
5318 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5319 .collect::<HashMap<_, _>>();
5320
5321 Some(Blame {
5322 entries,
5323 messages,
5324 remote_url: response.remote_url,
5325 })
5326}
5327
5328fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5329 proto::Branch {
5330 is_head: branch.is_head,
5331 ref_name: branch.ref_name.to_string(),
5332 unix_timestamp: branch
5333 .most_recent_commit
5334 .as_ref()
5335 .map(|commit| commit.commit_timestamp as u64),
5336 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5337 ref_name: upstream.ref_name.to_string(),
5338 tracking: upstream
5339 .tracking
5340 .status()
5341 .map(|upstream| proto::UpstreamTracking {
5342 ahead: upstream.ahead as u64,
5343 behind: upstream.behind as u64,
5344 }),
5345 }),
5346 most_recent_commit: branch
5347 .most_recent_commit
5348 .as_ref()
5349 .map(|commit| proto::CommitSummary {
5350 sha: commit.sha.to_string(),
5351 subject: commit.subject.to_string(),
5352 commit_timestamp: commit.commit_timestamp,
5353 author_name: commit.author_name.to_string(),
5354 }),
5355 }
5356}
5357
5358fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5359 proto::Worktree {
5360 path: worktree.path.to_string_lossy().to_string(),
5361 ref_name: worktree.ref_name.to_string(),
5362 sha: worktree.sha.to_string(),
5363 }
5364}
5365
5366fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5367 git::repository::Worktree {
5368 path: PathBuf::from(proto.path.clone()),
5369 ref_name: proto.ref_name.clone().into(),
5370 sha: proto.sha.clone().into(),
5371 }
5372}
5373
5374fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5375 git::repository::Branch {
5376 is_head: proto.is_head,
5377 ref_name: proto.ref_name.clone().into(),
5378 upstream: proto
5379 .upstream
5380 .as_ref()
5381 .map(|upstream| git::repository::Upstream {
5382 ref_name: upstream.ref_name.to_string().into(),
5383 tracking: upstream
5384 .tracking
5385 .as_ref()
5386 .map(|tracking| {
5387 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5388 ahead: tracking.ahead as u32,
5389 behind: tracking.behind as u32,
5390 })
5391 })
5392 .unwrap_or(git::repository::UpstreamTracking::Gone),
5393 }),
5394 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5395 git::repository::CommitSummary {
5396 sha: commit.sha.to_string().into(),
5397 subject: commit.subject.to_string().into(),
5398 commit_timestamp: commit.commit_timestamp,
5399 author_name: commit.author_name.to_string().into(),
5400 has_parent: true,
5401 }
5402 }),
5403 }
5404}
5405
5406fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5407 proto::GitCommitDetails {
5408 sha: commit.sha.to_string(),
5409 message: commit.message.to_string(),
5410 commit_timestamp: commit.commit_timestamp,
5411 author_email: commit.author_email.to_string(),
5412 author_name: commit.author_name.to_string(),
5413 }
5414}
5415
5416fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5417 CommitDetails {
5418 sha: proto.sha.clone().into(),
5419 message: proto.message.clone().into(),
5420 commit_timestamp: proto.commit_timestamp,
5421 author_email: proto.author_email.clone().into(),
5422 author_name: proto.author_name.clone().into(),
5423 }
5424}
5425
5426async fn compute_snapshot(
5427 id: RepositoryId,
5428 work_directory_abs_path: Arc<Path>,
5429 prev_snapshot: RepositorySnapshot,
5430 backend: Arc<dyn GitRepository>,
5431) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5432 let mut events = Vec::new();
5433 let branches = backend.branches().await?;
5434 let branch = branches.into_iter().find(|branch| branch.is_head);
5435 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5436 let stash_entries = backend.stash_entries().await?;
5437 let statuses_by_path = SumTree::from_iter(
5438 statuses
5439 .entries
5440 .iter()
5441 .map(|(repo_path, status)| StatusEntry {
5442 repo_path: repo_path.clone(),
5443 status: *status,
5444 }),
5445 (),
5446 );
5447 let (merge_details, merge_heads_changed) =
5448 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5449 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5450
5451 if merge_heads_changed {
5452 events.push(RepositoryEvent::MergeHeadsChanged);
5453 }
5454
5455 if statuses_by_path != prev_snapshot.statuses_by_path {
5456 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5457 }
5458
5459 // Useful when branch is None in detached head state
5460 let head_commit = match backend.head_sha().await {
5461 Some(head_sha) => backend.show(head_sha).await.log_err(),
5462 None => None,
5463 };
5464
5465 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5466 events.push(RepositoryEvent::BranchChanged);
5467 }
5468
5469 // Used by edit prediction data collection
5470 let remote_origin_url = backend.remote_url("origin");
5471 let remote_upstream_url = backend.remote_url("upstream");
5472
5473 let snapshot = RepositorySnapshot {
5474 id,
5475 statuses_by_path,
5476 work_directory_abs_path,
5477 path_style: prev_snapshot.path_style,
5478 scan_id: prev_snapshot.scan_id + 1,
5479 branch,
5480 head_commit,
5481 merge: merge_details,
5482 remote_origin_url,
5483 remote_upstream_url,
5484 stash_entries,
5485 };
5486
5487 Ok((snapshot, events))
5488}
5489
5490fn status_from_proto(
5491 simple_status: i32,
5492 status: Option<proto::GitFileStatus>,
5493) -> anyhow::Result<FileStatus> {
5494 use proto::git_file_status::Variant;
5495
5496 let Some(variant) = status.and_then(|status| status.variant) else {
5497 let code = proto::GitStatus::from_i32(simple_status)
5498 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5499 let result = match code {
5500 proto::GitStatus::Added => TrackedStatus {
5501 worktree_status: StatusCode::Added,
5502 index_status: StatusCode::Unmodified,
5503 }
5504 .into(),
5505 proto::GitStatus::Modified => TrackedStatus {
5506 worktree_status: StatusCode::Modified,
5507 index_status: StatusCode::Unmodified,
5508 }
5509 .into(),
5510 proto::GitStatus::Conflict => UnmergedStatus {
5511 first_head: UnmergedStatusCode::Updated,
5512 second_head: UnmergedStatusCode::Updated,
5513 }
5514 .into(),
5515 proto::GitStatus::Deleted => TrackedStatus {
5516 worktree_status: StatusCode::Deleted,
5517 index_status: StatusCode::Unmodified,
5518 }
5519 .into(),
5520 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5521 };
5522 return Ok(result);
5523 };
5524
5525 let result = match variant {
5526 Variant::Untracked(_) => FileStatus::Untracked,
5527 Variant::Ignored(_) => FileStatus::Ignored,
5528 Variant::Unmerged(unmerged) => {
5529 let [first_head, second_head] =
5530 [unmerged.first_head, unmerged.second_head].map(|head| {
5531 let code = proto::GitStatus::from_i32(head)
5532 .with_context(|| format!("Invalid git status code: {head}"))?;
5533 let result = match code {
5534 proto::GitStatus::Added => UnmergedStatusCode::Added,
5535 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5536 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5537 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5538 };
5539 Ok(result)
5540 });
5541 let [first_head, second_head] = [first_head?, second_head?];
5542 UnmergedStatus {
5543 first_head,
5544 second_head,
5545 }
5546 .into()
5547 }
5548 Variant::Tracked(tracked) => {
5549 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5550 .map(|status| {
5551 let code = proto::GitStatus::from_i32(status)
5552 .with_context(|| format!("Invalid git status code: {status}"))?;
5553 let result = match code {
5554 proto::GitStatus::Modified => StatusCode::Modified,
5555 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5556 proto::GitStatus::Added => StatusCode::Added,
5557 proto::GitStatus::Deleted => StatusCode::Deleted,
5558 proto::GitStatus::Renamed => StatusCode::Renamed,
5559 proto::GitStatus::Copied => StatusCode::Copied,
5560 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5561 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5562 };
5563 Ok(result)
5564 });
5565 let [index_status, worktree_status] = [index_status?, worktree_status?];
5566 TrackedStatus {
5567 index_status,
5568 worktree_status,
5569 }
5570 .into()
5571 }
5572 };
5573 Ok(result)
5574}
5575
5576fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5577 use proto::git_file_status::{Tracked, Unmerged, Variant};
5578
5579 let variant = match status {
5580 FileStatus::Untracked => Variant::Untracked(Default::default()),
5581 FileStatus::Ignored => Variant::Ignored(Default::default()),
5582 FileStatus::Unmerged(UnmergedStatus {
5583 first_head,
5584 second_head,
5585 }) => Variant::Unmerged(Unmerged {
5586 first_head: unmerged_status_to_proto(first_head),
5587 second_head: unmerged_status_to_proto(second_head),
5588 }),
5589 FileStatus::Tracked(TrackedStatus {
5590 index_status,
5591 worktree_status,
5592 }) => Variant::Tracked(Tracked {
5593 index_status: tracked_status_to_proto(index_status),
5594 worktree_status: tracked_status_to_proto(worktree_status),
5595 }),
5596 };
5597 proto::GitFileStatus {
5598 variant: Some(variant),
5599 }
5600}
5601
5602fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5603 match code {
5604 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5605 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5606 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5607 }
5608}
5609
5610fn tracked_status_to_proto(code: StatusCode) -> i32 {
5611 match code {
5612 StatusCode::Added => proto::GitStatus::Added as _,
5613 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5614 StatusCode::Modified => proto::GitStatus::Modified as _,
5615 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5616 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5617 StatusCode::Copied => proto::GitStatus::Copied as _,
5618 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5619 }
5620}