1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4
5use crate::{
6 ProjectEnvironment, ProjectItem, ProjectPath,
7 buffer_store::{BufferStore, BufferStoreEvent},
8 worktree_store::{WorktreeStore, WorktreeStoreEvent},
9};
10use anyhow::{Context as _, Result, anyhow, bail};
11use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
12use buffer_diff::{BufferDiff, BufferDiffEvent};
13use client::ProjectId;
14use collections::HashMap;
15pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
16use fs::Fs;
17use futures::{
18 FutureExt, StreamExt,
19 channel::{mpsc, oneshot},
20 future::{self, Shared},
21 stream::FuturesOrdered,
22};
23use git::{
24 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
25 blame::Blame,
26 parse_git_remote_url,
27 repository::{
28 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
29 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
30 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
31 },
32 stash::{GitStash, StashEntry},
33 status::{
34 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
35 UnmergedStatus, UnmergedStatusCode,
36 },
37};
38use gpui::{
39 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
40 WeakEntity,
41};
42use language::{
43 Buffer, BufferEvent, Language, LanguageRegistry,
44 proto::{deserialize_version, serialize_version},
45};
46use parking_lot::Mutex;
47use postage::stream::Stream as _;
48use rpc::{
49 AnyProtoClient, TypedEnvelope,
50 proto::{self, git_reset, split_repository_update},
51};
52use serde::Deserialize;
53use std::{
54 cmp::Ordering,
55 collections::{BTreeSet, VecDeque},
56 future::Future,
57 mem,
58 ops::Range,
59 path::{Path, PathBuf},
60 str::FromStr,
61 sync::{
62 Arc,
63 atomic::{self, AtomicU64},
64 },
65 time::Instant,
66};
67use sum_tree::{Edit, SumTree, TreeSet};
68use task::Shell;
69use text::{Bias, BufferId};
70use util::{
71 ResultExt, debug_panic,
72 paths::{PathStyle, SanitizedPath},
73 post_inc,
74 rel_path::RelPath,
75};
76use worktree::{
77 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
78 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
79};
80use zeroize::Zeroize;
81
82pub struct GitStore {
83 state: GitStoreState,
84 buffer_store: Entity<BufferStore>,
85 worktree_store: Entity<WorktreeStore>,
86 repositories: HashMap<RepositoryId, Entity<Repository>>,
87 active_repo_id: Option<RepositoryId>,
88 #[allow(clippy::type_complexity)]
89 loading_diffs:
90 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
91 diffs: HashMap<BufferId, Entity<BufferGitState>>,
92 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
93 _subscriptions: Vec<Subscription>,
94}
95
96#[derive(Default)]
97struct SharedDiffs {
98 unstaged: Option<Entity<BufferDiff>>,
99 uncommitted: Option<Entity<BufferDiff>>,
100}
101
102struct BufferGitState {
103 unstaged_diff: Option<WeakEntity<BufferDiff>>,
104 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
105 conflict_set: Option<WeakEntity<ConflictSet>>,
106 recalculate_diff_task: Option<Task<Result<()>>>,
107 reparse_conflict_markers_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 language_registry: Option<Arc<LanguageRegistry>>,
110 conflict_updated_futures: Vec<oneshot::Sender<()>>,
111 recalculating_tx: postage::watch::Sender<bool>,
112
113 /// These operation counts are used to ensure that head and index text
114 /// values read from the git repository are up-to-date with any hunk staging
115 /// operations that have been performed on the BufferDiff.
116 ///
117 /// The operation count is incremented immediately when the user initiates a
118 /// hunk stage/unstage operation. Then, upon finishing writing the new index
119 /// text to disk, the `operation count as of write` is updated to reflect
120 /// the operation count that prompted the write.
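/// As a rough illustration (not a literal call site), readers of the head/index
/// text can treat it as up-to-date only when the two counters below agree;
/// `state` here is a hypothetical `BufferGitState` binding:
/// ```ignore
/// let up_to_date = state.hunk_staging_operation_count_as_of_write
///     == state.hunk_staging_operation_count;
/// ```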
121 hunk_staging_operation_count: usize,
122 hunk_staging_operation_count_as_of_write: usize,
123
124 head_text: Option<Arc<String>>,
125 index_text: Option<Arc<String>>,
126 head_changed: bool,
127 index_changed: bool,
128 language_changed: bool,
129}
130
131#[derive(Clone, Debug)]
132enum DiffBasesChange {
133 SetIndex(Option<String>),
134 SetHead(Option<String>),
135 SetEach {
136 index: Option<String>,
137 head: Option<String>,
138 },
139 SetBoth(Option<String>),
140}
141
142#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
143enum DiffKind {
144 Unstaged,
145 Uncommitted,
146}
147
148enum GitStoreState {
149 Local {
150 next_repository_id: Arc<AtomicU64>,
151 downstream: Option<LocalDownstreamState>,
152 project_environment: Entity<ProjectEnvironment>,
153 fs: Arc<dyn Fs>,
154 },
155 Remote {
156 upstream_client: AnyProtoClient,
157 upstream_project_id: u64,
158 downstream: Option<(AnyProtoClient, ProjectId)>,
159 },
160}
161
162enum DownstreamUpdate {
163 UpdateRepository(RepositorySnapshot),
164 RemoveRepository(RepositoryId),
165}
166
167struct LocalDownstreamState {
168 client: AnyProtoClient,
169 project_id: ProjectId,
170 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
171 _task: Task<Result<()>>,
172}
173
174#[derive(Clone, Debug)]
175pub struct GitStoreCheckpoint {
176 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
177}
178
179#[derive(Clone, Debug, PartialEq, Eq)]
180pub struct StatusEntry {
181 pub repo_path: RepoPath,
182 pub status: FileStatus,
183}
184
185impl StatusEntry {
186 fn to_proto(&self) -> proto::StatusEntry {
187 let simple_status = match self.status {
188 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
189 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
190 FileStatus::Tracked(TrackedStatus {
191 index_status,
192 worktree_status,
193 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
194 worktree_status
195 } else {
196 index_status
197 }),
198 };
199
200 proto::StatusEntry {
201 repo_path: self.repo_path.to_proto(),
202 simple_status,
203 status: Some(status_to_proto(self.status)),
204 }
205 }
206}
207
208impl TryFrom<proto::StatusEntry> for StatusEntry {
209 type Error = anyhow::Error;
210
211 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
212 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
213 let status = status_from_proto(value.simple_status, value.status)?;
214 Ok(Self { repo_path, status })
215 }
216}
217
218impl sum_tree::Item for StatusEntry {
219 type Summary = PathSummary<GitSummary>;
220
221 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
222 PathSummary {
223 max_path: self.repo_path.0.clone(),
224 item_summary: self.status.summary(),
225 }
226 }
227}
228
229impl sum_tree::KeyedItem for StatusEntry {
230 type Key = PathKey;
231
232 fn key(&self) -> Self::Key {
233 PathKey(self.repo_path.0.clone())
234 }
235}
236
237#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
238pub struct RepositoryId(pub u64);
239
240#[derive(Clone, Debug, Default, PartialEq, Eq)]
241pub struct MergeDetails {
242 pub conflicted_paths: TreeSet<RepoPath>,
243 pub message: Option<SharedString>,
244 pub heads: Vec<Option<SharedString>>,
245}
246
247#[derive(Clone, Debug, PartialEq, Eq)]
248pub struct RepositorySnapshot {
249 pub id: RepositoryId,
250 pub statuses_by_path: SumTree<StatusEntry>,
251 pub work_directory_abs_path: Arc<Path>,
252 pub path_style: PathStyle,
253 pub branch: Option<Branch>,
254 pub head_commit: Option<CommitDetails>,
255 pub scan_id: u64,
256 pub merge: MergeDetails,
257 pub remote_origin_url: Option<String>,
258 pub remote_upstream_url: Option<String>,
259 pub stash_entries: GitStash,
260}
261
262type JobId = u64;
263
264#[derive(Clone, Debug, PartialEq, Eq)]
265pub struct JobInfo {
266 pub start: Instant,
267 pub message: SharedString,
268}
269
270pub struct Repository {
271 this: WeakEntity<Self>,
272 snapshot: RepositorySnapshot,
273 commit_message_buffer: Option<Entity<Buffer>>,
274 git_store: WeakEntity<GitStore>,
275 // For a local repository, holds paths that have had worktree events since the last status scan completed,
276 // and that should be examined during the next status scan.
277 paths_needing_status_update: BTreeSet<RepoPath>,
278 job_sender: mpsc::UnboundedSender<GitJob>,
279 active_jobs: HashMap<JobId, JobInfo>,
280 job_id: JobId,
281 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
282 latest_askpass_id: u64,
283}
284
285impl std::ops::Deref for Repository {
286 type Target = RepositorySnapshot;
287
288 fn deref(&self) -> &Self::Target {
289 &self.snapshot
290 }
291}
292
293#[derive(Clone)]
294pub enum RepositoryState {
295 Local {
296 backend: Arc<dyn GitRepository>,
297 environment: Arc<HashMap<String, String>>,
298 },
299 Remote {
300 project_id: ProjectId,
301 client: AnyProtoClient,
302 },
303}
304
305#[derive(Clone, Debug, PartialEq, Eq)]
306pub enum RepositoryEvent {
307 StatusesChanged {
308 // TODO could report which statuses changed here
309 full_scan: bool,
310 },
311 MergeHeadsChanged,
312 BranchChanged,
313 StashEntriesChanged,
314}
315
316#[derive(Clone, Debug)]
317pub struct JobsUpdated;
318
319#[derive(Debug)]
320pub enum GitStoreEvent {
321 ActiveRepositoryChanged(Option<RepositoryId>),
322 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
323 RepositoryAdded,
324 RepositoryRemoved(RepositoryId),
325 IndexWriteError(anyhow::Error),
326 JobsUpdated,
327 ConflictsUpdated,
328}
329
330impl EventEmitter<RepositoryEvent> for Repository {}
331impl EventEmitter<JobsUpdated> for Repository {}
332impl EventEmitter<GitStoreEvent> for GitStore {}
333
334pub struct GitJob {
335 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
336 key: Option<GitJobKey>,
337}
338
339#[derive(PartialEq, Eq)]
340enum GitJobKey {
341 WriteIndex(RepoPath),
342 ReloadBufferDiffBases,
343 RefreshStatuses,
344 ReloadGitState,
345}
346
347impl GitStore {
348 pub fn local(
349 worktree_store: &Entity<WorktreeStore>,
350 buffer_store: Entity<BufferStore>,
351 environment: Entity<ProjectEnvironment>,
352 fs: Arc<dyn Fs>,
353 cx: &mut Context<Self>,
354 ) -> Self {
355 Self::new(
356 worktree_store.clone(),
357 buffer_store,
358 GitStoreState::Local {
359 next_repository_id: Arc::new(AtomicU64::new(1)),
360 downstream: None,
361 project_environment: environment,
362 fs,
363 },
364 cx,
365 )
366 }
367
368 pub fn remote(
369 worktree_store: &Entity<WorktreeStore>,
370 buffer_store: Entity<BufferStore>,
371 upstream_client: AnyProtoClient,
372 project_id: u64,
373 cx: &mut Context<Self>,
374 ) -> Self {
375 Self::new(
376 worktree_store.clone(),
377 buffer_store,
378 GitStoreState::Remote {
379 upstream_client,
380 upstream_project_id: project_id,
381 downstream: None,
382 },
383 cx,
384 )
385 }
386
387 fn new(
388 worktree_store: Entity<WorktreeStore>,
389 buffer_store: Entity<BufferStore>,
390 state: GitStoreState,
391 cx: &mut Context<Self>,
392 ) -> Self {
393 let _subscriptions = vec![
394 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
395 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
396 ];
397
398 GitStore {
399 state,
400 buffer_store,
401 worktree_store,
402 repositories: HashMap::default(),
403 active_repo_id: None,
404 _subscriptions,
405 loading_diffs: HashMap::default(),
406 shared_diffs: HashMap::default(),
407 diffs: HashMap::default(),
408 }
409 }
410
411 pub fn init(client: &AnyProtoClient) {
412 client.add_entity_request_handler(Self::handle_get_remotes);
413 client.add_entity_request_handler(Self::handle_get_branches);
414 client.add_entity_request_handler(Self::handle_get_default_branch);
415 client.add_entity_request_handler(Self::handle_change_branch);
416 client.add_entity_request_handler(Self::handle_create_branch);
417 client.add_entity_request_handler(Self::handle_rename_branch);
418 client.add_entity_request_handler(Self::handle_git_init);
419 client.add_entity_request_handler(Self::handle_push);
420 client.add_entity_request_handler(Self::handle_pull);
421 client.add_entity_request_handler(Self::handle_fetch);
422 client.add_entity_request_handler(Self::handle_stage);
423 client.add_entity_request_handler(Self::handle_unstage);
424 client.add_entity_request_handler(Self::handle_stash);
425 client.add_entity_request_handler(Self::handle_stash_pop);
426 client.add_entity_request_handler(Self::handle_stash_apply);
427 client.add_entity_request_handler(Self::handle_stash_drop);
428 client.add_entity_request_handler(Self::handle_commit);
429 client.add_entity_request_handler(Self::handle_reset);
430 client.add_entity_request_handler(Self::handle_show);
431 client.add_entity_request_handler(Self::handle_load_commit_diff);
432 client.add_entity_request_handler(Self::handle_checkout_files);
433 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
434 client.add_entity_request_handler(Self::handle_set_index_text);
435 client.add_entity_request_handler(Self::handle_askpass);
436 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
437 client.add_entity_request_handler(Self::handle_git_diff);
438 client.add_entity_request_handler(Self::handle_tree_diff);
439 client.add_entity_request_handler(Self::handle_get_blob_content);
440 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
441 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
442 client.add_entity_message_handler(Self::handle_update_diff_bases);
443 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
444 client.add_entity_request_handler(Self::handle_blame_buffer);
445 client.add_entity_message_handler(Self::handle_update_repository);
446 client.add_entity_message_handler(Self::handle_remove_repository);
447 client.add_entity_request_handler(Self::handle_git_clone);
448 client.add_entity_request_handler(Self::handle_get_worktrees);
449 client.add_entity_request_handler(Self::handle_create_worktree);
450 }
451
452 pub fn is_local(&self) -> bool {
453 matches!(self.state, GitStoreState::Local { .. })
454 }

455 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
456 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
457 let id = repo.read(cx).id;
458 if self.active_repo_id != Some(id) {
459 self.active_repo_id = Some(id);
460 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
461 }
462 }
463 }
464
465 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
466 match &mut self.state {
467 GitStoreState::Remote {
468 downstream: downstream_client,
469 ..
470 } => {
471 for repo in self.repositories.values() {
472 let update = repo.read(cx).snapshot.initial_update(project_id);
473 for update in split_repository_update(update) {
474 client.send(update).log_err();
475 }
476 }
477 *downstream_client = Some((client, ProjectId(project_id)));
478 }
479 GitStoreState::Local {
480 downstream: downstream_client,
481 ..
482 } => {
483 let mut snapshots = HashMap::default();
484 let (updates_tx, mut updates_rx) = mpsc::unbounded();
485 for repo in self.repositories.values() {
486 updates_tx
487 .unbounded_send(DownstreamUpdate::UpdateRepository(
488 repo.read(cx).snapshot.clone(),
489 ))
490 .ok();
491 }
492 *downstream_client = Some(LocalDownstreamState {
493 client: client.clone(),
494 project_id: ProjectId(project_id),
495 updates_tx,
496 _task: cx.spawn(async move |this, cx| {
497 cx.background_spawn(async move {
498 while let Some(update) = updates_rx.next().await {
499 match update {
500 DownstreamUpdate::UpdateRepository(snapshot) => {
501 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
502 {
503 let update =
504 snapshot.build_update(old_snapshot, project_id);
505 *old_snapshot = snapshot;
506 for update in split_repository_update(update) {
507 client.send(update)?;
508 }
509 } else {
510 let update = snapshot.initial_update(project_id);
511 for update in split_repository_update(update) {
512 client.send(update)?;
513 }
514 snapshots.insert(snapshot.id, snapshot);
515 }
516 }
517 DownstreamUpdate::RemoveRepository(id) => {
518 client.send(proto::RemoveRepository {
519 project_id,
520 id: id.to_proto(),
521 })?;
522 }
523 }
524 }
525 anyhow::Ok(())
526 })
527 .await
528 .ok();
529 this.update(cx, |this, _| {
530 if let GitStoreState::Local {
531 downstream: downstream_client,
532 ..
533 } = &mut this.state
534 {
535 downstream_client.take();
536 } else {
537 unreachable!("unshared called on remote store");
538 }
539 })
540 }),
541 });
542 }
543 }
544 }
545
546 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
547 match &mut self.state {
548 GitStoreState::Local {
549 downstream: downstream_client,
550 ..
551 } => {
552 downstream_client.take();
553 }
554 GitStoreState::Remote {
555 downstream: downstream_client,
556 ..
557 } => {
558 downstream_client.take();
559 }
560 }
561 self.shared_diffs.clear();
562 }
563
564 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
565 self.shared_diffs.remove(peer_id);
566 }
567
568 pub fn active_repository(&self) -> Option<Entity<Repository>> {
569 self.active_repo_id
570 .as_ref()
571 .map(|id| self.repositories[id].clone())
572 }
573
574 pub fn open_unstaged_diff(
575 &mut self,
576 buffer: Entity<Buffer>,
577 cx: &mut Context<Self>,
578 ) -> Task<Result<Entity<BufferDiff>>> {
579 let buffer_id = buffer.read(cx).remote_id();
580 if let Some(diff_state) = self.diffs.get(&buffer_id)
581 && let Some(unstaged_diff) = diff_state
582 .read(cx)
583 .unstaged_diff
584 .as_ref()
585 .and_then(|weak| weak.upgrade())
586 {
587 if let Some(task) =
588 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
589 {
590 return cx.background_executor().spawn(async move {
591 task.await;
592 Ok(unstaged_diff)
593 });
594 }
595 return Task::ready(Ok(unstaged_diff));
596 }
597
598 let Some((repo, repo_path)) =
599 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
600 else {
601 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
602 };
603
604 let task = self
605 .loading_diffs
606 .entry((buffer_id, DiffKind::Unstaged))
607 .or_insert_with(|| {
608 let staged_text = repo.update(cx, |repo, cx| {
609 repo.load_staged_text(buffer_id, repo_path, cx)
610 });
611 cx.spawn(async move |this, cx| {
612 Self::open_diff_internal(
613 this,
614 DiffKind::Unstaged,
615 staged_text.await.map(DiffBasesChange::SetIndex),
616 buffer,
617 cx,
618 )
619 .await
620 .map_err(Arc::new)
621 })
622 .shared()
623 })
624 .clone();
625
626 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
627 }
628
629 pub fn open_diff_since(
630 &mut self,
631 oid: Option<git::Oid>,
632 buffer: Entity<Buffer>,
633 repo: Entity<Repository>,
634 languages: Arc<LanguageRegistry>,
635 cx: &mut Context<Self>,
636 ) -> Task<Result<Entity<BufferDiff>>> {
637 cx.spawn(async move |this, cx| {
638 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
639 let content = match oid {
640 None => None,
641 Some(oid) => Some(
642 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
643 .await?,
644 ),
645 };
646 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
647
648 buffer_diff
649 .update(cx, |buffer_diff, cx| {
650 buffer_diff.set_base_text(
651 content.map(Arc::new),
652 buffer_snapshot.language().cloned(),
653 Some(languages.clone()),
654 buffer_snapshot.text,
655 cx,
656 )
657 })?
658 .await?;
659 let unstaged_diff = this
660 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
661 .await?;
662 buffer_diff.update(cx, |buffer_diff, _| {
663 buffer_diff.set_secondary_diff(unstaged_diff);
664 })?;
665
666 this.update(cx, |_, cx| {
667 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
668 .detach();
669 })?;
670
671 Ok(buffer_diff)
672 })
673 }
674
675 pub fn open_uncommitted_diff(
676 &mut self,
677 buffer: Entity<Buffer>,
678 cx: &mut Context<Self>,
679 ) -> Task<Result<Entity<BufferDiff>>> {
680 let buffer_id = buffer.read(cx).remote_id();
681
682 if let Some(diff_state) = self.diffs.get(&buffer_id)
683 && let Some(uncommitted_diff) = diff_state
684 .read(cx)
685 .uncommitted_diff
686 .as_ref()
687 .and_then(|weak| weak.upgrade())
688 {
689 if let Some(task) =
690 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
691 {
692 return cx.background_executor().spawn(async move {
693 task.await;
694 Ok(uncommitted_diff)
695 });
696 }
697 return Task::ready(Ok(uncommitted_diff));
698 }
699
700 let Some((repo, repo_path)) =
701 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
702 else {
703 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
704 };
705
706 let task = self
707 .loading_diffs
708 .entry((buffer_id, DiffKind::Uncommitted))
709 .or_insert_with(|| {
710 let changes = repo.update(cx, |repo, cx| {
711 repo.load_committed_text(buffer_id, repo_path, cx)
712 });
713
714 // todo(lw): hot foreground spawn
715 cx.spawn(async move |this, cx| {
716 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
717 .await
718 .map_err(Arc::new)
719 })
720 .shared()
721 })
722 .clone();
723
724 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
725 }
726
727 async fn open_diff_internal(
728 this: WeakEntity<Self>,
729 kind: DiffKind,
730 texts: Result<DiffBasesChange>,
731 buffer_entity: Entity<Buffer>,
732 cx: &mut AsyncApp,
733 ) -> Result<Entity<BufferDiff>> {
734 let diff_bases_change = match texts {
735 Err(e) => {
736 this.update(cx, |this, cx| {
737 let buffer = buffer_entity.read(cx);
738 let buffer_id = buffer.remote_id();
739 this.loading_diffs.remove(&(buffer_id, kind));
740 })?;
741 return Err(e);
742 }
743 Ok(change) => change,
744 };
745
746 this.update(cx, |this, cx| {
747 let buffer = buffer_entity.read(cx);
748 let buffer_id = buffer.remote_id();
749 let language = buffer.language().cloned();
750 let language_registry = buffer.language_registry();
751 let text_snapshot = buffer.text_snapshot();
752 this.loading_diffs.remove(&(buffer_id, kind));
753
754 let git_store = cx.weak_entity();
755 let diff_state = this
756 .diffs
757 .entry(buffer_id)
758 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
759
760 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
761
762 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
763 diff_state.update(cx, |diff_state, cx| {
764 diff_state.language = language;
765 diff_state.language_registry = language_registry;
766
767 match kind {
768 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
769 DiffKind::Uncommitted => {
770 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
771 diff
772 } else {
773 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
774 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
775 unstaged_diff
776 };
777
778 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
779 diff_state.uncommitted_diff = Some(diff.downgrade())
780 }
781 }
782
783 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
784 let rx = diff_state.wait_for_recalculation();
785
786 anyhow::Ok(async move {
787 if let Some(rx) = rx {
788 rx.await;
789 }
790 Ok(diff)
791 })
792 })
793 })??
794 .await
795 }
796
797 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
798 let diff_state = self.diffs.get(&buffer_id)?;
799 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
800 }
801
802 pub fn get_uncommitted_diff(
803 &self,
804 buffer_id: BufferId,
805 cx: &App,
806 ) -> Option<Entity<BufferDiff>> {
807 let diff_state = self.diffs.get(&buffer_id)?;
808 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
809 }
810
811 pub fn open_conflict_set(
812 &mut self,
813 buffer: Entity<Buffer>,
814 cx: &mut Context<Self>,
815 ) -> Entity<ConflictSet> {
816 log::debug!("open conflict set");
817 let buffer_id = buffer.read(cx).remote_id();
818
819 if let Some(git_state) = self.diffs.get(&buffer_id)
820 && let Some(conflict_set) = git_state
821 .read(cx)
822 .conflict_set
823 .as_ref()
824 .and_then(|weak| weak.upgrade())
825 {
826 let conflict_set = conflict_set;
827 let buffer_snapshot = buffer.read(cx).text_snapshot();
828
829 git_state.update(cx, |state, cx| {
830 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
831 });
832
833 return conflict_set;
834 }
835
836 let is_unmerged = self
837 .repository_and_path_for_buffer_id(buffer_id, cx)
838 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
839 let git_store = cx.weak_entity();
840 let buffer_git_state = self
841 .diffs
842 .entry(buffer_id)
843 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
844 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
845
846 self._subscriptions
847 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
848 cx.emit(GitStoreEvent::ConflictsUpdated);
849 }));
850
851 buffer_git_state.update(cx, |state, cx| {
852 state.conflict_set = Some(conflict_set.downgrade());
853 let buffer_snapshot = buffer.read(cx).text_snapshot();
854 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
855 });
856
857 conflict_set
858 }
859
860 pub fn project_path_git_status(
861 &self,
862 project_path: &ProjectPath,
863 cx: &App,
864 ) -> Option<FileStatus> {
865 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
866 Some(repo.read(cx).status_for_path(&repo_path)?.status)
867 }
868
869 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
870 let mut work_directory_abs_paths = Vec::new();
871 let mut checkpoints = Vec::new();
872 for repository in self.repositories.values() {
873 repository.update(cx, |repository, _| {
874 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
875 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
876 });
877 }
878
879 cx.background_executor().spawn(async move {
880 let checkpoints = future::try_join_all(checkpoints).await?;
881 Ok(GitStoreCheckpoint {
882 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
883 .into_iter()
884 .zip(checkpoints)
885 .collect(),
886 })
887 })
888 }
889
890 pub fn restore_checkpoint(
891 &self,
892 checkpoint: GitStoreCheckpoint,
893 cx: &mut App,
894 ) -> Task<Result<()>> {
895 let repositories_by_work_dir_abs_path = self
896 .repositories
897 .values()
898 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
899 .collect::<HashMap<_, _>>();
900
901 let mut tasks = Vec::new();
902 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
903 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
904 let restore = repository.update(cx, |repository, _| {
905 repository.restore_checkpoint(checkpoint)
906 });
907 tasks.push(async move { restore.await? });
908 }
909 }
910 cx.background_spawn(async move {
911 future::try_join_all(tasks).await?;
912 Ok(())
913 })
914 }
915
916 /// Compares two checkpoints, returning true if they are equal.
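/// A rough usage sketch (illustrative only; `git_store`, `before`, and `after`
/// are hypothetical bindings, and the calls are made from an async context):
/// ```ignore
/// let before = git_store.update(cx, |store, cx| store.checkpoint(cx))?.await?;
/// // ...perform some git operations...
/// let after = git_store.update(cx, |store, cx| store.checkpoint(cx))?.await?;
/// let unchanged = git_store
///     .update(cx, |store, cx| store.compare_checkpoints(before, after, cx))?
///     .await?;
/// ```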
917 pub fn compare_checkpoints(
918 &self,
919 left: GitStoreCheckpoint,
920 mut right: GitStoreCheckpoint,
921 cx: &mut App,
922 ) -> Task<Result<bool>> {
923 let repositories_by_work_dir_abs_path = self
924 .repositories
925 .values()
926 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
927 .collect::<HashMap<_, _>>();
928
929 let mut tasks = Vec::new();
930 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
931 if let Some(right_checkpoint) = right
932 .checkpoints_by_work_dir_abs_path
933 .remove(&work_dir_abs_path)
934 {
935 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
936 {
937 let compare = repository.update(cx, |repository, _| {
938 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
939 });
940
941 tasks.push(async move { compare.await? });
942 }
943 } else {
944 return Task::ready(Ok(false));
945 }
946 }
947 cx.background_spawn(async move {
948 Ok(future::try_join_all(tasks)
949 .await?
950 .into_iter()
951 .all(|result| result))
952 })
953 }
954
955 /// Blames a buffer.
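/// A rough usage sketch (illustrative only; `git_store` and `buffer` are
/// hypothetical bindings, and the call is made from an async context):
/// ```ignore
/// let blame = git_store
///     .update(cx, |store, cx| store.blame_buffer(&buffer, None, cx))?
///     .await?;
/// if let Some(blame) = blame {
///     // inspect the blame entries for the buffer's current contents
/// }
/// ```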
956 pub fn blame_buffer(
957 &self,
958 buffer: &Entity<Buffer>,
959 version: Option<clock::Global>,
960 cx: &mut App,
961 ) -> Task<Result<Option<Blame>>> {
962 let buffer = buffer.read(cx);
963 let Some((repo, repo_path)) =
964 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
965 else {
966 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
967 };
968 let content = match &version {
969 Some(version) => buffer.rope_for_version(version),
970 None => buffer.as_rope().clone(),
971 };
972 let version = version.unwrap_or(buffer.version());
973 let buffer_id = buffer.remote_id();
974
975 let rx = repo.update(cx, |repo, _| {
976 repo.send_job(None, move |state, _| async move {
977 match state {
978 RepositoryState::Local { backend, .. } => backend
979 .blame(repo_path.clone(), content)
980 .await
981 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
982 .map(Some),
983 RepositoryState::Remote { project_id, client } => {
984 let response = client
985 .request(proto::BlameBuffer {
986 project_id: project_id.to_proto(),
987 buffer_id: buffer_id.into(),
988 version: serialize_version(&version),
989 })
990 .await?;
991 Ok(deserialize_blame_buffer_response(response))
992 }
993 }
994 })
995 });
996
997 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
998 }
999
1000 pub fn get_permalink_to_line(
1001 &self,
1002 buffer: &Entity<Buffer>,
1003 selection: Range<u32>,
1004 cx: &mut App,
1005 ) -> Task<Result<url::Url>> {
1006 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1007 return Task::ready(Err(anyhow!("buffer has no file")));
1008 };
1009
1010 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1011 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1012 cx,
1013 ) else {
1014 // If we're not in a Git repo, check whether this is a Rust source
1015 // file in the Cargo registry (presumably opened with go-to-definition
1016 // from a normal Rust file). If so, we can put together a permalink
1017 // using crate metadata.
1018 if buffer
1019 .read(cx)
1020 .language()
1021 .is_none_or(|lang| lang.name() != "Rust".into())
1022 {
1023 return Task::ready(Err(anyhow!("no permalink available")));
1024 }
1025 let file_path = file.worktree.read(cx).absolutize(&file.path);
1026 return cx.spawn(async move |cx| {
1027 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1028 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1029 .context("no permalink available")
1030 });
1031 };
1032
1033 let buffer_id = buffer.read(cx).remote_id();
1034 let branch = repo.read(cx).branch.clone();
1035 let remote = branch
1036 .as_ref()
1037 .and_then(|b| b.upstream.as_ref())
1038 .and_then(|b| b.remote_name())
1039 .unwrap_or("origin")
1040 .to_string();
1041
1042 let rx = repo.update(cx, |repo, _| {
1043 repo.send_job(None, move |state, cx| async move {
1044 match state {
1045 RepositoryState::Local { backend, .. } => {
1046 let origin_url = backend
1047 .remote_url(&remote)
1048 .with_context(|| format!("remote \"{remote}\" not found"))?;
1049
1050 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1051
1052 let provider_registry =
1053 cx.update(GitHostingProviderRegistry::default_global)?;
1054
1055 let (provider, remote) =
1056 parse_git_remote_url(provider_registry, &origin_url)
1057 .context("parsing Git remote URL")?;
1058
1059 Ok(provider.build_permalink(
1060 remote,
1061 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1062 ))
1063 }
1064 RepositoryState::Remote { project_id, client } => {
1065 let response = client
1066 .request(proto::GetPermalinkToLine {
1067 project_id: project_id.to_proto(),
1068 buffer_id: buffer_id.into(),
1069 selection: Some(proto::Range {
1070 start: selection.start as u64,
1071 end: selection.end as u64,
1072 }),
1073 })
1074 .await?;
1075
1076 url::Url::parse(&response.permalink).context("failed to parse permalink")
1077 }
1078 }
1079 })
1080 });
1081 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1082 }
1083
1084 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1085 match &self.state {
1086 GitStoreState::Local {
1087 downstream: downstream_client,
1088 ..
1089 } => downstream_client
1090 .as_ref()
1091 .map(|state| (state.client.clone(), state.project_id)),
1092 GitStoreState::Remote {
1093 downstream: downstream_client,
1094 ..
1095 } => downstream_client.clone(),
1096 }
1097 }
1098
1099 fn upstream_client(&self) -> Option<AnyProtoClient> {
1100 match &self.state {
1101 GitStoreState::Local { .. } => None,
1102 GitStoreState::Remote {
1103 upstream_client, ..
1104 } => Some(upstream_client.clone()),
1105 }
1106 }
1107
1108 fn on_worktree_store_event(
1109 &mut self,
1110 worktree_store: Entity<WorktreeStore>,
1111 event: &WorktreeStoreEvent,
1112 cx: &mut Context<Self>,
1113 ) {
1114 let GitStoreState::Local {
1115 project_environment,
1116 downstream,
1117 next_repository_id,
1118 fs,
1119 } = &self.state
1120 else {
1121 return;
1122 };
1123
1124 match event {
1125 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1126 if let Some(worktree) = self
1127 .worktree_store
1128 .read(cx)
1129 .worktree_for_id(*worktree_id, cx)
1130 {
1131 let paths_by_git_repo =
1132 self.process_updated_entries(&worktree, updated_entries, cx);
1133 let downstream = downstream
1134 .as_ref()
1135 .map(|downstream| downstream.updates_tx.clone());
1136 cx.spawn(async move |_, cx| {
1137 let paths_by_git_repo = paths_by_git_repo.await;
1138 for (repo, paths) in paths_by_git_repo {
1139 repo.update(cx, |repo, cx| {
1140 repo.paths_changed(paths, downstream.clone(), cx);
1141 })
1142 .ok();
1143 }
1144 })
1145 .detach();
1146 }
1147 }
1148 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1149 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1150 else {
1151 return;
1152 };
1153 if !worktree.read(cx).is_visible() {
1154 log::debug!(
1155 "not adding repositories for local worktree {:?} because it's not visible",
1156 worktree.read(cx).abs_path()
1157 );
1158 return;
1159 }
1160 self.update_repositories_from_worktree(
1161 project_environment.clone(),
1162 next_repository_id.clone(),
1163 downstream
1164 .as_ref()
1165 .map(|downstream| downstream.updates_tx.clone()),
1166 changed_repos.clone(),
1167 fs.clone(),
1168 cx,
1169 );
1170 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1171 }
1172 _ => {}
1173 }
1174 }

1175 fn on_repository_event(
1176 &mut self,
1177 repo: Entity<Repository>,
1178 event: &RepositoryEvent,
1179 cx: &mut Context<Self>,
1180 ) {
1181 let id = repo.read(cx).id;
1182 let repo_snapshot = repo.read(cx).snapshot.clone();
1183 for (buffer_id, diff) in self.diffs.iter() {
1184 if let Some((buffer_repo, repo_path)) =
1185 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1186 && buffer_repo == repo
1187 {
1188 diff.update(cx, |diff, cx| {
1189 if let Some(conflict_set) = &diff.conflict_set {
1190 let conflict_status_changed =
1191 conflict_set.update(cx, |conflict_set, cx| {
1192 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1193 conflict_set.set_has_conflict(has_conflict, cx)
1194 })?;
1195 if conflict_status_changed {
1196 let buffer_store = self.buffer_store.read(cx);
1197 if let Some(buffer) = buffer_store.get(*buffer_id) {
1198 let _ = diff
1199 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1200 }
1201 }
1202 }
1203 anyhow::Ok(())
1204 })
1205 .ok();
1206 }
1207 }
1208 cx.emit(GitStoreEvent::RepositoryUpdated(
1209 id,
1210 event.clone(),
1211 self.active_repo_id == Some(id),
1212 ))
1213 }
1214
1215 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1216 cx.emit(GitStoreEvent::JobsUpdated)
1217 }
1218
1219 /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1220 fn update_repositories_from_worktree(
1221 &mut self,
1222 project_environment: Entity<ProjectEnvironment>,
1223 next_repository_id: Arc<AtomicU64>,
1224 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1225 updated_git_repositories: UpdatedGitRepositoriesSet,
1226 fs: Arc<dyn Fs>,
1227 cx: &mut Context<Self>,
1228 ) {
1229 let mut removed_ids = Vec::new();
1230 for update in updated_git_repositories.iter() {
1231 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1232 let existing_work_directory_abs_path =
1233 repo.read(cx).work_directory_abs_path.clone();
1234 Some(&existing_work_directory_abs_path)
1235 == update.old_work_directory_abs_path.as_ref()
1236 || Some(&existing_work_directory_abs_path)
1237 == update.new_work_directory_abs_path.as_ref()
1238 }) {
1239 if let Some(new_work_directory_abs_path) =
1240 update.new_work_directory_abs_path.clone()
1241 {
1242 existing.update(cx, |existing, cx| {
1243 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1244 existing.schedule_scan(updates_tx.clone(), cx);
1245 });
1246 } else {
1247 removed_ids.push(*id);
1248 }
1249 } else if let UpdatedGitRepository {
1250 new_work_directory_abs_path: Some(work_directory_abs_path),
1251 dot_git_abs_path: Some(dot_git_abs_path),
1252 repository_dir_abs_path: Some(repository_dir_abs_path),
1253 common_dir_abs_path: Some(common_dir_abs_path),
1254 ..
1255 } = update
1256 {
1257 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1258 let git_store = cx.weak_entity();
1259 let repo = cx.new(|cx| {
1260 let mut repo = Repository::local(
1261 id,
1262 work_directory_abs_path.clone(),
1263 dot_git_abs_path.clone(),
1264 repository_dir_abs_path.clone(),
1265 common_dir_abs_path.clone(),
1266 project_environment.downgrade(),
1267 fs.clone(),
1268 git_store,
1269 cx,
1270 );
1271 if let Some(updates_tx) = updates_tx.as_ref() {
1272 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1273 updates_tx
1274 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1275 .ok();
1276 }
1277 repo.schedule_scan(updates_tx.clone(), cx);
1278 repo
1279 });
1280 self._subscriptions
1281 .push(cx.subscribe(&repo, Self::on_repository_event));
1282 self._subscriptions
1283 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1284 self.repositories.insert(id, repo);
1285 cx.emit(GitStoreEvent::RepositoryAdded);
1286 self.active_repo_id.get_or_insert_with(|| {
1287 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1288 id
1289 });
1290 }
1291 }
1292
1293 for id in removed_ids {
1294 if self.active_repo_id == Some(id) {
1295 self.active_repo_id = None;
1296 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1297 }
1298 self.repositories.remove(&id);
1299 if let Some(updates_tx) = updates_tx.as_ref() {
1300 updates_tx
1301 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1302 .ok();
1303 }
1304 }
1305 }
1306
1307 fn on_buffer_store_event(
1308 &mut self,
1309 _: Entity<BufferStore>,
1310 event: &BufferStoreEvent,
1311 cx: &mut Context<Self>,
1312 ) {
1313 match event {
1314 BufferStoreEvent::BufferAdded(buffer) => {
1315 cx.subscribe(buffer, |this, buffer, event, cx| {
1316 if let BufferEvent::LanguageChanged = event {
1317 let buffer_id = buffer.read(cx).remote_id();
1318 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1319 diff_state.update(cx, |diff_state, cx| {
1320 diff_state.buffer_language_changed(buffer, cx);
1321 });
1322 }
1323 }
1324 })
1325 .detach();
1326 }
1327 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1328 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1329 diffs.remove(buffer_id);
1330 }
1331 }
1332 BufferStoreEvent::BufferDropped(buffer_id) => {
1333 self.diffs.remove(buffer_id);
1334 for diffs in self.shared_diffs.values_mut() {
1335 diffs.remove(buffer_id);
1336 }
1337 }
1338
1339 _ => {}
1340 }
1341 }
1342
1343 pub fn recalculate_buffer_diffs(
1344 &mut self,
1345 buffers: Vec<Entity<Buffer>>,
1346 cx: &mut Context<Self>,
1347 ) -> impl Future<Output = ()> + use<> {
1348 let mut futures = Vec::new();
1349 for buffer in buffers {
1350 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1351 let buffer = buffer.read(cx).text_snapshot();
1352 diff_state.update(cx, |diff_state, cx| {
1353 diff_state.recalculate_diffs(buffer.clone(), cx);
1354 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1355 });
1356 futures.push(diff_state.update(cx, |diff_state, cx| {
1357 diff_state
1358 .reparse_conflict_markers(buffer, cx)
1359 .map(|_| {})
1360 .boxed()
1361 }));
1362 }
1363 }
1364 async move {
1365 futures::future::join_all(futures).await;
1366 }
1367 }
1368
1369 fn on_buffer_diff_event(
1370 &mut self,
1371 diff: Entity<buffer_diff::BufferDiff>,
1372 event: &BufferDiffEvent,
1373 cx: &mut Context<Self>,
1374 ) {
1375 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1376 let buffer_id = diff.read(cx).buffer_id;
1377 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1378 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1379 diff_state.hunk_staging_operation_count += 1;
1380 diff_state.hunk_staging_operation_count
1381 });
1382 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1383 let recv = repo.update(cx, |repo, cx| {
1384 log::debug!("hunks changed for {}", path.as_unix_str());
1385 repo.spawn_set_index_text_job(
1386 path,
1387 new_index_text.as_ref().map(|rope| rope.to_string()),
1388 Some(hunk_staging_operation_count),
1389 cx,
1390 )
1391 });
1392 let diff = diff.downgrade();
1393 cx.spawn(async move |this, cx| {
1394 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1395 diff.update(cx, |diff, cx| {
1396 diff.clear_pending_hunks(cx);
1397 })
1398 .ok();
1399 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1400 .ok();
1401 }
1402 })
1403 .detach();
1404 }
1405 }
1406 }
1407 }
1408
1409 fn local_worktree_git_repos_changed(
1410 &mut self,
1411 worktree: Entity<Worktree>,
1412 changed_repos: &UpdatedGitRepositoriesSet,
1413 cx: &mut Context<Self>,
1414 ) {
1415 log::debug!("local worktree repos changed");
1416 debug_assert!(worktree.read(cx).is_local());
1417
1418 for repository in self.repositories.values() {
1419 repository.update(cx, |repository, cx| {
1420 let repo_abs_path = &repository.work_directory_abs_path;
1421 if changed_repos.iter().any(|update| {
1422 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1423 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1424 }) {
1425 repository.reload_buffer_diff_bases(cx);
1426 }
1427 });
1428 }
1429 }
1430
1431 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1432 &self.repositories
1433 }
1434
1435 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1436 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1437 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1438 Some(status.status)
1439 }
1440
1441 pub fn repository_and_path_for_buffer_id(
1442 &self,
1443 buffer_id: BufferId,
1444 cx: &App,
1445 ) -> Option<(Entity<Repository>, RepoPath)> {
1446 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1447 let project_path = buffer.read(cx).project_path(cx)?;
1448 self.repository_and_path_for_project_path(&project_path, cx)
1449 }
1450
1451 pub fn repository_and_path_for_project_path(
1452 &self,
1453 path: &ProjectPath,
1454 cx: &App,
1455 ) -> Option<(Entity<Repository>, RepoPath)> {
1456 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1457 self.repositories
1458 .values()
1459 .filter_map(|repo| {
1460 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1461 Some((repo.clone(), repo_path))
1462 })
1463 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1464 }
1465
1466 pub fn git_init(
1467 &self,
1468 path: Arc<Path>,
1469 fallback_branch_name: String,
1470 cx: &App,
1471 ) -> Task<Result<()>> {
1472 match &self.state {
1473 GitStoreState::Local { fs, .. } => {
1474 let fs = fs.clone();
1475 cx.background_executor()
1476 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1477 }
1478 GitStoreState::Remote {
1479 upstream_client,
1480 upstream_project_id: project_id,
1481 ..
1482 } => {
1483 let client = upstream_client.clone();
1484 let project_id = *project_id;
1485 cx.background_executor().spawn(async move {
1486 client
1487 .request(proto::GitInit {
1488 project_id,
1489 abs_path: path.to_string_lossy().into_owned(),
1490 fallback_branch_name,
1491 })
1492 .await?;
1493 Ok(())
1494 })
1495 }
1496 }
1497 }
1498
1499 pub fn git_clone(
1500 &self,
1501 repo: String,
1502 path: impl Into<Arc<std::path::Path>>,
1503 cx: &App,
1504 ) -> Task<Result<()>> {
1505 let path = path.into();
1506 match &self.state {
1507 GitStoreState::Local { fs, .. } => {
1508 let fs = fs.clone();
1509 cx.background_executor()
1510 .spawn(async move { fs.git_clone(&repo, &path).await })
1511 }
1512 GitStoreState::Remote {
1513 upstream_client,
1514 upstream_project_id,
1515 ..
1516 } => {
1517 if upstream_client.is_via_collab() {
1518 return Task::ready(Err(anyhow!(
1519 "Git Clone isn't supported for project guests"
1520 )));
1521 }
1522 let request = upstream_client.request(proto::GitClone {
1523 project_id: *upstream_project_id,
1524 abs_path: path.to_string_lossy().into_owned(),
1525 remote_repo: repo,
1526 });
1527
1528 cx.background_spawn(async move {
1529 let result = request.await?;
1530
1531 match result.success {
1532 true => Ok(()),
1533 false => Err(anyhow!("Git Clone failed")),
1534 }
1535 })
1536 }
1537 }
1538 }
1539
1540 async fn handle_update_repository(
1541 this: Entity<Self>,
1542 envelope: TypedEnvelope<proto::UpdateRepository>,
1543 mut cx: AsyncApp,
1544 ) -> Result<()> {
1545 this.update(&mut cx, |this, cx| {
1546 let path_style = this.worktree_store.read(cx).path_style();
1547 let mut update = envelope.payload;
1548
1549 let id = RepositoryId::from_proto(update.id);
1550 let client = this.upstream_client().context("no upstream client")?;
1551
1552 let mut repo_subscription = None;
1553 let repo = this.repositories.entry(id).or_insert_with(|| {
1554 let git_store = cx.weak_entity();
1555 let repo = cx.new(|cx| {
1556 Repository::remote(
1557 id,
1558 Path::new(&update.abs_path).into(),
1559 path_style,
1560 ProjectId(update.project_id),
1561 client,
1562 git_store,
1563 cx,
1564 )
1565 });
1566 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1567 cx.emit(GitStoreEvent::RepositoryAdded);
1568 repo
1569 });
1570 this._subscriptions.extend(repo_subscription);
1571
1572 repo.update(cx, {
1573 let update = update.clone();
1574 |repo, cx| repo.apply_remote_update(update, cx)
1575 })?;
1576
1577 this.active_repo_id.get_or_insert_with(|| {
1578 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1579 id
1580 });
1581
1582 if let Some((client, project_id)) = this.downstream_client() {
1583 update.project_id = project_id.to_proto();
1584 client.send(update).log_err();
1585 }
1586 Ok(())
1587 })?
1588 }
1589
1590 async fn handle_remove_repository(
1591 this: Entity<Self>,
1592 envelope: TypedEnvelope<proto::RemoveRepository>,
1593 mut cx: AsyncApp,
1594 ) -> Result<()> {
1595 this.update(&mut cx, |this, cx| {
1596 let mut update = envelope.payload;
1597 let id = RepositoryId::from_proto(update.id);
1598 this.repositories.remove(&id);
1599 if let Some((client, project_id)) = this.downstream_client() {
1600 update.project_id = project_id.to_proto();
1601 client.send(update).log_err();
1602 }
1603 if this.active_repo_id == Some(id) {
1604 this.active_repo_id = None;
1605 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1606 }
1607 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1608 })
1609 }
1610
1611 async fn handle_git_init(
1612 this: Entity<Self>,
1613 envelope: TypedEnvelope<proto::GitInit>,
1614 cx: AsyncApp,
1615 ) -> Result<proto::Ack> {
1616 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1617 let name = envelope.payload.fallback_branch_name;
1618 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1619 .await?;
1620
1621 Ok(proto::Ack {})
1622 }
1623
1624 async fn handle_git_clone(
1625 this: Entity<Self>,
1626 envelope: TypedEnvelope<proto::GitClone>,
1627 cx: AsyncApp,
1628 ) -> Result<proto::GitCloneResponse> {
1629 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1630 let repo_name = envelope.payload.remote_repo;
1631 let result = cx
1632 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1633 .await;
1634
1635 Ok(proto::GitCloneResponse {
1636 success: result.is_ok(),
1637 })
1638 }
1639
1640 async fn handle_fetch(
1641 this: Entity<Self>,
1642 envelope: TypedEnvelope<proto::Fetch>,
1643 mut cx: AsyncApp,
1644 ) -> Result<proto::RemoteMessageResponse> {
1645 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1646 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1647 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1648 let askpass_id = envelope.payload.askpass_id;
1649
1650 let askpass = make_remote_delegate(
1651 this,
1652 envelope.payload.project_id,
1653 repository_id,
1654 askpass_id,
1655 &mut cx,
1656 );
1657
1658 let remote_output = repository_handle
1659 .update(&mut cx, |repository_handle, cx| {
1660 repository_handle.fetch(fetch_options, askpass, cx)
1661 })?
1662 .await??;
1663
1664 Ok(proto::RemoteMessageResponse {
1665 stdout: remote_output.stdout,
1666 stderr: remote_output.stderr,
1667 })
1668 }
1669
1670 async fn handle_push(
1671 this: Entity<Self>,
1672 envelope: TypedEnvelope<proto::Push>,
1673 mut cx: AsyncApp,
1674 ) -> Result<proto::RemoteMessageResponse> {
1675 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1676 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1677
1678 let askpass_id = envelope.payload.askpass_id;
1679 let askpass = make_remote_delegate(
1680 this,
1681 envelope.payload.project_id,
1682 repository_id,
1683 askpass_id,
1684 &mut cx,
1685 );
1686
1687 let options = envelope
1688 .payload
1689 .options
1690 .as_ref()
1691 .map(|_| match envelope.payload.options() {
1692 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1693 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1694 });
1695
1696 let branch_name = envelope.payload.branch_name.into();
1697 let remote_name = envelope.payload.remote_name.into();
1698
1699 let remote_output = repository_handle
1700 .update(&mut cx, |repository_handle, cx| {
1701 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1702 })?
1703 .await??;
1704 Ok(proto::RemoteMessageResponse {
1705 stdout: remote_output.stdout,
1706 stderr: remote_output.stderr,
1707 })
1708 }
1709
1710 async fn handle_pull(
1711 this: Entity<Self>,
1712 envelope: TypedEnvelope<proto::Pull>,
1713 mut cx: AsyncApp,
1714 ) -> Result<proto::RemoteMessageResponse> {
1715 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1716 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1717 let askpass_id = envelope.payload.askpass_id;
1718 let askpass = make_remote_delegate(
1719 this,
1720 envelope.payload.project_id,
1721 repository_id,
1722 askpass_id,
1723 &mut cx,
1724 );
1725
1726 let branch_name = envelope.payload.branch_name.into();
1727 let remote_name = envelope.payload.remote_name.into();
1728
1729 let remote_message = repository_handle
1730 .update(&mut cx, |repository_handle, cx| {
1731 repository_handle.pull(branch_name, remote_name, askpass, cx)
1732 })?
1733 .await??;
1734
1735 Ok(proto::RemoteMessageResponse {
1736 stdout: remote_message.stdout,
1737 stderr: remote_message.stderr,
1738 })
1739 }
1740
1741 async fn handle_stage(
1742 this: Entity<Self>,
1743 envelope: TypedEnvelope<proto::Stage>,
1744 mut cx: AsyncApp,
1745 ) -> Result<proto::Ack> {
1746 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1747 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1748
1749 let entries = envelope
1750 .payload
1751 .paths
1752 .into_iter()
1753 .map(|path| RepoPath::new(&path))
1754 .collect::<Result<Vec<_>>>()?;
1755
1756 repository_handle
1757 .update(&mut cx, |repository_handle, cx| {
1758 repository_handle.stage_entries(entries, cx)
1759 })?
1760 .await?;
1761 Ok(proto::Ack {})
1762 }
1763
1764 async fn handle_unstage(
1765 this: Entity<Self>,
1766 envelope: TypedEnvelope<proto::Unstage>,
1767 mut cx: AsyncApp,
1768 ) -> Result<proto::Ack> {
1769 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1770 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1771
1772 let entries = envelope
1773 .payload
1774 .paths
1775 .into_iter()
1776 .map(|path| RepoPath::new(&path))
1777 .collect::<Result<Vec<_>>>()?;
1778
1779 repository_handle
1780 .update(&mut cx, |repository_handle, cx| {
1781 repository_handle.unstage_entries(entries, cx)
1782 })?
1783 .await?;
1784
1785 Ok(proto::Ack {})
1786 }
1787
1788 async fn handle_stash(
1789 this: Entity<Self>,
1790 envelope: TypedEnvelope<proto::Stash>,
1791 mut cx: AsyncApp,
1792 ) -> Result<proto::Ack> {
1793 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1794 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1795
1796 let entries = envelope
1797 .payload
1798 .paths
1799 .into_iter()
1800 .map(|path| RepoPath::new(&path))
1801 .collect::<Result<Vec<_>>>()?;
1802
1803 repository_handle
1804 .update(&mut cx, |repository_handle, cx| {
1805 repository_handle.stash_entries(entries, cx)
1806 })?
1807 .await?;
1808
1809 Ok(proto::Ack {})
1810 }
1811
1812 async fn handle_stash_pop(
1813 this: Entity<Self>,
1814 envelope: TypedEnvelope<proto::StashPop>,
1815 mut cx: AsyncApp,
1816 ) -> Result<proto::Ack> {
1817 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1818 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1819 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1820
1821 repository_handle
1822 .update(&mut cx, |repository_handle, cx| {
1823 repository_handle.stash_pop(stash_index, cx)
1824 })?
1825 .await?;
1826
1827 Ok(proto::Ack {})
1828 }
1829
1830 async fn handle_stash_apply(
1831 this: Entity<Self>,
1832 envelope: TypedEnvelope<proto::StashApply>,
1833 mut cx: AsyncApp,
1834 ) -> Result<proto::Ack> {
1835 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1836 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1837 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1838
1839 repository_handle
1840 .update(&mut cx, |repository_handle, cx| {
1841 repository_handle.stash_apply(stash_index, cx)
1842 })?
1843 .await?;
1844
1845 Ok(proto::Ack {})
1846 }
1847
1848 async fn handle_stash_drop(
1849 this: Entity<Self>,
1850 envelope: TypedEnvelope<proto::StashDrop>,
1851 mut cx: AsyncApp,
1852 ) -> Result<proto::Ack> {
1853 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1854 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1855 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1856
1857 repository_handle
1858 .update(&mut cx, |repository_handle, cx| {
1859 repository_handle.stash_drop(stash_index, cx)
1860 })?
1861 .await??;
1862
1863 Ok(proto::Ack {})
1864 }
1865
1866 async fn handle_set_index_text(
1867 this: Entity<Self>,
1868 envelope: TypedEnvelope<proto::SetIndexText>,
1869 mut cx: AsyncApp,
1870 ) -> Result<proto::Ack> {
1871 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1872 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1873 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1874
1875 repository_handle
1876 .update(&mut cx, |repository_handle, cx| {
1877 repository_handle.spawn_set_index_text_job(
1878 repo_path,
1879 envelope.payload.text,
1880 None,
1881 cx,
1882 )
1883 })?
1884 .await??;
1885 Ok(proto::Ack {})
1886 }
1887
1888 async fn handle_commit(
1889 this: Entity<Self>,
1890 envelope: TypedEnvelope<proto::Commit>,
1891 mut cx: AsyncApp,
1892 ) -> Result<proto::Ack> {
1893 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1894 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1895
1896 let message = SharedString::from(envelope.payload.message);
1897 let name = envelope.payload.name.map(SharedString::from);
1898 let email = envelope.payload.email.map(SharedString::from);
1899 let options = envelope.payload.options.unwrap_or_default();
1900
1901 repository_handle
1902 .update(&mut cx, |repository_handle, cx| {
1903 repository_handle.commit(
1904 message,
1905 name.zip(email),
1906 CommitOptions {
1907 amend: options.amend,
1908 signoff: options.signoff,
1909 },
1910 cx,
1911 )
1912 })?
1913 .await??;
1914 Ok(proto::Ack {})
1915 }
1916
1917 async fn handle_get_remotes(
1918 this: Entity<Self>,
1919 envelope: TypedEnvelope<proto::GetRemotes>,
1920 mut cx: AsyncApp,
1921 ) -> Result<proto::GetRemotesResponse> {
1922 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1923 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1924
1925 let branch_name = envelope.payload.branch_name;
1926
1927 let remotes = repository_handle
1928 .update(&mut cx, |repository_handle, _| {
1929 repository_handle.get_remotes(branch_name)
1930 })?
1931 .await??;
1932
1933 Ok(proto::GetRemotesResponse {
1934 remotes: remotes
1935 .into_iter()
1936 .map(|remotes| proto::get_remotes_response::Remote {
1937 name: remotes.name.to_string(),
1938 })
1939 .collect::<Vec<_>>(),
1940 })
1941 }
1942
1943 async fn handle_get_worktrees(
1944 this: Entity<Self>,
1945 envelope: TypedEnvelope<proto::GitGetWorktrees>,
1946 mut cx: AsyncApp,
1947 ) -> Result<proto::GitWorktreesResponse> {
1948 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1949 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1950
1951 let worktrees = repository_handle
1952 .update(&mut cx, |repository_handle, _| {
1953 repository_handle.worktrees()
1954 })?
1955 .await??;
1956
1957 Ok(proto::GitWorktreesResponse {
1958 worktrees: worktrees
1959 .into_iter()
1960 .map(|worktree| worktree_to_proto(&worktree))
1961 .collect::<Vec<_>>(),
1962 })
1963 }
1964
1965 async fn handle_create_worktree(
1966 this: Entity<Self>,
1967 envelope: TypedEnvelope<proto::GitCreateWorktree>,
1968 mut cx: AsyncApp,
1969 ) -> Result<proto::Ack> {
1970 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1971 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1972 let directory = PathBuf::from(envelope.payload.directory);
1973 let name = envelope.payload.name;
1974 let commit = envelope.payload.commit;
1975
1976 repository_handle
1977 .update(&mut cx, |repository_handle, _| {
1978 repository_handle.create_worktree(name, directory, commit)
1979 })?
1980 .await??;
1981
1982 Ok(proto::Ack {})
1983 }
1984
1985 async fn handle_get_branches(
1986 this: Entity<Self>,
1987 envelope: TypedEnvelope<proto::GitGetBranches>,
1988 mut cx: AsyncApp,
1989 ) -> Result<proto::GitBranchesResponse> {
1990 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1991 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1992
1993 let branches = repository_handle
1994 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1995 .await??;
1996
1997 Ok(proto::GitBranchesResponse {
1998 branches: branches
1999 .into_iter()
2000 .map(|branch| branch_to_proto(&branch))
2001 .collect::<Vec<_>>(),
2002 })
    }

    async fn handle_get_default_branch(
2005 this: Entity<Self>,
2006 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2007 mut cx: AsyncApp,
2008 ) -> Result<proto::GetDefaultBranchResponse> {
2009 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2010 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2011
2012 let branch = repository_handle
2013 .update(&mut cx, |repository_handle, _| {
2014 repository_handle.default_branch()
2015 })?
2016 .await??
2017 .map(Into::into);
2018
2019 Ok(proto::GetDefaultBranchResponse { branch })
    }

    async fn handle_create_branch(
2022 this: Entity<Self>,
2023 envelope: TypedEnvelope<proto::GitCreateBranch>,
2024 mut cx: AsyncApp,
2025 ) -> Result<proto::Ack> {
2026 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2027 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2028 let branch_name = envelope.payload.branch_name;
2029
2030 repository_handle
2031 .update(&mut cx, |repository_handle, _| {
2032 repository_handle.create_branch(branch_name)
2033 })?
2034 .await??;
2035
2036 Ok(proto::Ack {})
2037 }
2038
2039 async fn handle_change_branch(
2040 this: Entity<Self>,
2041 envelope: TypedEnvelope<proto::GitChangeBranch>,
2042 mut cx: AsyncApp,
2043 ) -> Result<proto::Ack> {
2044 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2045 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2046 let branch_name = envelope.payload.branch_name;
2047
2048 repository_handle
2049 .update(&mut cx, |repository_handle, _| {
2050 repository_handle.change_branch(branch_name)
2051 })?
2052 .await??;
2053
2054 Ok(proto::Ack {})
2055 }
2056
2057 async fn handle_rename_branch(
2058 this: Entity<Self>,
2059 envelope: TypedEnvelope<proto::GitRenameBranch>,
2060 mut cx: AsyncApp,
2061 ) -> Result<proto::Ack> {
2062 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2063 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2064 let branch = envelope.payload.branch;
2065 let new_name = envelope.payload.new_name;
2066
2067 repository_handle
2068 .update(&mut cx, |repository_handle, _| {
2069 repository_handle.rename_branch(branch, new_name)
2070 })?
2071 .await??;
2072
2073 Ok(proto::Ack {})
2074 }
2075
2076 async fn handle_show(
2077 this: Entity<Self>,
2078 envelope: TypedEnvelope<proto::GitShow>,
2079 mut cx: AsyncApp,
2080 ) -> Result<proto::GitCommitDetails> {
2081 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2082 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2083
2084 let commit = repository_handle
2085 .update(&mut cx, |repository_handle, _| {
2086 repository_handle.show(envelope.payload.commit)
2087 })?
2088 .await??;
2089 Ok(proto::GitCommitDetails {
2090 sha: commit.sha.into(),
2091 message: commit.message.into(),
2092 commit_timestamp: commit.commit_timestamp,
2093 author_email: commit.author_email.into(),
2094 author_name: commit.author_name.into(),
2095 })
2096 }
2097
2098 async fn handle_load_commit_diff(
2099 this: Entity<Self>,
2100 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2101 mut cx: AsyncApp,
2102 ) -> Result<proto::LoadCommitDiffResponse> {
2103 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2104 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2105
2106 let commit_diff = repository_handle
2107 .update(&mut cx, |repository_handle, _| {
2108 repository_handle.load_commit_diff(envelope.payload.commit)
2109 })?
2110 .await??;
2111 Ok(proto::LoadCommitDiffResponse {
2112 files: commit_diff
2113 .files
2114 .into_iter()
2115 .map(|file| proto::CommitFile {
2116 path: file.path.to_proto(),
2117 old_text: file.old_text,
2118 new_text: file.new_text,
2119 })
2120 .collect(),
2121 })
2122 }
2123
2124 async fn handle_reset(
2125 this: Entity<Self>,
2126 envelope: TypedEnvelope<proto::GitReset>,
2127 mut cx: AsyncApp,
2128 ) -> Result<proto::Ack> {
2129 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2130 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2131
2132 let mode = match envelope.payload.mode() {
2133 git_reset::ResetMode::Soft => ResetMode::Soft,
2134 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2135 };
2136
2137 repository_handle
2138 .update(&mut cx, |repository_handle, cx| {
2139 repository_handle.reset(envelope.payload.commit, mode, cx)
2140 })?
2141 .await??;
2142 Ok(proto::Ack {})
2143 }
2144
2145 async fn handle_checkout_files(
2146 this: Entity<Self>,
2147 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2148 mut cx: AsyncApp,
2149 ) -> Result<proto::Ack> {
2150 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2151 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2152 let paths = envelope
2153 .payload
2154 .paths
2155 .iter()
2156 .map(|s| RepoPath::from_proto(s))
2157 .collect::<Result<Vec<_>>>()?;
2158
2159 repository_handle
2160 .update(&mut cx, |repository_handle, cx| {
2161 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2162 })?
2163 .await??;
2164 Ok(proto::Ack {})
2165 }
2166
2167 async fn handle_open_commit_message_buffer(
2168 this: Entity<Self>,
2169 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2170 mut cx: AsyncApp,
2171 ) -> Result<proto::OpenBufferResponse> {
2172 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2173 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2174 let buffer = repository
2175 .update(&mut cx, |repository, cx| {
2176 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2177 })?
2178 .await?;
2179
2180 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2181 this.update(&mut cx, |this, cx| {
2182 this.buffer_store.update(cx, |buffer_store, cx| {
2183 buffer_store
2184 .create_buffer_for_peer(
2185 &buffer,
2186 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2187 cx,
2188 )
2189 .detach_and_log_err(cx);
2190 })
2191 })?;
2192
2193 Ok(proto::OpenBufferResponse {
2194 buffer_id: buffer_id.to_proto(),
2195 })
2196 }
2197
2198 async fn handle_askpass(
2199 this: Entity<Self>,
2200 envelope: TypedEnvelope<proto::AskPassRequest>,
2201 mut cx: AsyncApp,
2202 ) -> Result<proto::AskPassResponse> {
2203 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2204 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2205
2206 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2207 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2208 debug_panic!("no askpass found");
2209 anyhow::bail!("no askpass found");
2210 };
2211
2212 let response = askpass
2213 .ask_password(envelope.payload.prompt)
2214 .await
2215 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2216
2217 delegates
2218 .lock()
2219 .insert(envelope.payload.askpass_id, askpass);
2220
        // In fact, we don't quite know what we're doing here, as we're sending the askpass password unencrypted, but..
2222 Ok(proto::AskPassResponse {
2223 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2224 })
2225 }
2226
2227 async fn handle_check_for_pushed_commits(
2228 this: Entity<Self>,
2229 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2230 mut cx: AsyncApp,
2231 ) -> Result<proto::CheckForPushedCommitsResponse> {
2232 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2233 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2234
2235 let branches = repository_handle
2236 .update(&mut cx, |repository_handle, _| {
2237 repository_handle.check_for_pushed_commits()
2238 })?
2239 .await??;
2240 Ok(proto::CheckForPushedCommitsResponse {
2241 pushed_to: branches
2242 .into_iter()
2243 .map(|commit| commit.to_string())
2244 .collect(),
2245 })
2246 }
2247
2248 async fn handle_git_diff(
2249 this: Entity<Self>,
2250 envelope: TypedEnvelope<proto::GitDiff>,
2251 mut cx: AsyncApp,
2252 ) -> Result<proto::GitDiffResponse> {
2253 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2254 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2255 let diff_type = match envelope.payload.diff_type() {
2256 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2257 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2258 };
2259
2260 let mut diff = repository_handle
2261 .update(&mut cx, |repository_handle, cx| {
2262 repository_handle.diff(diff_type, cx)
2263 })?
2264 .await??;
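        // Cap very large diffs at roughly one million characters so the response payload stays bounded.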
2265 const ONE_MB: usize = 1_000_000;
2266 if diff.len() > ONE_MB {
2267 diff = diff.chars().take(ONE_MB).collect()
2268 }
2269
2270 Ok(proto::GitDiffResponse { diff })
2271 }
2272
2273 async fn handle_tree_diff(
2274 this: Entity<Self>,
2275 request: TypedEnvelope<proto::GetTreeDiff>,
2276 mut cx: AsyncApp,
2277 ) -> Result<proto::GetTreeDiffResponse> {
2278 let repository_id = RepositoryId(request.payload.repository_id);
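        // Pick the requested comparison: a diff against the merge base of `base` and `head`, or a
        // straight diff from `base` to `head`.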
2279 let diff_type = if request.payload.is_merge {
2280 DiffTreeType::MergeBase {
2281 base: request.payload.base.into(),
2282 head: request.payload.head.into(),
2283 }
2284 } else {
2285 DiffTreeType::Since {
2286 base: request.payload.base.into(),
2287 head: request.payload.head.into(),
2288 }
2289 };
2290
2291 let diff = this
2292 .update(&mut cx, |this, cx| {
2293 let repository = this.repositories().get(&repository_id)?;
2294 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2295 })?
2296 .context("missing repository")?
2297 .await??;
2298
2299 Ok(proto::GetTreeDiffResponse {
2300 entries: diff
2301 .entries
2302 .into_iter()
2303 .map(|(path, status)| proto::TreeDiffStatus {
2304 path: path.0.to_proto(),
2305 status: match status {
                        TreeDiffStatus::Added => proto::tree_diff_status::Status::Added.into(),
2307 TreeDiffStatus::Modified { .. } => {
2308 proto::tree_diff_status::Status::Modified.into()
2309 }
2310 TreeDiffStatus::Deleted { .. } => {
2311 proto::tree_diff_status::Status::Deleted.into()
2312 }
2313 },
2314 oid: match status {
2315 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2316 Some(old.to_string())
2317 }
2318 TreeDiffStatus::Added => None,
2319 },
2320 })
2321 .collect(),
2322 })
2323 }
2324
2325 async fn handle_get_blob_content(
2326 this: Entity<Self>,
2327 request: TypedEnvelope<proto::GetBlobContent>,
2328 mut cx: AsyncApp,
2329 ) -> Result<proto::GetBlobContentResponse> {
2330 let oid = git::Oid::from_str(&request.payload.oid)?;
2331 let repository_id = RepositoryId(request.payload.repository_id);
2332 let content = this
2333 .update(&mut cx, |this, cx| {
2334 let repository = this.repositories().get(&repository_id)?;
2335 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2336 })?
2337 .context("missing repository")?
2338 .await?;
2339 Ok(proto::GetBlobContentResponse { content })
2340 }
2341
2342 async fn handle_open_unstaged_diff(
2343 this: Entity<Self>,
2344 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2345 mut cx: AsyncApp,
2346 ) -> Result<proto::OpenUnstagedDiffResponse> {
2347 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2348 let diff = this
2349 .update(&mut cx, |this, cx| {
2350 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2351 Some(this.open_unstaged_diff(buffer, cx))
2352 })?
2353 .context("missing buffer")?
2354 .await?;
2355 this.update(&mut cx, |this, _| {
2356 let shared_diffs = this
2357 .shared_diffs
2358 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2359 .or_default();
2360 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2361 })?;
2362 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2363 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2364 }
2365
2366 async fn handle_open_uncommitted_diff(
2367 this: Entity<Self>,
2368 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2369 mut cx: AsyncApp,
2370 ) -> Result<proto::OpenUncommittedDiffResponse> {
2371 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2372 let diff = this
2373 .update(&mut cx, |this, cx| {
2374 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2375 Some(this.open_uncommitted_diff(buffer, cx))
2376 })?
2377 .context("missing buffer")?
2378 .await?;
2379 this.update(&mut cx, |this, _| {
2380 let shared_diffs = this
2381 .shared_diffs
2382 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2383 .or_default();
2384 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2385 })?;
2386 diff.read_with(&cx, |diff, cx| {
2387 use proto::open_uncommitted_diff_response::Mode;
2388
2389 let unstaged_diff = diff.secondary_diff();
2390 let index_snapshot = unstaged_diff.and_then(|diff| {
2391 let diff = diff.read(cx);
2392 diff.base_text_exists().then(|| diff.base_text())
2393 });
2394
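            // Decide which base texts the peer actually needs: when the index snapshot is the same
            // buffer as HEAD's base text, only the committed text is sent.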
2395 let mode;
2396 let staged_text;
2397 let committed_text;
2398 if diff.base_text_exists() {
2399 let committed_snapshot = diff.base_text();
2400 committed_text = Some(committed_snapshot.text());
2401 if let Some(index_text) = index_snapshot {
2402 if index_text.remote_id() == committed_snapshot.remote_id() {
2403 mode = Mode::IndexMatchesHead;
2404 staged_text = None;
2405 } else {
2406 mode = Mode::IndexAndHead;
2407 staged_text = Some(index_text.text());
2408 }
2409 } else {
2410 mode = Mode::IndexAndHead;
2411 staged_text = None;
2412 }
2413 } else {
2414 mode = Mode::IndexAndHead;
2415 committed_text = None;
2416 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2417 }
2418
2419 proto::OpenUncommittedDiffResponse {
2420 committed_text,
2421 staged_text,
2422 mode: mode.into(),
2423 }
2424 })
2425 }
2426
2427 async fn handle_update_diff_bases(
2428 this: Entity<Self>,
2429 request: TypedEnvelope<proto::UpdateDiffBases>,
2430 mut cx: AsyncApp,
2431 ) -> Result<()> {
2432 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2433 this.update(&mut cx, |this, cx| {
2434 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2435 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2436 {
2437 let buffer = buffer.read(cx).text_snapshot();
2438 diff_state.update(cx, |diff_state, cx| {
2439 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2440 })
2441 }
2442 })
2443 }
2444
2445 async fn handle_blame_buffer(
2446 this: Entity<Self>,
2447 envelope: TypedEnvelope<proto::BlameBuffer>,
2448 mut cx: AsyncApp,
2449 ) -> Result<proto::BlameBufferResponse> {
2450 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2451 let version = deserialize_version(&envelope.payload.version);
2452 let buffer = this.read_with(&cx, |this, cx| {
2453 this.buffer_store.read(cx).get_existing(buffer_id)
2454 })??;
2455 buffer
2456 .update(&mut cx, |buffer, _| {
2457 buffer.wait_for_version(version.clone())
2458 })?
2459 .await?;
2460 let blame = this
2461 .update(&mut cx, |this, cx| {
2462 this.blame_buffer(&buffer, Some(version), cx)
2463 })?
2464 .await?;
2465 Ok(serialize_blame_buffer_response(blame))
2466 }
2467
2468 async fn handle_get_permalink_to_line(
2469 this: Entity<Self>,
2470 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2471 mut cx: AsyncApp,
2472 ) -> Result<proto::GetPermalinkToLineResponse> {
2473 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2474 // let version = deserialize_version(&envelope.payload.version);
2475 let selection = {
2476 let proto_selection = envelope
2477 .payload
2478 .selection
                .context("no selection defined to get permalink for")?;
2480 proto_selection.start as u32..proto_selection.end as u32
2481 };
2482 let buffer = this.read_with(&cx, |this, cx| {
2483 this.buffer_store.read(cx).get_existing(buffer_id)
2484 })??;
2485 let permalink = this
2486 .update(&mut cx, |this, cx| {
2487 this.get_permalink_to_line(&buffer, selection, cx)
2488 })?
2489 .await?;
2490 Ok(proto::GetPermalinkToLineResponse {
2491 permalink: permalink.to_string(),
2492 })
2493 }
2494
2495 fn repository_for_request(
2496 this: &Entity<Self>,
2497 id: RepositoryId,
2498 cx: &mut AsyncApp,
2499 ) -> Result<Entity<Repository>> {
2500 this.read_with(cx, |this, _| {
2501 this.repositories
2502 .get(&id)
2503 .context("missing repository handle")
2504 .cloned()
2505 })?
2506 }
2507
2508 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2509 self.repositories
2510 .iter()
2511 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2512 .collect()
2513 }
2514
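    /// Groups the updated worktree paths by the repository whose working directory contains them,
    /// assigning each path to its innermost containing repository.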
2515 fn process_updated_entries(
2516 &self,
2517 worktree: &Entity<Worktree>,
2518 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2519 cx: &mut App,
2520 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2521 let path_style = worktree.read(cx).path_style();
2522 let mut repo_paths = self
2523 .repositories
2524 .values()
2525 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2526 .collect::<Vec<_>>();
2527 let mut entries: Vec<_> = updated_entries
2528 .iter()
2529 .map(|(path, _, _)| path.clone())
2530 .collect();
2531 entries.sort();
2532 let worktree = worktree.read(cx);
2533
2534 let entries = entries
2535 .into_iter()
2536 .map(|path| worktree.absolutize(&path))
2537 .collect::<Arc<[_]>>();
2538
2539 let executor = cx.background_executor().clone();
2540 cx.background_executor().spawn(async move {
2541 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2542 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2543 let mut tasks = FuturesOrdered::new();
2544 for (repo_path, repo) in repo_paths.into_iter().rev() {
2545 let entries = entries.clone();
2546 let task = executor.spawn(async move {
2547 // Find all repository paths that belong to this repo
2548 let mut ix = entries.partition_point(|path| path < &*repo_path);
2549 if ix == entries.len() {
2550 return None;
2551 };
2552
2553 let mut paths = Vec::new();
                // All paths prefixed by a given repo will constitute a contiguous range.
2555 while let Some(path) = entries.get(ix)
2556 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2557 &repo_path, path, path_style,
2558 )
2559 {
2560 paths.push((repo_path, ix));
2561 ix += 1;
2562 }
2563 if paths.is_empty() {
2564 None
2565 } else {
2566 Some((repo, paths))
2567 }
2568 });
2569 tasks.push_back(task);
2570 }
2571
2572 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2573 let mut path_was_used = vec![false; entries.len()];
2574 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were built over repositories in reverse order, so an inner (nested)
            // repository is visited before the repository that contains it; we always want to
            // assign a path to its innermost repository.
2577 for t in tasks {
2578 let Some((repo, paths)) = t else {
2579 continue;
2580 };
2581 let entry = paths_by_git_repo.entry(repo).or_default();
2582 for (repo_path, ix) in paths {
2583 if path_was_used[ix] {
2584 continue;
2585 }
2586 path_was_used[ix] = true;
2587 entry.push(repo_path);
2588 }
2589 }
2590
2591 paths_by_git_repo
2592 })
2593 }
2594}
2595
2596impl BufferGitState {
2597 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2598 Self {
2599 unstaged_diff: Default::default(),
2600 uncommitted_diff: Default::default(),
2601 recalculate_diff_task: Default::default(),
2602 language: Default::default(),
2603 language_registry: Default::default(),
2604 recalculating_tx: postage::watch::channel_with(false).0,
2605 hunk_staging_operation_count: 0,
2606 hunk_staging_operation_count_as_of_write: 0,
2607 head_text: Default::default(),
2608 index_text: Default::default(),
2609 head_changed: Default::default(),
2610 index_changed: Default::default(),
2611 language_changed: Default::default(),
2612 conflict_updated_futures: Default::default(),
2613 conflict_set: Default::default(),
2614 reparse_conflict_markers_task: Default::default(),
2615 }
2616 }
2617
2618 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2619 self.language = buffer.read(cx).language().cloned();
2620 self.language_changed = true;
2621 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2622 }
2623
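    /// Re-parses conflict markers in the given buffer snapshot on a background task and updates the
    /// tracked `ConflictSet`. The returned receiver fires once the update has been applied, and is
    /// dropped without firing if no conflicts are currently tracked for this buffer.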
2624 fn reparse_conflict_markers(
2625 &mut self,
2626 buffer: text::BufferSnapshot,
2627 cx: &mut Context<Self>,
2628 ) -> oneshot::Receiver<()> {
2629 let (tx, rx) = oneshot::channel();
2630
2631 let Some(conflict_set) = self
2632 .conflict_set
2633 .as_ref()
2634 .and_then(|conflict_set| conflict_set.upgrade())
2635 else {
2636 return rx;
2637 };
2638
2639 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2640 if conflict_set.has_conflict {
2641 Some(conflict_set.snapshot())
2642 } else {
2643 None
2644 }
2645 });
2646
2647 if let Some(old_snapshot) = old_snapshot {
2648 self.conflict_updated_futures.push(tx);
2649 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2650 let (snapshot, changed_range) = cx
2651 .background_spawn(async move {
2652 let new_snapshot = ConflictSet::parse(&buffer);
2653 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2654 (new_snapshot, changed_range)
2655 })
2656 .await;
2657 this.update(cx, |this, cx| {
2658 if let Some(conflict_set) = &this.conflict_set {
2659 conflict_set
2660 .update(cx, |conflict_set, cx| {
2661 conflict_set.set_snapshot(snapshot, changed_range, cx);
2662 })
2663 .ok();
2664 }
2665 let futures = std::mem::take(&mut this.conflict_updated_futures);
2666 for tx in futures {
2667 tx.send(()).ok();
2668 }
2669 })
2670 }))
2671 }
2672
2673 rx
2674 }
2675
2676 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2677 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2678 }
2679
2680 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2681 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2682 }
2683
2684 fn handle_base_texts_updated(
2685 &mut self,
2686 buffer: text::BufferSnapshot,
2687 message: proto::UpdateDiffBases,
2688 cx: &mut Context<Self>,
2689 ) {
2690 use proto::update_diff_bases::Mode;
2691
2692 let Some(mode) = Mode::from_i32(message.mode) else {
2693 return;
2694 };
2695
2696 let diff_bases_change = match mode {
2697 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2698 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2699 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2700 Mode::IndexAndHead => DiffBasesChange::SetEach {
2701 index: message.staged_text,
2702 head: message.committed_text,
2703 },
2704 };
2705
2706 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2707 }
2708
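    /// Returns a future that resolves once the in-flight diff recalculation completes, or `None` if
    /// no recalculation is currently in progress.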
2709 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2710 if *self.recalculating_tx.borrow() {
2711 let mut rx = self.recalculating_tx.subscribe();
2712 Some(async move {
2713 loop {
2714 let is_recalculating = rx.recv().await;
2715 if is_recalculating != Some(true) {
2716 break;
2717 }
2718 }
2719 })
2720 } else {
2721 None
2722 }
2723 }
2724
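    /// Applies new index and/or HEAD base texts (normalizing their line endings) and kicks off a
    /// diff recalculation against the given buffer snapshot.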
2725 fn diff_bases_changed(
2726 &mut self,
2727 buffer: text::BufferSnapshot,
2728 diff_bases_change: Option<DiffBasesChange>,
2729 cx: &mut Context<Self>,
2730 ) {
2731 match diff_bases_change {
2732 Some(DiffBasesChange::SetIndex(index)) => {
2733 self.index_text = index.map(|mut index| {
2734 text::LineEnding::normalize(&mut index);
2735 Arc::new(index)
2736 });
2737 self.index_changed = true;
2738 }
2739 Some(DiffBasesChange::SetHead(head)) => {
2740 self.head_text = head.map(|mut head| {
2741 text::LineEnding::normalize(&mut head);
2742 Arc::new(head)
2743 });
2744 self.head_changed = true;
2745 }
2746 Some(DiffBasesChange::SetBoth(text)) => {
2747 let text = text.map(|mut text| {
2748 text::LineEnding::normalize(&mut text);
2749 Arc::new(text)
2750 });
2751 self.head_text = text.clone();
2752 self.index_text = text;
2753 self.head_changed = true;
2754 self.index_changed = true;
2755 }
2756 Some(DiffBasesChange::SetEach { index, head }) => {
2757 self.index_text = index.map(|mut index| {
2758 text::LineEnding::normalize(&mut index);
2759 Arc::new(index)
2760 });
2761 self.index_changed = true;
2762 self.head_text = head.map(|mut head| {
2763 text::LineEnding::normalize(&mut head);
2764 Arc::new(head)
2765 });
2766 self.head_changed = true;
2767 }
2768 None => {}
2769 }
2770
2771 self.recalculate_diffs(buffer, cx)
2772 }
2773
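    /// Recomputes the unstaged and uncommitted diffs for the buffer on a background task, reusing
    /// the unstaged diff as the uncommitted diff when the index matches HEAD.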
2774 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2775 *self.recalculating_tx.borrow_mut() = true;
2776
2777 let language = self.language.clone();
2778 let language_registry = self.language_registry.clone();
2779 let unstaged_diff = self.unstaged_diff();
2780 let uncommitted_diff = self.uncommitted_diff();
2781 let head = self.head_text.clone();
2782 let index = self.index_text.clone();
2783 let index_changed = self.index_changed;
2784 let head_changed = self.head_changed;
2785 let language_changed = self.language_changed;
2786 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
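        // `SetBoth` stores the same `Arc` in both fields, so pointer equality is enough to detect
        // an index that matches HEAD.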
2787 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2788 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2789 (None, None) => true,
2790 _ => false,
2791 };
2792 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2793 log::debug!(
2794 "start recalculating diffs for buffer {}",
2795 buffer.remote_id()
2796 );
2797
2798 let mut new_unstaged_diff = None;
2799 if let Some(unstaged_diff) = &unstaged_diff {
2800 new_unstaged_diff = Some(
2801 BufferDiff::update_diff(
2802 unstaged_diff.clone(),
2803 buffer.clone(),
2804 index,
2805 index_changed,
2806 language_changed,
2807 language.clone(),
2808 language_registry.clone(),
2809 cx,
2810 )
2811 .await?,
2812 );
2813 }
2814
2815 let mut new_uncommitted_diff = None;
2816 if let Some(uncommitted_diff) = &uncommitted_diff {
2817 new_uncommitted_diff = if index_matches_head {
2818 new_unstaged_diff.clone()
2819 } else {
2820 Some(
2821 BufferDiff::update_diff(
2822 uncommitted_diff.clone(),
2823 buffer.clone(),
2824 head,
2825 head_changed,
2826 language_changed,
2827 language.clone(),
2828 language_registry.clone(),
2829 cx,
2830 )
2831 .await?,
2832 )
2833 }
2834 }
2835
2836 let cancel = this.update(cx, |this, _| {
2837 // This checks whether all pending stage/unstage operations
2838 // have quiesced (i.e. both the corresponding write and the
2839 // read of that write have completed). If not, then we cancel
2840 // this recalculation attempt to avoid invalidating pending
2841 // state too quickly; another recalculation will come along
2842 // later and clear the pending state once the state of the index has settled.
2843 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2844 *this.recalculating_tx.borrow_mut() = false;
2845 true
2846 } else {
2847 false
2848 }
2849 })?;
2850 if cancel {
2851 log::debug!(
2852 concat!(
                        "aborting recalculating diffs for buffer {} ",
2854 "due to subsequent hunk operations",
2855 ),
2856 buffer.remote_id()
2857 );
2858 return Ok(());
2859 }
2860
2861 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2862 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2863 {
2864 unstaged_diff.update(cx, |diff, cx| {
2865 if language_changed {
2866 diff.language_changed(cx);
2867 }
2868 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2869 })?
2870 } else {
2871 None
2872 };
2873
2874 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2875 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2876 {
2877 uncommitted_diff.update(cx, |diff, cx| {
2878 if language_changed {
2879 diff.language_changed(cx);
2880 }
2881 diff.set_snapshot_with_secondary(
2882 new_uncommitted_diff,
2883 &buffer,
2884 unstaged_changed_range,
2885 true,
2886 cx,
2887 );
2888 })?;
2889 }
2890
2891 log::debug!(
2892 "finished recalculating diffs for buffer {}",
2893 buffer.remote_id()
2894 );
2895
2896 if let Some(this) = this.upgrade() {
2897 this.update(cx, |this, _| {
2898 this.index_changed = false;
2899 this.head_changed = false;
2900 this.language_changed = false;
2901 *this.recalculating_tx.borrow_mut() = false;
2902 })?;
2903 }
2904
2905 Ok(())
2906 }));
2907 }
2908}
2909
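/// Builds an askpass delegate that forwards password prompts to the downstream client and relays
/// the response back to the waiting askpass session, zeroizing the plaintext afterwards.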
2910fn make_remote_delegate(
2911 this: Entity<GitStore>,
2912 project_id: u64,
2913 repository_id: RepositoryId,
2914 askpass_id: u64,
2915 cx: &mut AsyncApp,
2916) -> AskPassDelegate {
2917 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2918 this.update(cx, |this, cx| {
2919 let Some((client, _)) = this.downstream_client() else {
2920 return;
2921 };
2922 let response = client.request(proto::AskPassRequest {
2923 project_id,
2924 repository_id: repository_id.to_proto(),
2925 askpass_id,
2926 prompt,
2927 });
2928 cx.spawn(async move |_, _| {
2929 let mut response = response.await?.response;
2930 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2931 .ok();
2932 response.zeroize();
2933 anyhow::Ok(())
2934 })
2935 .detach_and_log_err(cx);
2936 })
2937 .log_err();
2938 })
2939}
2940
2941impl RepositoryId {
2942 pub fn to_proto(self) -> u64 {
2943 self.0
2944 }
2945
2946 pub fn from_proto(id: u64) -> Self {
2947 RepositoryId(id)
2948 }
2949}
2950
2951impl RepositorySnapshot {
2952 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2953 Self {
2954 id,
2955 statuses_by_path: Default::default(),
2956 work_directory_abs_path,
2957 branch: None,
2958 head_commit: None,
2959 scan_id: 0,
2960 merge: Default::default(),
2961 remote_origin_url: None,
2962 remote_upstream_url: None,
2963 stash_entries: Default::default(),
2964 path_style,
2965 }
2966 }
2967
2968 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2969 proto::UpdateRepository {
2970 branch_summary: self.branch.as_ref().map(branch_to_proto),
2971 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2972 updated_statuses: self
2973 .statuses_by_path
2974 .iter()
2975 .map(|entry| entry.to_proto())
2976 .collect(),
2977 removed_statuses: Default::default(),
2978 current_merge_conflicts: self
2979 .merge
2980 .conflicted_paths
2981 .iter()
2982 .map(|repo_path| repo_path.to_proto())
2983 .collect(),
2984 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2985 project_id,
2986 id: self.id.to_proto(),
2987 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2988 entry_ids: vec![self.id.to_proto()],
2989 scan_id: self.scan_id,
2990 is_last_update: true,
2991 stash_entries: self
2992 .stash_entries
2993 .entries
2994 .iter()
2995 .map(stash_to_proto)
2996 .collect(),
2997 }
2998 }
2999
3000 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3001 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3002 let mut removed_statuses: Vec<String> = Vec::new();
3003
3004 let mut new_statuses = self.statuses_by_path.iter().peekable();
3005 let mut old_statuses = old.statuses_by_path.iter().peekable();
3006
3007 let mut current_new_entry = new_statuses.next();
3008 let mut current_old_entry = old_statuses.next();
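        // Walk both sorted status lists in lockstep: entries only in `self` are updates, entries
        // only in `old` are removals, and entries present in both are emitted only if their status
        // changed.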
3009 loop {
3010 match (current_new_entry, current_old_entry) {
3011 (Some(new_entry), Some(old_entry)) => {
3012 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3013 Ordering::Less => {
3014 updated_statuses.push(new_entry.to_proto());
3015 current_new_entry = new_statuses.next();
3016 }
3017 Ordering::Equal => {
3018 if new_entry.status != old_entry.status {
3019 updated_statuses.push(new_entry.to_proto());
3020 }
3021 current_old_entry = old_statuses.next();
3022 current_new_entry = new_statuses.next();
3023 }
3024 Ordering::Greater => {
3025 removed_statuses.push(old_entry.repo_path.to_proto());
3026 current_old_entry = old_statuses.next();
3027 }
3028 }
3029 }
3030 (None, Some(old_entry)) => {
3031 removed_statuses.push(old_entry.repo_path.to_proto());
3032 current_old_entry = old_statuses.next();
3033 }
3034 (Some(new_entry), None) => {
3035 updated_statuses.push(new_entry.to_proto());
3036 current_new_entry = new_statuses.next();
3037 }
3038 (None, None) => break,
3039 }
3040 }
3041
3042 proto::UpdateRepository {
3043 branch_summary: self.branch.as_ref().map(branch_to_proto),
3044 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3045 updated_statuses,
3046 removed_statuses,
3047 current_merge_conflicts: self
3048 .merge
3049 .conflicted_paths
3050 .iter()
3051 .map(|path| path.to_proto())
3052 .collect(),
3053 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3054 project_id,
3055 id: self.id.to_proto(),
3056 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3057 entry_ids: vec![],
3058 scan_id: self.scan_id,
3059 is_last_update: true,
3060 stash_entries: self
3061 .stash_entries
3062 .entries
3063 .iter()
3064 .map(stash_to_proto)
3065 .collect(),
3066 }
3067 }
3068
3069 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3070 self.statuses_by_path.iter().cloned()
3071 }
3072
3073 pub fn status_summary(&self) -> GitSummary {
3074 self.statuses_by_path.summary().item_summary
3075 }
3076
3077 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3078 self.statuses_by_path
3079 .get(&PathKey(path.0.clone()), ())
3080 .cloned()
3081 }
3082
3083 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3084 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3085 }
3086
3087 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3088 self.path_style
3089 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3090 .unwrap()
3091 .into()
3092 }
3093
3094 #[inline]
3095 fn abs_path_to_repo_path_inner(
3096 work_directory_abs_path: &Path,
3097 abs_path: &Path,
3098 path_style: PathStyle,
3099 ) -> Option<RepoPath> {
3100 abs_path
3101 .strip_prefix(&work_directory_abs_path)
3102 .ok()
3103 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3104 }
3105
3106 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3107 self.merge.conflicted_paths.contains(repo_path)
3108 }
3109
3110 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3111 let had_conflict_on_last_merge_head_change =
3112 self.merge.conflicted_paths.contains(repo_path);
3113 let has_conflict_currently = self
3114 .status_for_path(repo_path)
3115 .is_some_and(|entry| entry.status.is_conflicted());
3116 had_conflict_on_last_merge_head_change || has_conflict_currently
3117 }
3118
3119 /// This is the name that will be displayed in the repository selector for this repository.
3120 pub fn display_name(&self) -> SharedString {
3121 self.work_directory_abs_path
3122 .file_name()
3123 .unwrap_or_default()
3124 .to_string_lossy()
3125 .to_string()
3126 .into()
3127 }
3128}
3129
3130pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3131 proto::StashEntry {
3132 oid: entry.oid.as_bytes().to_vec(),
3133 message: entry.message.clone(),
3134 branch: entry.branch.clone(),
3135 index: entry.index as u64,
3136 timestamp: entry.timestamp,
3137 }
3138}
3139
3140pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3141 Ok(StashEntry {
3142 oid: Oid::from_bytes(&entry.oid)?,
3143 message: entry.message.clone(),
3144 index: entry.index as usize,
3145 branch: entry.branch.clone(),
3146 timestamp: entry.timestamp,
3147 })
3148}
3149
3150impl MergeDetails {
3151 async fn load(
3152 backend: &Arc<dyn GitRepository>,
3153 status: &SumTree<StatusEntry>,
3154 prev_snapshot: &RepositorySnapshot,
3155 ) -> Result<(MergeDetails, bool)> {
3156 log::debug!("load merge details");
3157 let message = backend.merge_message().await;
3158 let heads = backend
3159 .revparse_batch(vec![
3160 "MERGE_HEAD".into(),
3161 "CHERRY_PICK_HEAD".into(),
3162 "REBASE_HEAD".into(),
3163 "REVERT_HEAD".into(),
3164 "APPLY_HEAD".into(),
3165 ])
3166 .await
3167 .log_err()
3168 .unwrap_or_default()
3169 .into_iter()
3170 .map(|opt| opt.map(SharedString::from))
3171 .collect::<Vec<_>>();
3172 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3173 let conflicted_paths = if merge_heads_changed {
3174 let current_conflicted_paths = TreeSet::from_ordered_entries(
3175 status
3176 .iter()
3177 .filter(|entry| entry.status.is_conflicted())
3178 .map(|entry| entry.repo_path.clone()),
3179 );
3180
3181 // It can happen that we run a scan while a lengthy merge is in progress
3182 // that will eventually result in conflicts, but before those conflicts
3183 // are reported by `git status`. Since for the moment we only care about
3184 // the merge heads state for the purposes of tracking conflicts, don't update
3185 // this state until we see some conflicts.
3186 if heads.iter().any(Option::is_some)
3187 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3188 && current_conflicted_paths.is_empty()
3189 {
3190 log::debug!("not updating merge heads because no conflicts found");
3191 return Ok((
3192 MergeDetails {
3193 message: message.map(SharedString::from),
3194 ..prev_snapshot.merge.clone()
3195 },
3196 false,
3197 ));
3198 }
3199
3200 current_conflicted_paths
3201 } else {
3202 prev_snapshot.merge.conflicted_paths.clone()
3203 };
3204 let details = MergeDetails {
3205 conflicted_paths,
3206 message: message.map(SharedString::from),
3207 heads,
3208 };
3209 Ok((details, merge_heads_changed))
3210 }
3211}
3212
3213impl Repository {
3214 pub fn snapshot(&self) -> RepositorySnapshot {
3215 self.snapshot.clone()
3216 }
3217
3218 fn local(
3219 id: RepositoryId,
3220 work_directory_abs_path: Arc<Path>,
3221 dot_git_abs_path: Arc<Path>,
3222 repository_dir_abs_path: Arc<Path>,
3223 common_dir_abs_path: Arc<Path>,
3224 project_environment: WeakEntity<ProjectEnvironment>,
3225 fs: Arc<dyn Fs>,
3226 git_store: WeakEntity<GitStore>,
3227 cx: &mut Context<Self>,
3228 ) -> Self {
3229 let snapshot =
3230 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3231 Repository {
3232 this: cx.weak_entity(),
3233 git_store,
3234 snapshot,
3235 commit_message_buffer: None,
3236 askpass_delegates: Default::default(),
3237 paths_needing_status_update: Default::default(),
3238 latest_askpass_id: 0,
3239 job_sender: Repository::spawn_local_git_worker(
3240 work_directory_abs_path,
3241 dot_git_abs_path,
3242 repository_dir_abs_path,
3243 common_dir_abs_path,
3244 project_environment,
3245 fs,
3246 cx,
3247 ),
3248 job_id: 0,
3249 active_jobs: Default::default(),
3250 }
3251 }
3252
3253 fn remote(
3254 id: RepositoryId,
3255 work_directory_abs_path: Arc<Path>,
3256 path_style: PathStyle,
3257 project_id: ProjectId,
3258 client: AnyProtoClient,
3259 git_store: WeakEntity<GitStore>,
3260 cx: &mut Context<Self>,
3261 ) -> Self {
3262 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3263 Self {
3264 this: cx.weak_entity(),
3265 snapshot,
3266 commit_message_buffer: None,
3267 git_store,
3268 paths_needing_status_update: Default::default(),
3269 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3270 askpass_delegates: Default::default(),
3271 latest_askpass_id: 0,
3272 active_jobs: Default::default(),
3273 job_id: 0,
3274 }
3275 }
3276
3277 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3278 self.git_store.upgrade()
3279 }
3280
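    /// Reloads the index and HEAD texts for every open buffer that belongs to this repository, then
    /// notifies the per-buffer diff state (and any downstream client) of the bases that actually
    /// changed.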
3281 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3282 let this = cx.weak_entity();
3283 let git_store = self.git_store.clone();
3284 let _ = self.send_keyed_job(
3285 Some(GitJobKey::ReloadBufferDiffBases),
3286 None,
3287 |state, mut cx| async move {
3288 let RepositoryState::Local { backend, .. } = state else {
3289 log::error!("tried to recompute diffs for a non-local repository");
3290 return Ok(());
3291 };
3292
3293 let Some(this) = this.upgrade() else {
3294 return Ok(());
3295 };
3296
3297 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3298 git_store.update(cx, |git_store, cx| {
3299 git_store
3300 .diffs
3301 .iter()
3302 .filter_map(|(buffer_id, diff_state)| {
3303 let buffer_store = git_store.buffer_store.read(cx);
3304 let buffer = buffer_store.get(*buffer_id)?;
3305 let file = File::from_dyn(buffer.read(cx).file())?;
3306 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3307 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3308 log::debug!(
3309 "start reload diff bases for repo path {}",
3310 repo_path.as_unix_str()
3311 );
3312 diff_state.update(cx, |diff_state, _| {
3313 let has_unstaged_diff = diff_state
3314 .unstaged_diff
3315 .as_ref()
3316 .is_some_and(|diff| diff.is_upgradable());
3317 let has_uncommitted_diff = diff_state
3318 .uncommitted_diff
3319 .as_ref()
3320 .is_some_and(|set| set.is_upgradable());
3321
3322 Some((
3323 buffer,
3324 repo_path,
3325 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3326 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3327 ))
3328 })
3329 })
3330 .collect::<Vec<_>>()
3331 })
3332 })??;
3333
3334 let buffer_diff_base_changes = cx
3335 .background_spawn(async move {
3336 let mut changes = Vec::new();
3337 for (buffer, repo_path, current_index_text, current_head_text) in
3338 &repo_diff_state_updates
3339 {
3340 let index_text = if current_index_text.is_some() {
3341 backend.load_index_text(repo_path.clone()).await
3342 } else {
3343 None
3344 };
3345 let head_text = if current_head_text.is_some() {
3346 backend.load_committed_text(repo_path.clone()).await
3347 } else {
3348 None
3349 };
3350
3351 let change =
3352 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3353 (Some(current_index), Some(current_head)) => {
3354 let index_changed =
3355 index_text.as_ref() != current_index.as_deref();
3356 let head_changed =
3357 head_text.as_ref() != current_head.as_deref();
3358 if index_changed && head_changed {
3359 if index_text == head_text {
3360 Some(DiffBasesChange::SetBoth(head_text))
3361 } else {
3362 Some(DiffBasesChange::SetEach {
3363 index: index_text,
3364 head: head_text,
3365 })
3366 }
3367 } else if index_changed {
3368 Some(DiffBasesChange::SetIndex(index_text))
3369 } else if head_changed {
3370 Some(DiffBasesChange::SetHead(head_text))
3371 } else {
3372 None
3373 }
3374 }
3375 (Some(current_index), None) => {
3376 let index_changed =
3377 index_text.as_ref() != current_index.as_deref();
3378 index_changed
3379 .then_some(DiffBasesChange::SetIndex(index_text))
3380 }
3381 (None, Some(current_head)) => {
3382 let head_changed =
3383 head_text.as_ref() != current_head.as_deref();
3384 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3385 }
3386 (None, None) => None,
3387 };
3388
3389 changes.push((buffer.clone(), change))
3390 }
3391 changes
3392 })
3393 .await;
3394
3395 git_store.update(&mut cx, |git_store, cx| {
3396 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3397 let buffer_snapshot = buffer.read(cx).text_snapshot();
3398 let buffer_id = buffer_snapshot.remote_id();
3399 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3400 continue;
3401 };
3402
3403 let downstream_client = git_store.downstream_client();
3404 diff_state.update(cx, |diff_state, cx| {
3405 use proto::update_diff_bases::Mode;
3406
3407 if let Some((diff_bases_change, (client, project_id))) =
3408 diff_bases_change.clone().zip(downstream_client)
3409 {
3410 let (staged_text, committed_text, mode) = match diff_bases_change {
3411 DiffBasesChange::SetIndex(index) => {
3412 (index, None, Mode::IndexOnly)
3413 }
3414 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3415 DiffBasesChange::SetEach { index, head } => {
3416 (index, head, Mode::IndexAndHead)
3417 }
3418 DiffBasesChange::SetBoth(text) => {
3419 (None, text, Mode::IndexMatchesHead)
3420 }
3421 };
3422 client
3423 .send(proto::UpdateDiffBases {
3424 project_id: project_id.to_proto(),
3425 buffer_id: buffer_id.to_proto(),
3426 staged_text,
3427 committed_text,
3428 mode: mode as i32,
3429 })
3430 .log_err();
3431 }
3432
3433 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3434 });
3435 }
3436 })
3437 },
3438 );
3439 }
3440
3441 pub fn send_job<F, Fut, R>(
3442 &mut self,
3443 status: Option<SharedString>,
3444 job: F,
3445 ) -> oneshot::Receiver<R>
3446 where
3447 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3448 Fut: Future<Output = R> + 'static,
3449 R: Send + 'static,
3450 {
3451 self.send_keyed_job(None, status, job)
3452 }
3453
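    /// Enqueues a job on the repository's worker, optionally under a key, recording it in
    /// `active_jobs` (with the given status message) while it runs.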
3454 fn send_keyed_job<F, Fut, R>(
3455 &mut self,
3456 key: Option<GitJobKey>,
3457 status: Option<SharedString>,
3458 job: F,
3459 ) -> oneshot::Receiver<R>
3460 where
3461 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3462 Fut: Future<Output = R> + 'static,
3463 R: Send + 'static,
3464 {
3465 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3466 let job_id = post_inc(&mut self.job_id);
3467 let this = self.this.clone();
3468 self.job_sender
3469 .unbounded_send(GitJob {
3470 key,
3471 job: Box::new(move |state, cx: &mut AsyncApp| {
3472 let job = job(state, cx.clone());
3473 cx.spawn(async move |cx| {
3474 if let Some(s) = status.clone() {
3475 this.update(cx, |this, cx| {
3476 this.active_jobs.insert(
3477 job_id,
3478 JobInfo {
3479 start: Instant::now(),
3480 message: s.clone(),
3481 },
3482 );
3483
3484 cx.notify();
3485 })
3486 .ok();
3487 }
3488 let result = job.await;
3489
3490 this.update(cx, |this, cx| {
3491 this.active_jobs.remove(&job_id);
3492 cx.notify();
3493 })
3494 .ok();
3495
3496 result_tx.send(result).ok();
3497 })
3498 }),
3499 })
3500 .ok();
3501 result_rx
3502 }
3503
3504 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3505 let Some(git_store) = self.git_store.upgrade() else {
3506 return;
3507 };
3508 let entity = cx.entity();
3509 git_store.update(cx, |git_store, cx| {
3510 let Some((&id, _)) = git_store
3511 .repositories
3512 .iter()
3513 .find(|(_, handle)| *handle == &entity)
3514 else {
3515 return;
3516 };
3517 git_store.active_repo_id = Some(id);
3518 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3519 });
3520 }
3521
3522 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3523 self.snapshot.status()
3524 }
3525
3526 pub fn cached_stash(&self) -> GitStash {
3527 self.snapshot.stash_entries.clone()
3528 }
3529
3530 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3531 let git_store = self.git_store.upgrade()?;
3532 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3533 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3534 let abs_path = SanitizedPath::new(&abs_path);
3535 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3536 Some(ProjectPath {
3537 worktree_id: worktree.read(cx).id(),
3538 path: relative_path,
3539 })
3540 }
3541
3542 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3543 let git_store = self.git_store.upgrade()?;
3544 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3545 let abs_path = worktree_store.absolutize(path, cx)?;
3546 self.snapshot.abs_path_to_repo_path(&abs_path)
3547 }
3548
3549 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3550 other
3551 .read(cx)
3552 .snapshot
3553 .work_directory_abs_path
3554 .starts_with(&self.snapshot.work_directory_abs_path)
3555 }
3556
3557 pub fn open_commit_buffer(
3558 &mut self,
3559 languages: Option<Arc<LanguageRegistry>>,
3560 buffer_store: Entity<BufferStore>,
3561 cx: &mut Context<Self>,
3562 ) -> Task<Result<Entity<Buffer>>> {
3563 let id = self.id;
3564 if let Some(buffer) = self.commit_message_buffer.clone() {
3565 return Task::ready(Ok(buffer));
3566 }
3567 let this = cx.weak_entity();
3568
3569 let rx = self.send_job(None, move |state, mut cx| async move {
3570 let Some(this) = this.upgrade() else {
3571 bail!("git store was dropped");
3572 };
3573 match state {
3574 RepositoryState::Local { .. } => {
3575 this.update(&mut cx, |_, cx| {
3576 Self::open_local_commit_buffer(languages, buffer_store, cx)
3577 })?
3578 .await
3579 }
3580 RepositoryState::Remote { project_id, client } => {
3581 let request = client.request(proto::OpenCommitMessageBuffer {
3582 project_id: project_id.0,
3583 repository_id: id.to_proto(),
3584 });
3585 let response = request.await.context("requesting to open commit buffer")?;
3586 let buffer_id = BufferId::new(response.buffer_id)?;
3587 let buffer = buffer_store
3588 .update(&mut cx, |buffer_store, cx| {
3589 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3590 })?
3591 .await?;
3592 if let Some(language_registry) = languages {
3593 let git_commit_language =
3594 language_registry.language_for_name("Git Commit").await?;
3595 buffer.update(&mut cx, |buffer, cx| {
3596 buffer.set_language(Some(git_commit_language), cx);
3597 })?;
3598 }
3599 this.update(&mut cx, |this, _| {
3600 this.commit_message_buffer = Some(buffer.clone());
3601 })?;
3602 Ok(buffer)
3603 }
3604 }
3605 });
3606
3607 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3608 }
3609
3610 fn open_local_commit_buffer(
3611 language_registry: Option<Arc<LanguageRegistry>>,
3612 buffer_store: Entity<BufferStore>,
3613 cx: &mut Context<Self>,
3614 ) -> Task<Result<Entity<Buffer>>> {
3615 cx.spawn(async move |repository, cx| {
3616 let buffer = buffer_store
3617 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3618 .await?;
3619
3620 if let Some(language_registry) = language_registry {
3621 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3622 buffer.update(cx, |buffer, cx| {
3623 buffer.set_language(Some(git_commit_language), cx);
3624 })?;
3625 }
3626
3627 repository.update(cx, |repository, _| {
3628 repository.commit_message_buffer = Some(buffer.clone());
3629 })?;
3630 Ok(buffer)
3631 })
3632 }
3633
3634 pub fn checkout_files(
3635 &mut self,
3636 commit: &str,
3637 paths: Vec<RepoPath>,
3638 _cx: &mut App,
3639 ) -> oneshot::Receiver<Result<()>> {
3640 let commit = commit.to_string();
3641 let id = self.id;
3642
3643 self.send_job(
3644 Some(format!("git checkout {}", commit).into()),
3645 move |git_repo, _| async move {
3646 match git_repo {
3647 RepositoryState::Local {
3648 backend,
3649 environment,
3650 ..
3651 } => {
3652 backend
3653 .checkout_files(commit, paths, environment.clone())
3654 .await
3655 }
3656 RepositoryState::Remote { project_id, client } => {
3657 client
3658 .request(proto::GitCheckoutFiles {
3659 project_id: project_id.0,
3660 repository_id: id.to_proto(),
3661 commit,
3662 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3663 })
3664 .await?;
3665
3666 Ok(())
3667 }
3668 }
3669 },
3670 )
3671 }
3672
3673 pub fn reset(
3674 &mut self,
3675 commit: String,
3676 reset_mode: ResetMode,
3677 _cx: &mut App,
3678 ) -> oneshot::Receiver<Result<()>> {
3679 let id = self.id;
3680
3681 self.send_job(None, move |git_repo, _| async move {
3682 match git_repo {
3683 RepositoryState::Local {
3684 backend,
3685 environment,
3686 ..
3687 } => backend.reset(commit, reset_mode, environment).await,
3688 RepositoryState::Remote { project_id, client } => {
3689 client
3690 .request(proto::GitReset {
3691 project_id: project_id.0,
3692 repository_id: id.to_proto(),
3693 commit,
3694 mode: match reset_mode {
3695 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3696 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3697 },
3698 })
3699 .await?;
3700
3701 Ok(())
3702 }
3703 }
3704 })
3705 }
3706
3707 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3708 let id = self.id;
3709 self.send_job(None, move |git_repo, _cx| async move {
3710 match git_repo {
3711 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3712 RepositoryState::Remote { project_id, client } => {
3713 let resp = client
3714 .request(proto::GitShow {
3715 project_id: project_id.0,
3716 repository_id: id.to_proto(),
3717 commit,
3718 })
3719 .await?;
3720
3721 Ok(CommitDetails {
3722 sha: resp.sha.into(),
3723 message: resp.message.into(),
3724 commit_timestamp: resp.commit_timestamp,
3725 author_email: resp.author_email.into(),
3726 author_name: resp.author_name.into(),
3727 })
3728 }
3729 }
3730 })
3731 }
3732
3733 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3734 let id = self.id;
3735 self.send_job(None, move |git_repo, cx| async move {
3736 match git_repo {
3737 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3738 RepositoryState::Remote {
3739 client, project_id, ..
3740 } => {
3741 let response = client
3742 .request(proto::LoadCommitDiff {
3743 project_id: project_id.0,
3744 repository_id: id.to_proto(),
3745 commit,
3746 })
3747 .await?;
3748 Ok(CommitDiff {
3749 files: response
3750 .files
3751 .into_iter()
3752 .map(|file| {
3753 Ok(CommitFile {
3754 path: RepoPath::from_proto(&file.path)?,
3755 old_text: file.old_text,
3756 new_text: file.new_text,
3757 })
3758 })
3759 .collect::<Result<Vec<_>>>()?,
3760 })
3761 }
3762 }
3763 })
3764 }
3765
3766 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3767 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3768 }
3769
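/// Collects save tasks for any open buffers that correspond to the given
/// repository paths, still exist on disk, and have unsaved edits. Used before
/// index mutations so that the staged contents match what the user sees.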
3770 fn save_buffers<'a>(
3771 &self,
3772 entries: impl IntoIterator<Item = &'a RepoPath>,
3773 cx: &mut Context<Self>,
3774 ) -> Vec<Task<anyhow::Result<()>>> {
3775 let mut save_futures = Vec::new();
3776 if let Some(buffer_store) = self.buffer_store(cx) {
3777 buffer_store.update(cx, |buffer_store, cx| {
3778 for path in entries {
3779 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3780 continue;
3781 };
3782 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3783 && buffer
3784 .read(cx)
3785 .file()
3786 .is_some_and(|file| file.disk_state().exists())
3787 && buffer.read(cx).has_unsaved_edits()
3788 {
3789 save_futures.push(buffer_store.save_buffer(buffer, cx));
3790 }
3791 }
3792 })
3793 }
3794 save_futures
3795 }
3796
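/// Saves any dirty buffers for `entries` and then stages those paths
/// (`git add`). When exactly one path is given, the job is keyed on that path
/// so that a newer index write for the same file can supersede it.
///
/// Illustrative usage sketch (not compiled; assumes an `Entity<Repository>`
/// named `repository`, a `repo_path: RepoPath`, and an async gpui context):
///
/// ```ignore
/// let task = repository.update(cx, |repo, cx| {
///     repo.stage_entries(vec![repo_path.clone()], cx)
/// })?;
/// task.await?;
/// ```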
3797 pub fn stage_entries(
3798 &self,
3799 entries: Vec<RepoPath>,
3800 cx: &mut Context<Self>,
3801 ) -> Task<anyhow::Result<()>> {
3802 if entries.is_empty() {
3803 return Task::ready(Ok(()));
3804 }
3805 let id = self.id;
3806 let save_tasks = self.save_buffers(&entries, cx);
3807 let paths = entries
3808 .iter()
3809 .map(|p| p.as_unix_str())
3810 .collect::<Vec<_>>()
3811 .join(" ");
3812 let status = format!("git add {paths}");
3813 let job_key = match entries.len() {
3814 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3815 _ => None,
3816 };
3817
3818 cx.spawn(async move |this, cx| {
3819 for save_task in save_tasks {
3820 save_task.await?;
3821 }
3822
3823 this.update(cx, |this, _| {
3824 this.send_keyed_job(
3825 job_key,
3826 Some(status.into()),
3827 move |git_repo, _cx| async move {
3828 match git_repo {
3829 RepositoryState::Local {
3830 backend,
3831 environment,
3832 ..
3833 } => backend.stage_paths(entries, environment.clone()).await,
3834 RepositoryState::Remote { project_id, client } => {
3835 client
3836 .request(proto::Stage {
3837 project_id: project_id.0,
3838 repository_id: id.to_proto(),
3839 paths: entries
3840 .into_iter()
3841 .map(|repo_path| repo_path.to_proto())
3842 .collect(),
3843 })
3844 .await
3845 .context("sending stage request")?;
3846
3847 Ok(())
3848 }
3849 }
3850 },
3851 )
3852 })?
3853 .await??;
3854
3855 Ok(())
3856 })
3857 }
3858
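/// Saves any dirty buffers for `entries` and then removes those paths from
/// the index (`git reset -- <paths>`), locally or over RPC. Like
/// `stage_entries`, a single-path request is keyed on that path.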
3859 pub fn unstage_entries(
3860 &self,
3861 entries: Vec<RepoPath>,
3862 cx: &mut Context<Self>,
3863 ) -> Task<anyhow::Result<()>> {
3864 if entries.is_empty() {
3865 return Task::ready(Ok(()));
3866 }
3867 let id = self.id;
3868 let save_tasks = self.save_buffers(&entries, cx);
3869 let paths = entries
3870 .iter()
3871 .map(|p| p.as_unix_str())
3872 .collect::<Vec<_>>()
3873 .join(" ");
3874 let status = format!("git reset {paths}");
3875 let job_key = match entries.len() {
3876 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3877 _ => None,
3878 };
3879
3880 cx.spawn(async move |this, cx| {
3881 for save_task in save_tasks {
3882 save_task.await?;
3883 }
3884
3885 this.update(cx, |this, _| {
3886 this.send_keyed_job(
3887 job_key,
3888 Some(status.into()),
3889 move |git_repo, _cx| async move {
3890 match git_repo {
3891 RepositoryState::Local {
3892 backend,
3893 environment,
3894 ..
3895 } => backend.unstage_paths(entries, environment).await,
3896 RepositoryState::Remote { project_id, client } => {
3897 client
3898 .request(proto::Unstage {
3899 project_id: project_id.0,
3900 repository_id: id.to_proto(),
3901 paths: entries
3902 .into_iter()
3903 .map(|repo_path| repo_path.to_proto())
3904 .collect(),
3905 })
3906 .await
3907 .context("sending unstage request")?;
3908
3909 Ok(())
3910 }
3911 }
3912 },
3913 )
3914 })?
3915 .await??;
3916
3917 Ok(())
3918 })
3919 }
3920
3921 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3922 let to_stage = self
3923 .cached_status()
3924 .filter(|entry| !entry.status.staging().is_fully_staged())
3925 .map(|entry| entry.repo_path)
3926 .collect();
3927 self.stage_entries(to_stage, cx)
3928 }
3929
3930 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3931 let to_unstage = self
3932 .cached_status()
3933 .filter(|entry| entry.status.staging().has_staged())
3934 .map(|entry| entry.repo_path)
3935 .collect();
3936 self.unstage_entries(to_unstage, cx)
3937 }
3938
3939 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3940 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3941
3942 self.stash_entries(to_stash, cx)
3943 }
3944
3945 pub fn stash_entries(
3946 &mut self,
3947 entries: Vec<RepoPath>,
3948 cx: &mut Context<Self>,
3949 ) -> Task<anyhow::Result<()>> {
3950 let id = self.id;
3951
3952 cx.spawn(async move |this, cx| {
3953 this.update(cx, |this, _| {
3954 this.send_job(None, move |git_repo, _cx| async move {
3955 match git_repo {
3956 RepositoryState::Local {
3957 backend,
3958 environment,
3959 ..
3960 } => backend.stash_paths(entries, environment).await,
3961 RepositoryState::Remote { project_id, client } => {
3962 client
3963 .request(proto::Stash {
3964 project_id: project_id.0,
3965 repository_id: id.to_proto(),
3966 paths: entries
3967 .into_iter()
3968 .map(|repo_path| repo_path.to_proto())
3969 .collect(),
3970 })
3971 .await
3972 .context("sending stash request")?;
3973 Ok(())
3974 }
3975 }
3976 })
3977 })?
3978 .await??;
3979 Ok(())
3980 })
3981 }
3982
3983 pub fn stash_pop(
3984 &mut self,
3985 index: Option<usize>,
3986 cx: &mut Context<Self>,
3987 ) -> Task<anyhow::Result<()>> {
3988 let id = self.id;
3989 cx.spawn(async move |this, cx| {
3990 this.update(cx, |this, _| {
3991 this.send_job(None, move |git_repo, _cx| async move {
3992 match git_repo {
3993 RepositoryState::Local {
3994 backend,
3995 environment,
3996 ..
3997 } => backend.stash_pop(index, environment).await,
3998 RepositoryState::Remote { project_id, client } => {
3999 client
4000 .request(proto::StashPop {
4001 project_id: project_id.0,
4002 repository_id: id.to_proto(),
4003 stash_index: index.map(|i| i as u64),
4004 })
4005 .await
4006 .context("sending stash pop request")?;
4007 Ok(())
4008 }
4009 }
4010 })
4011 })?
4012 .await??;
4013 Ok(())
4014 })
4015 }
4016
4017 pub fn stash_apply(
4018 &mut self,
4019 index: Option<usize>,
4020 cx: &mut Context<Self>,
4021 ) -> Task<anyhow::Result<()>> {
4022 let id = self.id;
4023 cx.spawn(async move |this, cx| {
4024 this.update(cx, |this, _| {
4025 this.send_job(None, move |git_repo, _cx| async move {
4026 match git_repo {
4027 RepositoryState::Local {
4028 backend,
4029 environment,
4030 ..
4031 } => backend.stash_apply(index, environment).await,
4032 RepositoryState::Remote { project_id, client } => {
4033 client
4034 .request(proto::StashApply {
4035 project_id: project_id.0,
4036 repository_id: id.to_proto(),
4037 stash_index: index.map(|i| i as u64),
4038 })
4039 .await
4040 .context("sending stash apply request")?;
4041 Ok(())
4042 }
4043 }
4044 })
4045 })?
4046 .await??;
4047 Ok(())
4048 })
4049 }
4050
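/// Drops the stash entry at `index` (or the latest entry when `None`). For
/// local repositories the stash list is reloaded on success and the updated
/// snapshot is forwarded downstream when the project is being shared.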
4051 pub fn stash_drop(
4052 &mut self,
4053 index: Option<usize>,
4054 cx: &mut Context<Self>,
4055 ) -> oneshot::Receiver<anyhow::Result<()>> {
4056 let id = self.id;
4057 let updates_tx = self
4058 .git_store()
4059 .and_then(|git_store| match &git_store.read(cx).state {
4060 GitStoreState::Local { downstream, .. } => downstream
4061 .as_ref()
4062 .map(|downstream| downstream.updates_tx.clone()),
4063 _ => None,
4064 });
4065 let this = cx.weak_entity();
4066 self.send_job(None, move |git_repo, mut cx| async move {
4067 match git_repo {
4068 RepositoryState::Local {
4069 backend,
4070 environment,
4071 ..
4072 } => {
4073 // TODO: it would be nice not to have to do this manually
4074 let result = backend.stash_drop(index, environment).await;
4075 if result.is_ok()
4076 && let Ok(stash_entries) = backend.stash_entries().await
4077 {
4078 let snapshot = this.update(&mut cx, |this, cx| {
4079 this.snapshot.stash_entries = stash_entries;
4080 cx.emit(RepositoryEvent::StashEntriesChanged);
4081 this.snapshot.clone()
4082 })?;
4083 if let Some(updates_tx) = updates_tx {
4084 updates_tx
4085 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4086 .ok();
4087 }
4088 }
4089
4090 result
4091 }
4092 RepositoryState::Remote { project_id, client } => {
4093 client
4094 .request(proto::StashDrop {
4095 project_id: project_id.0,
4096 repository_id: id.to_proto(),
4097 stash_index: index.map(|i| i as u64),
4098 })
4099 .await
4100 .context("sending stash drop request")?;
4101 Ok(())
4102 }
4103 }
4104 })
4105 }
4106
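/// Commits the currently staged changes with `message`, optionally overriding
/// the author via `name_and_email` and honoring `options` such as amend and
/// signoff.
///
/// Illustrative sketch only (not compiled; assumes an `Entity<Repository>`
/// named `repository`, a prepared `options: CommitOptions`, and an async gpui
/// context):
///
/// ```ignore
/// let rx = repository.update(cx, |repo, cx| {
///     repo.commit("Fix typo in README".into(), None, options, cx)
/// })?;
/// rx.await??;
/// ```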
4107 pub fn commit(
4108 &mut self,
4109 message: SharedString,
4110 name_and_email: Option<(SharedString, SharedString)>,
4111 options: CommitOptions,
4112 _cx: &mut App,
4113 ) -> oneshot::Receiver<Result<()>> {
4114 let id = self.id;
4115
4116 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4117 match git_repo {
4118 RepositoryState::Local {
4119 backend,
4120 environment,
4121 ..
4122 } => {
4123 backend
4124 .commit(message, name_and_email, options, environment)
4125 .await
4126 }
4127 RepositoryState::Remote { project_id, client } => {
4128 let (name, email) = name_and_email.unzip();
4129 client
4130 .request(proto::Commit {
4131 project_id: project_id.0,
4132 repository_id: id.to_proto(),
4133 message: String::from(message),
4134 name: name.map(String::from),
4135 email: email.map(String::from),
4136 options: Some(proto::commit::CommitOptions {
4137 amend: options.amend,
4138 signoff: options.signoff,
4139 }),
4140 })
4141 .await
4142 .context("sending commit request")?;
4143
4144 Ok(())
4145 }
4146 }
4147 })
4148 }
4149
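/// Runs `git fetch` with the given options, using `askpass` to answer any
/// credential prompts. For remote projects the delegate is registered under a
/// fresh askpass id for the duration of the request so the host can proxy
/// prompts back to this client.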
4150 pub fn fetch(
4151 &mut self,
4152 fetch_options: FetchOptions,
4153 askpass: AskPassDelegate,
4154 _cx: &mut App,
4155 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4156 let askpass_delegates = self.askpass_delegates.clone();
4157 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4158 let id = self.id;
4159
4160 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4161 match git_repo {
4162 RepositoryState::Local {
4163 backend,
4164 environment,
4165 ..
4166 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4167 RepositoryState::Remote { project_id, client } => {
4168 askpass_delegates.lock().insert(askpass_id, askpass);
4169 let _defer = util::defer(|| {
4170 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4171 debug_assert!(askpass_delegate.is_some());
4172 });
4173
4174 let response = client
4175 .request(proto::Fetch {
4176 project_id: project_id.0,
4177 repository_id: id.to_proto(),
4178 askpass_id,
4179 remote: fetch_options.to_proto(),
4180 })
4181 .await
4182 .context("sending fetch request")?;
4183
4184 Ok(RemoteCommandOutput {
4185 stdout: response.stdout,
4186 stderr: response.stderr,
4187 })
4188 }
4189 }
4190 })
4191 }
4192
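/// Pushes `branch` to `remote`, optionally with `--set-upstream` or
/// `--force-with-lease`. For local repositories the branch list is reloaded
/// afterwards so that upstream tracking information in the snapshot stays
/// current, and the updated snapshot is forwarded downstream if shared.
///
/// Illustrative sketch only (not compiled; assumes `repository`, an
/// `askpass: AskPassDelegate`, and an async gpui context):
///
/// ```ignore
/// let rx = repository.update(cx, |repo, cx| {
///     repo.push("main".into(), "origin".into(), None, askpass, cx)
/// })?;
/// let output = rx.await??;
/// log::info!("push stderr: {}", output.stderr);
/// ```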
4193 pub fn push(
4194 &mut self,
4195 branch: SharedString,
4196 remote: SharedString,
4197 options: Option<PushOptions>,
4198 askpass: AskPassDelegate,
4199 cx: &mut Context<Self>,
4200 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4201 let askpass_delegates = self.askpass_delegates.clone();
4202 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4203 let id = self.id;
4204
4205 let args = options
4206 .map(|option| match option {
4207 PushOptions::SetUpstream => " --set-upstream",
4208 PushOptions::Force => " --force-with-lease",
4209 })
4210 .unwrap_or("");
4211
4212 let updates_tx = self
4213 .git_store()
4214 .and_then(|git_store| match &git_store.read(cx).state {
4215 GitStoreState::Local { downstream, .. } => downstream
4216 .as_ref()
4217 .map(|downstream| downstream.updates_tx.clone()),
4218 _ => None,
4219 });
4220
4221 let this = cx.weak_entity();
4222 self.send_job(
4223 Some(format!("git push{} {} {}", args, remote, branch).into()),
4224 move |git_repo, mut cx| async move {
4225 match git_repo {
4226 RepositoryState::Local {
4227 backend,
4228 environment,
4229 ..
4230 } => {
4231 let result = backend
4232 .push(
4233 branch.to_string(),
4234 remote.to_string(),
4235 options,
4236 askpass,
4237 environment.clone(),
4238 cx.clone(),
4239 )
4240 .await;
4241 // TODO: it would be nice not to have to do this manually
4242 if result.is_ok() {
4243 let branches = backend.branches().await?;
4244 let branch = branches.into_iter().find(|branch| branch.is_head);
4245 log::info!("head branch after scan is {branch:?}");
4246 let snapshot = this.update(&mut cx, |this, cx| {
4247 this.snapshot.branch = branch;
4248 cx.emit(RepositoryEvent::BranchChanged);
4249 this.snapshot.clone()
4250 })?;
4251 if let Some(updates_tx) = updates_tx {
4252 updates_tx
4253 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4254 .ok();
4255 }
4256 }
4257 result
4258 }
4259 RepositoryState::Remote { project_id, client } => {
4260 askpass_delegates.lock().insert(askpass_id, askpass);
4261 let _defer = util::defer(|| {
4262 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4263 debug_assert!(askpass_delegate.is_some());
4264 });
4265 let response = client
4266 .request(proto::Push {
4267 project_id: project_id.0,
4268 repository_id: id.to_proto(),
4269 askpass_id,
4270 branch_name: branch.to_string(),
4271 remote_name: remote.to_string(),
4272 options: options.map(|options| match options {
4273 PushOptions::Force => proto::push::PushOptions::Force,
4274 PushOptions::SetUpstream => {
4275 proto::push::PushOptions::SetUpstream
4276 }
4277 }
4278 as i32),
4279 })
4280 .await
4281 .context("sending push request")?;
4282
4283 Ok(RemoteCommandOutput {
4284 stdout: response.stdout,
4285 stderr: response.stderr,
4286 })
4287 }
4288 }
4289 },
4290 )
4291 }
4292
4293 pub fn pull(
4294 &mut self,
4295 branch: SharedString,
4296 remote: SharedString,
4297 askpass: AskPassDelegate,
4298 _cx: &mut App,
4299 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4300 let askpass_delegates = self.askpass_delegates.clone();
4301 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4302 let id = self.id;
4303
4304 self.send_job(
4305 Some(format!("git pull {} {}", remote, branch).into()),
4306 move |git_repo, cx| async move {
4307 match git_repo {
4308 RepositoryState::Local {
4309 backend,
4310 environment,
4311 ..
4312 } => {
4313 backend
4314 .pull(
4315 branch.to_string(),
4316 remote.to_string(),
4317 askpass,
4318 environment.clone(),
4319 cx,
4320 )
4321 .await
4322 }
4323 RepositoryState::Remote { project_id, client } => {
4324 askpass_delegates.lock().insert(askpass_id, askpass);
4325 let _defer = util::defer(|| {
4326 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4327 debug_assert!(askpass_delegate.is_some());
4328 });
4329 let response = client
4330 .request(proto::Pull {
4331 project_id: project_id.0,
4332 repository_id: id.to_proto(),
4333 askpass_id,
4334 branch_name: branch.to_string(),
4335 remote_name: remote.to_string(),
4336 })
4337 .await
4338 .context("sending pull request")?;
4339
4340 Ok(RemoteCommandOutput {
4341 stdout: response.stdout,
4342 stderr: response.stderr,
4343 })
4344 }
4345 }
4346 },
4347 )
4348 }
4349
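/// Queues a keyed job that overwrites (or clears, when `content` is `None`)
/// the index entry for `path`. Jobs are keyed per path so rapid successive
/// writes to the same file supersede one another; when provided, the buffer's
/// diff state records the hunk-staging operation count that was current when
/// the write was issued.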
4350 fn spawn_set_index_text_job(
4351 &mut self,
4352 path: RepoPath,
4353 content: Option<String>,
4354 hunk_staging_operation_count: Option<usize>,
4355 cx: &mut Context<Self>,
4356 ) -> oneshot::Receiver<anyhow::Result<()>> {
4357 let id = self.id;
4358 let this = cx.weak_entity();
4359 let git_store = self.git_store.clone();
4360 self.send_keyed_job(
4361 Some(GitJobKey::WriteIndex(path.clone())),
4362 None,
4363 move |git_repo, mut cx| async move {
4364 log::debug!(
4365 "start updating index text for buffer {}",
4366 path.as_unix_str()
4367 );
4368 match git_repo {
4369 RepositoryState::Local {
4370 backend,
4371 environment,
4372 ..
4373 } => {
4374 backend
4375 .set_index_text(path.clone(), content, environment.clone())
4376 .await?;
4377 }
4378 RepositoryState::Remote { project_id, client } => {
4379 client
4380 .request(proto::SetIndexText {
4381 project_id: project_id.0,
4382 repository_id: id.to_proto(),
4383 path: path.to_proto(),
4384 text: content,
4385 })
4386 .await?;
4387 }
4388 }
4389 log::debug!(
4390 "finish updating index text for buffer {}",
4391 path.as_unix_str()
4392 );
4393
4394 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4395 let project_path = this
4396 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4397 .ok()
4398 .flatten();
4399 git_store.update(&mut cx, |git_store, cx| {
4400 let buffer_id = git_store
4401 .buffer_store
4402 .read(cx)
4403 .get_by_path(&project_path?)?
4404 .read(cx)
4405 .remote_id();
4406 let diff_state = git_store.diffs.get(&buffer_id)?;
4407 diff_state.update(cx, |diff_state, _| {
4408 diff_state.hunk_staging_operation_count_as_of_write =
4409 hunk_staging_operation_count;
4410 });
4411 Some(())
4412 })?;
4413 }
4414 Ok(())
4415 },
4416 )
4417 }
4418
4419 pub fn get_remotes(
4420 &mut self,
4421 branch_name: Option<String>,
4422 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4423 let id = self.id;
4424 self.send_job(None, move |repo, _cx| async move {
4425 match repo {
4426 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4427 RepositoryState::Remote { project_id, client } => {
4428 let response = client
4429 .request(proto::GetRemotes {
4430 project_id: project_id.0,
4431 repository_id: id.to_proto(),
4432 branch_name,
4433 })
4434 .await?;
4435
4436 let remotes = response
4437 .remotes
4438 .into_iter()
4439 .map(|remotes| git::repository::Remote {
4440 name: remotes.name.into(),
4441 })
4442 .collect();
4443
4444 Ok(remotes)
4445 }
4446 }
4447 })
4448 }
4449
4450 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4451 let id = self.id;
4452 self.send_job(None, move |repo, _| async move {
4453 match repo {
4454 RepositoryState::Local { backend, .. } => backend.branches().await,
4455 RepositoryState::Remote { project_id, client } => {
4456 let response = client
4457 .request(proto::GitGetBranches {
4458 project_id: project_id.0,
4459 repository_id: id.to_proto(),
4460 })
4461 .await?;
4462
4463 let branches = response
4464 .branches
4465 .into_iter()
4466 .map(|branch| proto_to_branch(&branch))
4467 .collect();
4468
4469 Ok(branches)
4470 }
4471 }
4472 })
4473 }
4474
4475 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4476 let id = self.id;
4477 self.send_job(None, move |repo, _| async move {
4478 match repo {
4479 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4480 RepositoryState::Remote { project_id, client } => {
4481 let response = client
4482 .request(proto::GitGetWorktrees {
4483 project_id: project_id.0,
4484 repository_id: id.to_proto(),
4485 })
4486 .await?;
4487
4488 let worktrees = response
4489 .worktrees
4490 .into_iter()
4491 .map(|worktree| proto_to_worktree(&worktree))
4492 .collect();
4493
4494 Ok(worktrees)
4495 }
4496 }
4497 })
4498 }
4499
4500 pub fn create_worktree(
4501 &mut self,
4502 name: String,
4503 path: PathBuf,
4504 commit: Option<String>,
4505 ) -> oneshot::Receiver<Result<()>> {
4506 let id = self.id;
4507 self.send_job(
4508 Some("git worktree add".into()),
4509 move |repo, _cx| async move {
4510 match repo {
4511 RepositoryState::Local { backend, .. } => {
4512 backend.create_worktree(name, path, commit).await
4513 }
4514 RepositoryState::Remote { project_id, client } => {
4515 client
4516 .request(proto::GitCreateWorktree {
4517 project_id: project_id.0,
4518 repository_id: id.to_proto(),
4519 name,
4520 directory: path.to_string_lossy().to_string(),
4521 commit,
4522 })
4523 .await?;
4524
4525 Ok(())
4526 }
4527 }
4528 },
4529 )
4530 }
4531
4532 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4533 let id = self.id;
4534 self.send_job(None, move |repo, _| async move {
4535 match repo {
4536 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4537 RepositoryState::Remote { project_id, client } => {
4538 let response = client
4539 .request(proto::GetDefaultBranch {
4540 project_id: project_id.0,
4541 repository_id: id.to_proto(),
4542 })
4543 .await?;
4544
4545 anyhow::Ok(response.branch.map(SharedString::from))
4546 }
4547 }
4548 })
4549 }
4550
4551 pub fn diff_tree(
4552 &mut self,
4553 diff_type: DiffTreeType,
4554 _cx: &App,
4555 ) -> oneshot::Receiver<Result<TreeDiff>> {
4556 let repository_id = self.snapshot.id;
4557 self.send_job(None, move |repo, _cx| async move {
4558 match repo {
4559 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4560 RepositoryState::Remote { client, project_id } => {
4561 let response = client
4562 .request(proto::GetTreeDiff {
4563 project_id: project_id.0,
4564 repository_id: repository_id.0,
4565 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4566 base: diff_type.base().to_string(),
4567 head: diff_type.head().to_string(),
4568 })
4569 .await?;
4570
4571 let entries = response
4572 .entries
4573 .into_iter()
4574 .filter_map(|entry| {
4575 let status = match entry.status() {
4576 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4577 proto::tree_diff_status::Status::Modified => {
4578 TreeDiffStatus::Modified {
4579 old: git::Oid::from_str(
4580 &entry.oid.context("missing oid").log_err()?,
4581 )
4582 .log_err()?,
4583 }
4584 }
4585 proto::tree_diff_status::Status::Deleted => {
4586 TreeDiffStatus::Deleted {
4587 old: git::Oid::from_str(
4588 &entry.oid.context("missing oid").log_err()?,
4589 )
4590 .log_err()?,
4591 }
4592 }
4593 };
4594 Some((
4595 RepoPath(RelPath::from_proto(&entry.path).log_err()?),
4596 status,
4597 ))
4598 })
4599 .collect();
4600
4601 Ok(TreeDiff { entries })
4602 }
4603 }
4604 })
4605 }
4606
4607 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4608 let id = self.id;
4609 self.send_job(None, move |repo, _cx| async move {
4610 match repo {
4611 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4612 RepositoryState::Remote { project_id, client } => {
4613 let response = client
4614 .request(proto::GitDiff {
4615 project_id: project_id.0,
4616 repository_id: id.to_proto(),
4617 diff_type: match diff_type {
4618 DiffType::HeadToIndex => {
4619 proto::git_diff::DiffType::HeadToIndex.into()
4620 }
4621 DiffType::HeadToWorktree => {
4622 proto::git_diff::DiffType::HeadToWorktree.into()
4623 }
4624 },
4625 })
4626 .await?;
4627
4628 Ok(response.diff)
4629 }
4630 }
4631 })
4632 }
4633
4634 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4635 let id = self.id;
4636 self.send_job(
4637 Some(format!("git switch -c {branch_name}").into()),
4638 move |repo, _cx| async move {
4639 match repo {
4640 RepositoryState::Local { backend, .. } => {
4641 backend.create_branch(branch_name).await
4642 }
4643 RepositoryState::Remote { project_id, client } => {
4644 client
4645 .request(proto::GitCreateBranch {
4646 project_id: project_id.0,
4647 repository_id: id.to_proto(),
4648 branch_name,
4649 })
4650 .await?;
4651
4652 Ok(())
4653 }
4654 }
4655 },
4656 )
4657 }
4658
4659 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4660 let id = self.id;
4661 self.send_job(
4662 Some(format!("git switch {branch_name}").into()),
4663 move |repo, _cx| async move {
4664 match repo {
4665 RepositoryState::Local { backend, .. } => {
4666 backend.change_branch(branch_name).await
4667 }
4668 RepositoryState::Remote { project_id, client } => {
4669 client
4670 .request(proto::GitChangeBranch {
4671 project_id: project_id.0,
4672 repository_id: id.to_proto(),
4673 branch_name,
4674 })
4675 .await?;
4676
4677 Ok(())
4678 }
4679 }
4680 },
4681 )
4682 }
4683
4684 pub fn rename_branch(
4685 &mut self,
4686 branch: String,
4687 new_name: String,
4688 ) -> oneshot::Receiver<Result<()>> {
4689 let id = self.id;
4690 self.send_job(
4691 Some(format!("git branch -m {branch} {new_name}").into()),
4692 move |repo, _cx| async move {
4693 match repo {
4694 RepositoryState::Local { backend, .. } => {
4695 backend.rename_branch(branch, new_name).await
4696 }
4697 RepositoryState::Remote { project_id, client } => {
4698 client
4699 .request(proto::GitRenameBranch {
4700 project_id: project_id.0,
4701 repository_id: id.to_proto(),
4702 branch,
4703 new_name,
4704 })
4705 .await?;
4706
4707 Ok(())
4708 }
4709 }
4710 },
4711 )
4712 }
4713
4714 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4715 let id = self.id;
4716 self.send_job(None, move |repo, _cx| async move {
4717 match repo {
4718 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4719 RepositoryState::Remote { project_id, client } => {
4720 let response = client
4721 .request(proto::CheckForPushedCommits {
4722 project_id: project_id.0,
4723 repository_id: id.to_proto(),
4724 })
4725 .await?;
4726
4727 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4728
4729 Ok(branches)
4730 }
4731 }
4732 })
4733 }
4734
4735 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4736 self.send_job(None, |repo, _cx| async move {
4737 match repo {
4738 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4739 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4740 }
4741 })
4742 }
4743
4744 pub fn restore_checkpoint(
4745 &mut self,
4746 checkpoint: GitRepositoryCheckpoint,
4747 ) -> oneshot::Receiver<Result<()>> {
4748 self.send_job(None, move |repo, _cx| async move {
4749 match repo {
4750 RepositoryState::Local { backend, .. } => {
4751 backend.restore_checkpoint(checkpoint).await
4752 }
4753 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4754 }
4755 })
4756 }
4757
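/// Applies a `proto::UpdateRepository` message received from the remote host
/// to the local snapshot (branch, head commit, merge state, stash, and
/// statuses), emitting the corresponding repository events for anything that
/// changed.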
4758 pub(crate) fn apply_remote_update(
4759 &mut self,
4760 update: proto::UpdateRepository,
4761 cx: &mut Context<Self>,
4762 ) -> Result<()> {
4763 let conflicted_paths = TreeSet::from_ordered_entries(
4764 update
4765 .current_merge_conflicts
4766 .into_iter()
4767 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4768 );
4769 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4770 let new_head_commit = update
4771 .head_commit_details
4772 .as_ref()
4773 .map(proto_to_commit_details);
4774 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4775 cx.emit(RepositoryEvent::BranchChanged)
4776 }
4777 self.snapshot.branch = new_branch;
4778 self.snapshot.head_commit = new_head_commit;
4779
4780 self.snapshot.merge.conflicted_paths = conflicted_paths;
4781 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4782 let new_stash_entries = GitStash {
4783 entries: update
4784 .stash_entries
4785 .iter()
4786 .filter_map(|entry| proto_to_stash(entry).ok())
4787 .collect(),
4788 };
4789 if self.snapshot.stash_entries != new_stash_entries {
4790 cx.emit(RepositoryEvent::StashEntriesChanged)
4791 }
4792 self.snapshot.stash_entries = new_stash_entries;
4793
4794 let edits = update
4795 .removed_statuses
4796 .into_iter()
4797 .filter_map(|path| {
4798 Some(sum_tree::Edit::Remove(PathKey(
4799 RelPath::from_proto(&path).log_err()?,
4800 )))
4801 })
4802 .chain(
4803 update
4804 .updated_statuses
4805 .into_iter()
4806 .filter_map(|updated_status| {
4807 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4808 }),
4809 )
4810 .collect::<Vec<_>>();
4811 if !edits.is_empty() {
4812 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4813 }
4814 self.snapshot.statuses_by_path.edit(edits, ());
4815 if update.is_last_update {
4816 self.snapshot.scan_id = update.scan_id;
4817 }
4818 Ok(())
4819 }
4820
4821 pub fn compare_checkpoints(
4822 &mut self,
4823 left: GitRepositoryCheckpoint,
4824 right: GitRepositoryCheckpoint,
4825 ) -> oneshot::Receiver<Result<bool>> {
4826 self.send_job(None, move |repo, _cx| async move {
4827 match repo {
4828 RepositoryState::Local { backend, .. } => {
4829 backend.compare_checkpoints(left, right).await
4830 }
4831 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4832 }
4833 })
4834 }
4835
4836 pub fn diff_checkpoints(
4837 &mut self,
4838 base_checkpoint: GitRepositoryCheckpoint,
4839 target_checkpoint: GitRepositoryCheckpoint,
4840 ) -> oneshot::Receiver<Result<String>> {
4841 self.send_job(None, move |repo, _cx| async move {
4842 match repo {
4843 RepositoryState::Local { backend, .. } => {
4844 backend
4845 .diff_checkpoints(base_checkpoint, target_checkpoint)
4846 .await
4847 }
4848 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4849 }
4850 })
4851 }
4852
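/// Schedules a full git status rescan as a keyed job (so overlapping requests
/// collapse into one), replaces the snapshot with the recomputed one, and
/// forwards it downstream when the project is being shared.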
4853 fn schedule_scan(
4854 &mut self,
4855 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4856 cx: &mut Context<Self>,
4857 ) {
4858 let this = cx.weak_entity();
4859 let _ = self.send_keyed_job(
4860 Some(GitJobKey::ReloadGitState),
4861 None,
4862 |state, mut cx| async move {
4863 log::debug!("run scheduled git status scan");
4864
4865 let Some(this) = this.upgrade() else {
4866 return Ok(());
4867 };
4868 let RepositoryState::Local { backend, .. } = state else {
4869 bail!("not a local repository")
4870 };
4871 let (snapshot, events) = this
4872 .update(&mut cx, |this, _| {
4873 this.paths_needing_status_update.clear();
4874 compute_snapshot(
4875 this.id,
4876 this.work_directory_abs_path.clone(),
4877 this.snapshot.clone(),
4878 backend.clone(),
4879 )
4880 })?
4881 .await?;
4882 this.update(&mut cx, |this, cx| {
4883 this.snapshot = snapshot.clone();
4884 for event in events {
4885 cx.emit(event);
4886 }
4887 })?;
4888 if let Some(updates_tx) = updates_tx {
4889 updates_tx
4890 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4891 .ok();
4892 }
4893 Ok(())
4894 },
4895 );
4896 }
4897
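/// Spawns the background worker loop for a local repository: resolves the
/// working directory environment and git binary, opens the repository
/// backend, registers git hosting providers, and then drains queued jobs one
/// at a time, skipping any keyed job that is superseded by a later job with
/// the same key.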
4898 fn spawn_local_git_worker(
4899 work_directory_abs_path: Arc<Path>,
4900 dot_git_abs_path: Arc<Path>,
4901 _repository_dir_abs_path: Arc<Path>,
4902 _common_dir_abs_path: Arc<Path>,
4903 project_environment: WeakEntity<ProjectEnvironment>,
4904 fs: Arc<dyn Fs>,
4905 cx: &mut Context<Self>,
4906 ) -> mpsc::UnboundedSender<GitJob> {
4907 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4908
4909 cx.spawn(async move |_, cx| {
4910 let environment = project_environment
4911 .upgrade()
4912 .context("missing project environment")?
4913 .update(cx, |project_environment, cx| {
4914 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
4915 })?
4916 .await
4917 .unwrap_or_else(|| {
4918 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4919 HashMap::default()
4920 });
4921 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4922 let backend = cx
4923 .background_spawn(async move {
4924 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
4925 .or_else(|| which::which("git").ok());
4926 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4927 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4928 })
4929 .await?;
4930
4931 if let Some(git_hosting_provider_registry) =
4932 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4933 {
4934 git_hosting_providers::register_additional_providers(
4935 git_hosting_provider_registry,
4936 backend.clone(),
4937 );
4938 }
4939
4940 let state = RepositoryState::Local {
4941 backend,
4942 environment: Arc::new(environment),
4943 };
4944 let mut jobs = VecDeque::new();
4945 loop {
4946 while let Ok(Some(next_job)) = job_rx.try_next() {
4947 jobs.push_back(next_job);
4948 }
4949
4950 if let Some(job) = jobs.pop_front() {
4951 if let Some(current_key) = &job.key
4952 && jobs
4953 .iter()
4954 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4955 {
4956 continue;
4957 }
4958 (job.job)(state.clone(), cx).await;
4959 } else if let Some(job) = job_rx.next().await {
4960 jobs.push_back(job);
4961 } else {
4962 break;
4963 }
4964 }
4965 anyhow::Ok(())
4966 })
4967 .detach_and_log_err(cx);
4968
4969 job_tx
4970 }
4971
4972 fn spawn_remote_git_worker(
4973 project_id: ProjectId,
4974 client: AnyProtoClient,
4975 cx: &mut Context<Self>,
4976 ) -> mpsc::UnboundedSender<GitJob> {
4977 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4978
4979 cx.spawn(async move |_, cx| {
4980 let state = RepositoryState::Remote { project_id, client };
4981 let mut jobs = VecDeque::new();
4982 loop {
4983 while let Ok(Some(next_job)) = job_rx.try_next() {
4984 jobs.push_back(next_job);
4985 }
4986
4987 if let Some(job) = jobs.pop_front() {
4988 if let Some(current_key) = &job.key
4989 && jobs
4990 .iter()
4991 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4992 {
4993 continue;
4994 }
4995 (job.job)(state.clone(), cx).await;
4996 } else if let Some(job) = job_rx.next().await {
4997 jobs.push_back(job);
4998 } else {
4999 break;
5000 }
5001 }
5002 anyhow::Ok(())
5003 })
5004 .detach_and_log_err(cx);
5005
5006 job_tx
5007 }
5008
5009 fn load_staged_text(
5010 &mut self,
5011 buffer_id: BufferId,
5012 repo_path: RepoPath,
5013 cx: &App,
5014 ) -> Task<Result<Option<String>>> {
5015 let rx = self.send_job(None, move |state, _| async move {
5016 match state {
5017 RepositoryState::Local { backend, .. } => {
5018 anyhow::Ok(backend.load_index_text(repo_path).await)
5019 }
5020 RepositoryState::Remote { project_id, client } => {
5021 let response = client
5022 .request(proto::OpenUnstagedDiff {
5023 project_id: project_id.to_proto(),
5024 buffer_id: buffer_id.to_proto(),
5025 })
5026 .await?;
5027 Ok(response.staged_text)
5028 }
5029 }
5030 });
5031 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5032 }
5033
5034 fn load_committed_text(
5035 &mut self,
5036 buffer_id: BufferId,
5037 repo_path: RepoPath,
5038 cx: &App,
5039 ) -> Task<Result<DiffBasesChange>> {
5040 let rx = self.send_job(None, move |state, _| async move {
5041 match state {
5042 RepositoryState::Local { backend, .. } => {
5043 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5044 let staged_text = backend.load_index_text(repo_path).await;
5045 let diff_bases_change = if committed_text == staged_text {
5046 DiffBasesChange::SetBoth(committed_text)
5047 } else {
5048 DiffBasesChange::SetEach {
5049 index: staged_text,
5050 head: committed_text,
5051 }
5052 };
5053 anyhow::Ok(diff_bases_change)
5054 }
5055 RepositoryState::Remote { project_id, client } => {
5056 use proto::open_uncommitted_diff_response::Mode;
5057
5058 let response = client
5059 .request(proto::OpenUncommittedDiff {
5060 project_id: project_id.to_proto(),
5061 buffer_id: buffer_id.to_proto(),
5062 })
5063 .await?;
5064 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5065 let bases = match mode {
5066 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5067 Mode::IndexAndHead => DiffBasesChange::SetEach {
5068 head: response.committed_text,
5069 index: response.staged_text,
5070 },
5071 };
5072 Ok(bases)
5073 }
5074 }
5075 });
5076
5077 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5078 }

5079 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5080 let repository_id = self.snapshot.id;
5081 let rx = self.send_job(None, move |state, _| async move {
5082 match state {
5083 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5084 RepositoryState::Remote { client, project_id } => {
5085 let response = client
5086 .request(proto::GetBlobContent {
5087 project_id: project_id.to_proto(),
5088 repository_id: repository_id.0,
5089 oid: oid.to_string(),
5090 })
5091 .await?;
5092 Ok(response.content)
5093 }
5094 }
5095 });
5096 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5097 }
5098
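/// Records `paths` as needing a status refresh and queues a keyed job that
/// re-queries their statuses (and the stash list), applying only the entries
/// that actually changed relative to the previous snapshot.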
5099 fn paths_changed(
5100 &mut self,
5101 paths: Vec<RepoPath>,
5102 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5103 cx: &mut Context<Self>,
5104 ) {
5105 self.paths_needing_status_update.extend(paths);
5106
5107 let this = cx.weak_entity();
5108 let _ = self.send_keyed_job(
5109 Some(GitJobKey::RefreshStatuses),
5110 None,
5111 |state, mut cx| async move {
5112 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5113 (
5114 this.snapshot.clone(),
5115 mem::take(&mut this.paths_needing_status_update),
5116 )
5117 })?;
5118 let RepositoryState::Local { backend, .. } = state else {
5119 bail!("not a local repository")
5120 };
5121
5122 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5123 if paths.is_empty() {
5124 return Ok(());
5125 }
5126 let statuses = backend.status(&paths).await?;
5127 let stash_entries = backend.stash_entries().await?;
5128
5129 let changed_path_statuses = cx
5130 .background_spawn(async move {
5131 let mut changed_path_statuses = Vec::new();
5132 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5133 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5134
5135 for (repo_path, status) in &*statuses.entries {
5136 changed_paths.remove(repo_path);
5137 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5138 && cursor.item().is_some_and(|entry| entry.status == *status)
5139 {
5140 continue;
5141 }
5142
5143 changed_path_statuses.push(Edit::Insert(StatusEntry {
5144 repo_path: repo_path.clone(),
5145 status: *status,
5146 }));
5147 }
5148 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5149 for path in changed_paths.into_iter() {
5150 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5151 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
5152 }
5153 }
5154 changed_path_statuses
5155 })
5156 .await;
5157
5158 this.update(&mut cx, |this, cx| {
5159 if this.snapshot.stash_entries != stash_entries {
5160 cx.emit(RepositoryEvent::StashEntriesChanged);
5161 this.snapshot.stash_entries = stash_entries;
5162 }
5163
5164 if !changed_path_statuses.is_empty() {
5165 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
5166 this.snapshot
5167 .statuses_by_path
5168 .edit(changed_path_statuses, ());
5169 this.snapshot.scan_id += 1;
5170 }
5171
5172 if let Some(updates_tx) = updates_tx {
5173 updates_tx
5174 .unbounded_send(DownstreamUpdate::UpdateRepository(
5175 this.snapshot.clone(),
5176 ))
5177 .ok();
5178 }
5179 })
5180 },
5181 );
5182 }
5183
5184 /// Returns the currently running git command and when it started, if any.
5185 pub fn current_job(&self) -> Option<JobInfo> {
5186 self.active_jobs.values().next().cloned()
5187 }
5188
5189 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5190 self.send_job(None, |_, _| async {})
5191 }
5192}
5193
5194fn get_permalink_in_rust_registry_src(
5195 provider_registry: Arc<GitHostingProviderRegistry>,
5196 path: PathBuf,
5197 selection: Range<u32>,
5198) -> Result<url::Url> {
5199 #[derive(Deserialize)]
5200 struct CargoVcsGit {
5201 sha1: String,
5202 }
5203
5204 #[derive(Deserialize)]
5205 struct CargoVcsInfo {
5206 git: CargoVcsGit,
5207 path_in_vcs: String,
5208 }
5209
5210 #[derive(Deserialize)]
5211 struct CargoPackage {
5212 repository: String,
5213 }
5214
5215 #[derive(Deserialize)]
5216 struct CargoToml {
5217 package: CargoPackage,
5218 }
5219
5220 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5221 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5222 Some((dir, json))
5223 }) else {
5224 bail!("No .cargo_vcs_info.json found in parent directories")
5225 };
5226 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5227 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5228 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5229 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5230 .context("parsing package.repository field of manifest")?;
5231 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5232 let permalink = provider.build_permalink(
5233 remote,
5234 BuildPermalinkParams::new(
5235 &cargo_vcs_info.git.sha1,
5236 &RepoPath(
5237 RelPath::new(&path, PathStyle::local())
5238 .context("invalid path")?
5239 .into_arc(),
5240 ),
5241 Some(selection),
5242 ),
5243 );
5244 Ok(permalink)
5245}
5246
5247fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5248 let Some(blame) = blame else {
5249 return proto::BlameBufferResponse {
5250 blame_response: None,
5251 };
5252 };
5253
5254 let entries = blame
5255 .entries
5256 .into_iter()
5257 .map(|entry| proto::BlameEntry {
5258 sha: entry.sha.as_bytes().into(),
5259 start_line: entry.range.start,
5260 end_line: entry.range.end,
5261 original_line_number: entry.original_line_number,
5262 author: entry.author,
5263 author_mail: entry.author_mail,
5264 author_time: entry.author_time,
5265 author_tz: entry.author_tz,
5266 committer: entry.committer_name,
5267 committer_mail: entry.committer_email,
5268 committer_time: entry.committer_time,
5269 committer_tz: entry.committer_tz,
5270 summary: entry.summary,
5271 previous: entry.previous,
5272 filename: entry.filename,
5273 })
5274 .collect::<Vec<_>>();
5275
5276 let messages = blame
5277 .messages
5278 .into_iter()
5279 .map(|(oid, message)| proto::CommitMessage {
5280 oid: oid.as_bytes().into(),
5281 message,
5282 })
5283 .collect::<Vec<_>>();
5284
5285 proto::BlameBufferResponse {
5286 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5287 entries,
5288 messages,
5289 remote_url: blame.remote_url,
5290 }),
5291 }
5292}
5293
5294fn deserialize_blame_buffer_response(
5295 response: proto::BlameBufferResponse,
5296) -> Option<git::blame::Blame> {
5297 let response = response.blame_response?;
5298 let entries = response
5299 .entries
5300 .into_iter()
5301 .filter_map(|entry| {
5302 Some(git::blame::BlameEntry {
5303 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5304 range: entry.start_line..entry.end_line,
5305 original_line_number: entry.original_line_number,
5306 committer_name: entry.committer,
5307 committer_time: entry.committer_time,
5308 committer_tz: entry.committer_tz,
5309 committer_email: entry.committer_mail,
5310 author: entry.author,
5311 author_mail: entry.author_mail,
5312 author_time: entry.author_time,
5313 author_tz: entry.author_tz,
5314 summary: entry.summary,
5315 previous: entry.previous,
5316 filename: entry.filename,
5317 })
5318 })
5319 .collect::<Vec<_>>();
5320
5321 let messages = response
5322 .messages
5323 .into_iter()
5324 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5325 .collect::<HashMap<_, _>>();
5326
5327 Some(Blame {
5328 entries,
5329 messages,
5330 remote_url: response.remote_url,
5331 })
5332}
5333
5334fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5335 proto::Branch {
5336 is_head: branch.is_head,
5337 ref_name: branch.ref_name.to_string(),
5338 unix_timestamp: branch
5339 .most_recent_commit
5340 .as_ref()
5341 .map(|commit| commit.commit_timestamp as u64),
5342 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5343 ref_name: upstream.ref_name.to_string(),
5344 tracking: upstream
5345 .tracking
5346 .status()
5347 .map(|upstream| proto::UpstreamTracking {
5348 ahead: upstream.ahead as u64,
5349 behind: upstream.behind as u64,
5350 }),
5351 }),
5352 most_recent_commit: branch
5353 .most_recent_commit
5354 .as_ref()
5355 .map(|commit| proto::CommitSummary {
5356 sha: commit.sha.to_string(),
5357 subject: commit.subject.to_string(),
5358 commit_timestamp: commit.commit_timestamp,
5359 author_name: commit.author_name.to_string(),
5360 }),
5361 }
5362}
5363
5364fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5365 proto::Worktree {
5366 path: worktree.path.to_string_lossy().to_string(),
5367 ref_name: worktree.ref_name.to_string(),
5368 sha: worktree.sha.to_string(),
5369 }
5370}
5371
5372fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5373 git::repository::Worktree {
5374 path: PathBuf::from(proto.path.clone()),
5375 ref_name: proto.ref_name.clone().into(),
5376 sha: proto.sha.clone().into(),
5377 }
5378}
5379
5380fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5381 git::repository::Branch {
5382 is_head: proto.is_head,
5383 ref_name: proto.ref_name.clone().into(),
5384 upstream: proto
5385 .upstream
5386 .as_ref()
5387 .map(|upstream| git::repository::Upstream {
5388 ref_name: upstream.ref_name.to_string().into(),
5389 tracking: upstream
5390 .tracking
5391 .as_ref()
5392 .map(|tracking| {
5393 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5394 ahead: tracking.ahead as u32,
5395 behind: tracking.behind as u32,
5396 })
5397 })
5398 .unwrap_or(git::repository::UpstreamTracking::Gone),
5399 }),
5400 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5401 git::repository::CommitSummary {
5402 sha: commit.sha.to_string().into(),
5403 subject: commit.subject.to_string().into(),
5404 commit_timestamp: commit.commit_timestamp,
5405 author_name: commit.author_name.to_string().into(),
5406 has_parent: true,
5407 }
5408 }),
5409 }
5410}
5411
5412fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5413 proto::GitCommitDetails {
5414 sha: commit.sha.to_string(),
5415 message: commit.message.to_string(),
5416 commit_timestamp: commit.commit_timestamp,
5417 author_email: commit.author_email.to_string(),
5418 author_name: commit.author_name.to_string(),
5419 }
5420}
5421
5422fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5423 CommitDetails {
5424 sha: proto.sha.clone().into(),
5425 message: proto.message.clone().into(),
5426 commit_timestamp: proto.commit_timestamp,
5427 author_email: proto.author_email.clone().into(),
5428 author_name: proto.author_name.clone().into(),
5429 }
5430}
5431
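/// Recomputes a full repository snapshot (branch, head commit, statuses,
/// merge details, stash entries, and remote URLs) from the backend, returning
/// it along with the events describing what changed relative to
/// `prev_snapshot`.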
5432async fn compute_snapshot(
5433 id: RepositoryId,
5434 work_directory_abs_path: Arc<Path>,
5435 prev_snapshot: RepositorySnapshot,
5436 backend: Arc<dyn GitRepository>,
5437) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5438 let mut events = Vec::new();
5439 let branches = backend.branches().await?;
5440 let branch = branches.into_iter().find(|branch| branch.is_head);
5441 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5442 let stash_entries = backend.stash_entries().await?;
5443 let statuses_by_path = SumTree::from_iter(
5444 statuses
5445 .entries
5446 .iter()
5447 .map(|(repo_path, status)| StatusEntry {
5448 repo_path: repo_path.clone(),
5449 status: *status,
5450 }),
5451 (),
5452 );
5453 let (merge_details, merge_heads_changed) =
5454 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5455 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5456
5457 if merge_heads_changed {
5458 events.push(RepositoryEvent::MergeHeadsChanged);
5459 }
5460
5461 if statuses_by_path != prev_snapshot.statuses_by_path {
5462 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5463 }
5464
5465 // Useful when `branch` is None, e.g. in a detached HEAD state
5466 let head_commit = match backend.head_sha().await {
5467 Some(head_sha) => backend.show(head_sha).await.log_err(),
5468 None => None,
5469 };
5470
5471 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5472 events.push(RepositoryEvent::BranchChanged);
5473 }
5474
5475 // Used by edit prediction data collection
5476 let remote_origin_url = backend.remote_url("origin");
5477 let remote_upstream_url = backend.remote_url("upstream");
5478
5479 let snapshot = RepositorySnapshot {
5480 id,
5481 statuses_by_path,
5482 work_directory_abs_path,
5483 path_style: prev_snapshot.path_style,
5484 scan_id: prev_snapshot.scan_id + 1,
5485 branch,
5486 head_commit,
5487 merge: merge_details,
5488 remote_origin_url,
5489 remote_upstream_url,
5490 stash_entries,
5491 };
5492
5493 Ok((snapshot, events))
5494}
5495
5496fn status_from_proto(
5497 simple_status: i32,
5498 status: Option<proto::GitFileStatus>,
5499) -> anyhow::Result<FileStatus> {
5500 use proto::git_file_status::Variant;
5501
5502 let Some(variant) = status.and_then(|status| status.variant) else {
5503 let code = proto::GitStatus::from_i32(simple_status)
5504 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5505 let result = match code {
5506 proto::GitStatus::Added => TrackedStatus {
5507 worktree_status: StatusCode::Added,
5508 index_status: StatusCode::Unmodified,
5509 }
5510 .into(),
5511 proto::GitStatus::Modified => TrackedStatus {
5512 worktree_status: StatusCode::Modified,
5513 index_status: StatusCode::Unmodified,
5514 }
5515 .into(),
5516 proto::GitStatus::Conflict => UnmergedStatus {
5517 first_head: UnmergedStatusCode::Updated,
5518 second_head: UnmergedStatusCode::Updated,
5519 }
5520 .into(),
5521 proto::GitStatus::Deleted => TrackedStatus {
5522 worktree_status: StatusCode::Deleted,
5523 index_status: StatusCode::Unmodified,
5524 }
5525 .into(),
5526 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5527 };
5528 return Ok(result);
5529 };
5530
5531 let result = match variant {
5532 Variant::Untracked(_) => FileStatus::Untracked,
5533 Variant::Ignored(_) => FileStatus::Ignored,
5534 Variant::Unmerged(unmerged) => {
5535 let [first_head, second_head] =
5536 [unmerged.first_head, unmerged.second_head].map(|head| {
5537 let code = proto::GitStatus::from_i32(head)
5538 .with_context(|| format!("Invalid git status code: {head}"))?;
5539 let result = match code {
5540 proto::GitStatus::Added => UnmergedStatusCode::Added,
5541 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5542 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5543 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5544 };
5545 Ok(result)
5546 });
5547 let [first_head, second_head] = [first_head?, second_head?];
5548 UnmergedStatus {
5549 first_head,
5550 second_head,
5551 }
5552 .into()
5553 }
5554 Variant::Tracked(tracked) => {
5555 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5556 .map(|status| {
5557 let code = proto::GitStatus::from_i32(status)
5558 .with_context(|| format!("Invalid git status code: {status}"))?;
5559 let result = match code {
5560 proto::GitStatus::Modified => StatusCode::Modified,
5561 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5562 proto::GitStatus::Added => StatusCode::Added,
5563 proto::GitStatus::Deleted => StatusCode::Deleted,
5564 proto::GitStatus::Renamed => StatusCode::Renamed,
5565 proto::GitStatus::Copied => StatusCode::Copied,
5566 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5567 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5568 };
5569 Ok(result)
5570 });
5571 let [index_status, worktree_status] = [index_status?, worktree_status?];
5572 TrackedStatus {
5573 index_status,
5574 worktree_status,
5575 }
5576 .into()
5577 }
5578 };
5579 Ok(result)
5580}
5581
5582fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5583 use proto::git_file_status::{Tracked, Unmerged, Variant};
5584
5585 let variant = match status {
5586 FileStatus::Untracked => Variant::Untracked(Default::default()),
5587 FileStatus::Ignored => Variant::Ignored(Default::default()),
5588 FileStatus::Unmerged(UnmergedStatus {
5589 first_head,
5590 second_head,
5591 }) => Variant::Unmerged(Unmerged {
5592 first_head: unmerged_status_to_proto(first_head),
5593 second_head: unmerged_status_to_proto(second_head),
5594 }),
5595 FileStatus::Tracked(TrackedStatus {
5596 index_status,
5597 worktree_status,
5598 }) => Variant::Tracked(Tracked {
5599 index_status: tracked_status_to_proto(index_status),
5600 worktree_status: tracked_status_to_proto(worktree_status),
5601 }),
5602 };
5603 proto::GitFileStatus {
5604 variant: Some(variant),
5605 }
5606}
5607
5608fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5609 match code {
5610 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5611 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5612 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5613 }
5614}
5615
5616fn tracked_status_to_proto(code: StatusCode) -> i32 {
5617 match code {
5618 StatusCode::Added => proto::GitStatus::Added as _,
5619 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5620 StatusCode::Modified => proto::GitStatus::Modified as _,
5621 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5622 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5623 StatusCode::Copied => proto::GitStatus::Copied as _,
5624 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5625 }
5626}