1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::AskPassDelegate;
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 status::{
32 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
33 },
34};
35use gpui::{
36 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
37 WeakEntity,
38};
39use language::{
40 Buffer, BufferEvent, Language, LanguageRegistry,
41 proto::{deserialize_version, serialize_version},
42};
43use parking_lot::Mutex;
44use postage::stream::Stream as _;
45use rpc::{
46 AnyProtoClient, TypedEnvelope,
47 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
48};
49use serde::Deserialize;
50use std::{
51 cmp::Ordering,
52 collections::{BTreeSet, VecDeque},
53 future::Future,
54 mem,
55 ops::Range,
56 path::{Path, PathBuf},
57 sync::{
58 Arc,
59 atomic::{self, AtomicU64},
60 },
61 time::Instant,
62};
63use sum_tree::{Edit, SumTree, TreeSet};
64use text::{Bias, BufferId};
65use util::{ResultExt, debug_panic, post_inc};
66use worktree::{
67 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
68 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
69};
70
71pub struct GitStore {
72 state: GitStoreState,
73 buffer_store: Entity<BufferStore>,
74 worktree_store: Entity<WorktreeStore>,
75 repositories: HashMap<RepositoryId, Entity<Repository>>,
76 active_repo_id: Option<RepositoryId>,
77 #[allow(clippy::type_complexity)]
78 loading_diffs:
79 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
80 diffs: HashMap<BufferId, Entity<BufferGitState>>,
81 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
82 _subscriptions: Vec<Subscription>,
83}
84
85#[derive(Default)]
86struct SharedDiffs {
87 unstaged: Option<Entity<BufferDiff>>,
88 uncommitted: Option<Entity<BufferDiff>>,
89}
90
91struct BufferGitState {
92 unstaged_diff: Option<WeakEntity<BufferDiff>>,
93 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
94 conflict_set: Option<WeakEntity<ConflictSet>>,
95 recalculate_diff_task: Option<Task<Result<()>>>,
96 reparse_conflict_markers_task: Option<Task<Result<()>>>,
97 language: Option<Arc<Language>>,
98 language_registry: Option<Arc<LanguageRegistry>>,
99 conflict_updated_futures: Vec<oneshot::Sender<()>>,
100 recalculating_tx: postage::watch::Sender<bool>,
101
102 /// These operation counts are used to ensure that head and index text
103 /// values read from the git repository are up-to-date with any hunk staging
104 /// operations that have been performed on the BufferDiff.
105 ///
106 /// The operation count is incremented immediately when the user initiates a
107 /// hunk stage/unstage operation. Then, upon finishing writing the new index
108 /// text do disk, the `operation count as of write` is updated to reflect
109 /// the operation count that prompted the write.
110 hunk_staging_operation_count: usize,
111 hunk_staging_operation_count_as_of_write: usize,
112
113 head_text: Option<Arc<String>>,
114 index_text: Option<Arc<String>>,
115 head_changed: bool,
116 index_changed: bool,
117 language_changed: bool,
118}
119
120#[derive(Clone, Debug)]
121enum DiffBasesChange {
122 SetIndex(Option<String>),
123 SetHead(Option<String>),
124 SetEach {
125 index: Option<String>,
126 head: Option<String>,
127 },
128 SetBoth(Option<String>),
129}
130
131#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
132enum DiffKind {
133 Unstaged,
134 Uncommitted,
135}
136
137enum GitStoreState {
138 Local {
139 next_repository_id: Arc<AtomicU64>,
140 downstream: Option<LocalDownstreamState>,
141 project_environment: Entity<ProjectEnvironment>,
142 fs: Arc<dyn Fs>,
143 },
144 Ssh {
145 upstream_client: AnyProtoClient,
146 upstream_project_id: ProjectId,
147 downstream: Option<(AnyProtoClient, ProjectId)>,
148 },
149 Remote {
150 upstream_client: AnyProtoClient,
151 upstream_project_id: ProjectId,
152 },
153}
154
155enum DownstreamUpdate {
156 UpdateRepository(RepositorySnapshot),
157 RemoveRepository(RepositoryId),
158}
159
160struct LocalDownstreamState {
161 client: AnyProtoClient,
162 project_id: ProjectId,
163 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
164 _task: Task<Result<()>>,
165}
166
167#[derive(Clone, Debug)]
168pub struct GitStoreCheckpoint {
169 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
170}
171
172#[derive(Clone, Debug, PartialEq, Eq)]
173pub struct StatusEntry {
174 pub repo_path: RepoPath,
175 pub status: FileStatus,
176}
177
178impl StatusEntry {
179 fn to_proto(&self) -> proto::StatusEntry {
180 let simple_status = match self.status {
181 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
182 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
183 FileStatus::Tracked(TrackedStatus {
184 index_status,
185 worktree_status,
186 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
187 worktree_status
188 } else {
189 index_status
190 }),
191 };
192
193 proto::StatusEntry {
194 repo_path: self.repo_path.as_ref().to_proto(),
195 simple_status,
196 status: Some(status_to_proto(self.status)),
197 }
198 }
199}
200
201impl TryFrom<proto::StatusEntry> for StatusEntry {
202 type Error = anyhow::Error;
203
204 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
205 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
206 let status = status_from_proto(value.simple_status, value.status)?;
207 Ok(Self { repo_path, status })
208 }
209}
210
211impl sum_tree::Item for StatusEntry {
212 type Summary = PathSummary<GitSummary>;
213
214 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
215 PathSummary {
216 max_path: self.repo_path.0.clone(),
217 item_summary: self.status.summary(),
218 }
219 }
220}
221
222impl sum_tree::KeyedItem for StatusEntry {
223 type Key = PathKey;
224
225 fn key(&self) -> Self::Key {
226 PathKey(self.repo_path.0.clone())
227 }
228}
229
230#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
231pub struct RepositoryId(pub u64);
232
233#[derive(Clone, Debug, Default, PartialEq, Eq)]
234pub struct MergeDetails {
235 pub conflicted_paths: TreeSet<RepoPath>,
236 pub message: Option<SharedString>,
237 pub heads: Vec<Option<SharedString>>,
238}
239
240#[derive(Clone, Debug, PartialEq, Eq)]
241pub struct RepositorySnapshot {
242 pub id: RepositoryId,
243 pub statuses_by_path: SumTree<StatusEntry>,
244 pub work_directory_abs_path: Arc<Path>,
245 pub branch: Option<Branch>,
246 pub head_commit: Option<CommitDetails>,
247 pub scan_id: u64,
248 pub merge: MergeDetails,
249 pub remote_origin_url: Option<String>,
250 pub remote_upstream_url: Option<String>,
251}
252
253type JobId = u64;
254
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct JobInfo {
257 pub start: Instant,
258 pub message: SharedString,
259}
260
261pub struct Repository {
262 this: WeakEntity<Self>,
263 snapshot: RepositorySnapshot,
264 commit_message_buffer: Option<Entity<Buffer>>,
265 git_store: WeakEntity<GitStore>,
266 // For a local repository, holds paths that have had worktree events since the last status scan completed,
267 // and that should be examined during the next status scan.
268 paths_needing_status_update: BTreeSet<RepoPath>,
269 job_sender: mpsc::UnboundedSender<GitJob>,
270 active_jobs: HashMap<JobId, JobInfo>,
271 job_id: JobId,
272 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
273 latest_askpass_id: u64,
274}
275
276impl std::ops::Deref for Repository {
277 type Target = RepositorySnapshot;
278
279 fn deref(&self) -> &Self::Target {
280 &self.snapshot
281 }
282}
283
284#[derive(Clone)]
285pub enum RepositoryState {
286 Local {
287 backend: Arc<dyn GitRepository>,
288 environment: Arc<HashMap<String, String>>,
289 },
290 Remote {
291 project_id: ProjectId,
292 client: AnyProtoClient,
293 },
294}
295
296#[derive(Clone, Debug)]
297pub enum RepositoryEvent {
298 Updated { full_scan: bool, new_instance: bool },
299 MergeHeadsChanged,
300}
301
302#[derive(Clone, Debug)]
303pub struct JobsUpdated;
304
305#[derive(Debug)]
306pub enum GitStoreEvent {
307 ActiveRepositoryChanged(Option<RepositoryId>),
308 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
309 RepositoryAdded(RepositoryId),
310 RepositoryRemoved(RepositoryId),
311 IndexWriteError(anyhow::Error),
312 JobsUpdated,
313 ConflictsUpdated,
314}
315
316impl EventEmitter<RepositoryEvent> for Repository {}
317impl EventEmitter<JobsUpdated> for Repository {}
318impl EventEmitter<GitStoreEvent> for GitStore {}
319
320pub struct GitJob {
321 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
322 key: Option<GitJobKey>,
323}
324
325#[derive(PartialEq, Eq)]
326enum GitJobKey {
327 WriteIndex(RepoPath),
328 ReloadBufferDiffBases,
329 RefreshStatuses,
330 ReloadGitState,
331}
332
333impl GitStore {
334 pub fn local(
335 worktree_store: &Entity<WorktreeStore>,
336 buffer_store: Entity<BufferStore>,
337 environment: Entity<ProjectEnvironment>,
338 fs: Arc<dyn Fs>,
339 cx: &mut Context<Self>,
340 ) -> Self {
341 Self::new(
342 worktree_store.clone(),
343 buffer_store,
344 GitStoreState::Local {
345 next_repository_id: Arc::new(AtomicU64::new(1)),
346 downstream: None,
347 project_environment: environment,
348 fs,
349 },
350 cx,
351 )
352 }
353
354 pub fn remote(
355 worktree_store: &Entity<WorktreeStore>,
356 buffer_store: Entity<BufferStore>,
357 upstream_client: AnyProtoClient,
358 project_id: ProjectId,
359 cx: &mut Context<Self>,
360 ) -> Self {
361 Self::new(
362 worktree_store.clone(),
363 buffer_store,
364 GitStoreState::Remote {
365 upstream_client,
366 upstream_project_id: project_id,
367 },
368 cx,
369 )
370 }
371
372 pub fn ssh(
373 worktree_store: &Entity<WorktreeStore>,
374 buffer_store: Entity<BufferStore>,
375 upstream_client: AnyProtoClient,
376 cx: &mut Context<Self>,
377 ) -> Self {
378 Self::new(
379 worktree_store.clone(),
380 buffer_store,
381 GitStoreState::Ssh {
382 upstream_client,
383 upstream_project_id: ProjectId(SSH_PROJECT_ID),
384 downstream: None,
385 },
386 cx,
387 )
388 }
389
390 fn new(
391 worktree_store: Entity<WorktreeStore>,
392 buffer_store: Entity<BufferStore>,
393 state: GitStoreState,
394 cx: &mut Context<Self>,
395 ) -> Self {
396 let _subscriptions = vec![
397 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
398 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
399 ];
400
401 GitStore {
402 state,
403 buffer_store,
404 worktree_store,
405 repositories: HashMap::default(),
406 active_repo_id: None,
407 _subscriptions,
408 loading_diffs: HashMap::default(),
409 shared_diffs: HashMap::default(),
410 diffs: HashMap::default(),
411 }
412 }
413
414 pub fn init(client: &AnyProtoClient) {
415 client.add_entity_request_handler(Self::handle_get_remotes);
416 client.add_entity_request_handler(Self::handle_get_branches);
417 client.add_entity_request_handler(Self::handle_get_default_branch);
418 client.add_entity_request_handler(Self::handle_change_branch);
419 client.add_entity_request_handler(Self::handle_create_branch);
420 client.add_entity_request_handler(Self::handle_git_init);
421 client.add_entity_request_handler(Self::handle_push);
422 client.add_entity_request_handler(Self::handle_pull);
423 client.add_entity_request_handler(Self::handle_fetch);
424 client.add_entity_request_handler(Self::handle_stage);
425 client.add_entity_request_handler(Self::handle_unstage);
426 client.add_entity_request_handler(Self::handle_stash);
427 client.add_entity_request_handler(Self::handle_stash_pop);
428 client.add_entity_request_handler(Self::handle_commit);
429 client.add_entity_request_handler(Self::handle_reset);
430 client.add_entity_request_handler(Self::handle_show);
431 client.add_entity_request_handler(Self::handle_load_commit_diff);
432 client.add_entity_request_handler(Self::handle_checkout_files);
433 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
434 client.add_entity_request_handler(Self::handle_set_index_text);
435 client.add_entity_request_handler(Self::handle_askpass);
436 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
437 client.add_entity_request_handler(Self::handle_git_diff);
438 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
439 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
440 client.add_entity_message_handler(Self::handle_update_diff_bases);
441 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
442 client.add_entity_request_handler(Self::handle_blame_buffer);
443 client.add_entity_message_handler(Self::handle_update_repository);
444 client.add_entity_message_handler(Self::handle_remove_repository);
445 client.add_entity_request_handler(Self::handle_git_clone);
446 }
447
448 pub fn is_local(&self) -> bool {
449 matches!(self.state, GitStoreState::Local { .. })
450 }
451
452 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
453 match &mut self.state {
454 GitStoreState::Ssh {
455 downstream: downstream_client,
456 ..
457 } => {
458 for repo in self.repositories.values() {
459 let update = repo.read(cx).snapshot.initial_update(project_id);
460 for update in split_repository_update(update) {
461 client.send(update).log_err();
462 }
463 }
464 *downstream_client = Some((client, ProjectId(project_id)));
465 }
466 GitStoreState::Local {
467 downstream: downstream_client,
468 ..
469 } => {
470 let mut snapshots = HashMap::default();
471 let (updates_tx, mut updates_rx) = mpsc::unbounded();
472 for repo in self.repositories.values() {
473 updates_tx
474 .unbounded_send(DownstreamUpdate::UpdateRepository(
475 repo.read(cx).snapshot.clone(),
476 ))
477 .ok();
478 }
479 *downstream_client = Some(LocalDownstreamState {
480 client: client.clone(),
481 project_id: ProjectId(project_id),
482 updates_tx,
483 _task: cx.spawn(async move |this, cx| {
484 cx.background_spawn(async move {
485 while let Some(update) = updates_rx.next().await {
486 match update {
487 DownstreamUpdate::UpdateRepository(snapshot) => {
488 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
489 {
490 let update =
491 snapshot.build_update(old_snapshot, project_id);
492 *old_snapshot = snapshot;
493 for update in split_repository_update(update) {
494 client.send(update)?;
495 }
496 } else {
497 let update = snapshot.initial_update(project_id);
498 for update in split_repository_update(update) {
499 client.send(update)?;
500 }
501 snapshots.insert(snapshot.id, snapshot);
502 }
503 }
504 DownstreamUpdate::RemoveRepository(id) => {
505 client.send(proto::RemoveRepository {
506 project_id,
507 id: id.to_proto(),
508 })?;
509 }
510 }
511 }
512 anyhow::Ok(())
513 })
514 .await
515 .ok();
516 this.update(cx, |this, _| {
517 if let GitStoreState::Local {
518 downstream: downstream_client,
519 ..
520 } = &mut this.state
521 {
522 downstream_client.take();
523 } else {
524 unreachable!("unshared called on remote store");
525 }
526 })
527 }),
528 });
529 }
530 GitStoreState::Remote { .. } => {
531 debug_panic!("shared called on remote store");
532 }
533 }
534 }
535
536 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
537 match &mut self.state {
538 GitStoreState::Local {
539 downstream: downstream_client,
540 ..
541 } => {
542 downstream_client.take();
543 }
544 GitStoreState::Ssh {
545 downstream: downstream_client,
546 ..
547 } => {
548 downstream_client.take();
549 }
550 GitStoreState::Remote { .. } => {
551 debug_panic!("unshared called on remote store");
552 }
553 }
554 self.shared_diffs.clear();
555 }
556
557 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
558 self.shared_diffs.remove(peer_id);
559 }
560
561 pub fn active_repository(&self) -> Option<Entity<Repository>> {
562 self.active_repo_id
563 .as_ref()
564 .map(|id| self.repositories[id].clone())
565 }
566
567 pub fn open_unstaged_diff(
568 &mut self,
569 buffer: Entity<Buffer>,
570 cx: &mut Context<Self>,
571 ) -> Task<Result<Entity<BufferDiff>>> {
572 let buffer_id = buffer.read(cx).remote_id();
573 if let Some(diff_state) = self.diffs.get(&buffer_id)
574 && let Some(unstaged_diff) = diff_state
575 .read(cx)
576 .unstaged_diff
577 .as_ref()
578 .and_then(|weak| weak.upgrade())
579 {
580 if let Some(task) =
581 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
582 {
583 return cx.background_executor().spawn(async move {
584 task.await;
585 Ok(unstaged_diff)
586 });
587 }
588 return Task::ready(Ok(unstaged_diff));
589 }
590
591 let Some((repo, repo_path)) =
592 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
593 else {
594 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
595 };
596
597 let task = self
598 .loading_diffs
599 .entry((buffer_id, DiffKind::Unstaged))
600 .or_insert_with(|| {
601 let staged_text = repo.update(cx, |repo, cx| {
602 repo.load_staged_text(buffer_id, repo_path, cx)
603 });
604 cx.spawn(async move |this, cx| {
605 Self::open_diff_internal(
606 this,
607 DiffKind::Unstaged,
608 staged_text.await.map(DiffBasesChange::SetIndex),
609 buffer,
610 cx,
611 )
612 .await
613 .map_err(Arc::new)
614 })
615 .shared()
616 })
617 .clone();
618
619 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
620 }
621
622 pub fn open_uncommitted_diff(
623 &mut self,
624 buffer: Entity<Buffer>,
625 cx: &mut Context<Self>,
626 ) -> Task<Result<Entity<BufferDiff>>> {
627 let buffer_id = buffer.read(cx).remote_id();
628
629 if let Some(diff_state) = self.diffs.get(&buffer_id)
630 && let Some(uncommitted_diff) = diff_state
631 .read(cx)
632 .uncommitted_diff
633 .as_ref()
634 .and_then(|weak| weak.upgrade())
635 {
636 if let Some(task) =
637 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
638 {
639 return cx.background_executor().spawn(async move {
640 task.await;
641 Ok(uncommitted_diff)
642 });
643 }
644 return Task::ready(Ok(uncommitted_diff));
645 }
646
647 let Some((repo, repo_path)) =
648 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
649 else {
650 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
651 };
652
653 let task = self
654 .loading_diffs
655 .entry((buffer_id, DiffKind::Uncommitted))
656 .or_insert_with(|| {
657 let changes = repo.update(cx, |repo, cx| {
658 repo.load_committed_text(buffer_id, repo_path, cx)
659 });
660
661 cx.spawn(async move |this, cx| {
662 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
663 .await
664 .map_err(Arc::new)
665 })
666 .shared()
667 })
668 .clone();
669
670 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
671 }
672
673 async fn open_diff_internal(
674 this: WeakEntity<Self>,
675 kind: DiffKind,
676 texts: Result<DiffBasesChange>,
677 buffer_entity: Entity<Buffer>,
678 cx: &mut AsyncApp,
679 ) -> Result<Entity<BufferDiff>> {
680 let diff_bases_change = match texts {
681 Err(e) => {
682 this.update(cx, |this, cx| {
683 let buffer = buffer_entity.read(cx);
684 let buffer_id = buffer.remote_id();
685 this.loading_diffs.remove(&(buffer_id, kind));
686 })?;
687 return Err(e);
688 }
689 Ok(change) => change,
690 };
691
692 this.update(cx, |this, cx| {
693 let buffer = buffer_entity.read(cx);
694 let buffer_id = buffer.remote_id();
695 let language = buffer.language().cloned();
696 let language_registry = buffer.language_registry();
697 let text_snapshot = buffer.text_snapshot();
698 this.loading_diffs.remove(&(buffer_id, kind));
699
700 let git_store = cx.weak_entity();
701 let diff_state = this
702 .diffs
703 .entry(buffer_id)
704 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
705
706 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
707
708 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
709 diff_state.update(cx, |diff_state, cx| {
710 diff_state.language = language;
711 diff_state.language_registry = language_registry;
712
713 match kind {
714 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
715 DiffKind::Uncommitted => {
716 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
717 diff
718 } else {
719 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
720 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
721 unstaged_diff
722 };
723
724 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
725 diff_state.uncommitted_diff = Some(diff.downgrade())
726 }
727 }
728
729 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
730 let rx = diff_state.wait_for_recalculation();
731
732 anyhow::Ok(async move {
733 if let Some(rx) = rx {
734 rx.await;
735 }
736 Ok(diff)
737 })
738 })
739 })??
740 .await
741 }
742
743 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
744 let diff_state = self.diffs.get(&buffer_id)?;
745 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
746 }
747
748 pub fn get_uncommitted_diff(
749 &self,
750 buffer_id: BufferId,
751 cx: &App,
752 ) -> Option<Entity<BufferDiff>> {
753 let diff_state = self.diffs.get(&buffer_id)?;
754 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
755 }
756
757 pub fn open_conflict_set(
758 &mut self,
759 buffer: Entity<Buffer>,
760 cx: &mut Context<Self>,
761 ) -> Entity<ConflictSet> {
762 log::debug!("open conflict set");
763 let buffer_id = buffer.read(cx).remote_id();
764
765 if let Some(git_state) = self.diffs.get(&buffer_id)
766 && let Some(conflict_set) = git_state
767 .read(cx)
768 .conflict_set
769 .as_ref()
770 .and_then(|weak| weak.upgrade())
771 {
772 let conflict_set = conflict_set.clone();
773 let buffer_snapshot = buffer.read(cx).text_snapshot();
774
775 git_state.update(cx, |state, cx| {
776 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
777 });
778
779 return conflict_set;
780 }
781
782 let is_unmerged = self
783 .repository_and_path_for_buffer_id(buffer_id, cx)
784 .map_or(false, |(repo, path)| {
785 repo.read(cx).snapshot.has_conflict(&path)
786 });
787 let git_store = cx.weak_entity();
788 let buffer_git_state = self
789 .diffs
790 .entry(buffer_id)
791 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
792 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
793
794 self._subscriptions
795 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
796 cx.emit(GitStoreEvent::ConflictsUpdated);
797 }));
798
799 buffer_git_state.update(cx, |state, cx| {
800 state.conflict_set = Some(conflict_set.downgrade());
801 let buffer_snapshot = buffer.read(cx).text_snapshot();
802 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
803 });
804
805 conflict_set
806 }
807
808 pub fn project_path_git_status(
809 &self,
810 project_path: &ProjectPath,
811 cx: &App,
812 ) -> Option<FileStatus> {
813 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
814 Some(repo.read(cx).status_for_path(&repo_path)?.status)
815 }
816
817 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
818 let mut work_directory_abs_paths = Vec::new();
819 let mut checkpoints = Vec::new();
820 for repository in self.repositories.values() {
821 repository.update(cx, |repository, _| {
822 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
823 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
824 });
825 }
826
827 cx.background_executor().spawn(async move {
828 let checkpoints = future::try_join_all(checkpoints).await?;
829 Ok(GitStoreCheckpoint {
830 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
831 .into_iter()
832 .zip(checkpoints)
833 .collect(),
834 })
835 })
836 }
837
838 pub fn restore_checkpoint(
839 &self,
840 checkpoint: GitStoreCheckpoint,
841 cx: &mut App,
842 ) -> Task<Result<()>> {
843 let repositories_by_work_dir_abs_path = self
844 .repositories
845 .values()
846 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
847 .collect::<HashMap<_, _>>();
848
849 let mut tasks = Vec::new();
850 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
851 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
852 let restore = repository.update(cx, |repository, _| {
853 repository.restore_checkpoint(checkpoint)
854 });
855 tasks.push(async move { restore.await? });
856 }
857 }
858 cx.background_spawn(async move {
859 future::try_join_all(tasks).await?;
860 Ok(())
861 })
862 }
863
864 /// Compares two checkpoints, returning true if they are equal.
865 pub fn compare_checkpoints(
866 &self,
867 left: GitStoreCheckpoint,
868 mut right: GitStoreCheckpoint,
869 cx: &mut App,
870 ) -> Task<Result<bool>> {
871 let repositories_by_work_dir_abs_path = self
872 .repositories
873 .values()
874 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
875 .collect::<HashMap<_, _>>();
876
877 let mut tasks = Vec::new();
878 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
879 if let Some(right_checkpoint) = right
880 .checkpoints_by_work_dir_abs_path
881 .remove(&work_dir_abs_path)
882 {
883 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
884 {
885 let compare = repository.update(cx, |repository, _| {
886 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
887 });
888
889 tasks.push(async move { compare.await? });
890 }
891 } else {
892 return Task::ready(Ok(false));
893 }
894 }
895 cx.background_spawn(async move {
896 Ok(future::try_join_all(tasks)
897 .await?
898 .into_iter()
899 .all(|result| result))
900 })
901 }
902
903 /// Blames a buffer.
904 pub fn blame_buffer(
905 &self,
906 buffer: &Entity<Buffer>,
907 version: Option<clock::Global>,
908 cx: &mut App,
909 ) -> Task<Result<Option<Blame>>> {
910 let buffer = buffer.read(cx);
911 let Some((repo, repo_path)) =
912 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
913 else {
914 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
915 };
916 let content = match &version {
917 Some(version) => buffer.rope_for_version(version).clone(),
918 None => buffer.as_rope().clone(),
919 };
920 let version = version.unwrap_or(buffer.version());
921 let buffer_id = buffer.remote_id();
922
923 let rx = repo.update(cx, |repo, _| {
924 repo.send_job(None, move |state, _| async move {
925 match state {
926 RepositoryState::Local { backend, .. } => backend
927 .blame(repo_path.clone(), content)
928 .await
929 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
930 .map(Some),
931 RepositoryState::Remote { project_id, client } => {
932 let response = client
933 .request(proto::BlameBuffer {
934 project_id: project_id.to_proto(),
935 buffer_id: buffer_id.into(),
936 version: serialize_version(&version),
937 })
938 .await?;
939 Ok(deserialize_blame_buffer_response(response))
940 }
941 }
942 })
943 });
944
945 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
946 }
947
948 pub fn get_permalink_to_line(
949 &self,
950 buffer: &Entity<Buffer>,
951 selection: Range<u32>,
952 cx: &mut App,
953 ) -> Task<Result<url::Url>> {
954 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
955 return Task::ready(Err(anyhow!("buffer has no file")));
956 };
957
958 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
959 &(file.worktree.read(cx).id(), file.path.clone()).into(),
960 cx,
961 ) else {
962 // If we're not in a Git repo, check whether this is a Rust source
963 // file in the Cargo registry (presumably opened with go-to-definition
964 // from a normal Rust file). If so, we can put together a permalink
965 // using crate metadata.
966 if buffer
967 .read(cx)
968 .language()
969 .is_none_or(|lang| lang.name() != "Rust".into())
970 {
971 return Task::ready(Err(anyhow!("no permalink available")));
972 }
973 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
974 return Task::ready(Err(anyhow!("no permalink available")));
975 };
976 return cx.spawn(async move |cx| {
977 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
978 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
979 .context("no permalink available")
980 });
981
982 // TODO remote case
983 };
984
985 let buffer_id = buffer.read(cx).remote_id();
986 let branch = repo.read(cx).branch.clone();
987 let remote = branch
988 .as_ref()
989 .and_then(|b| b.upstream.as_ref())
990 .and_then(|b| b.remote_name())
991 .unwrap_or("origin")
992 .to_string();
993
994 let rx = repo.update(cx, |repo, _| {
995 repo.send_job(None, move |state, cx| async move {
996 match state {
997 RepositoryState::Local { backend, .. } => {
998 let origin_url = backend
999 .remote_url(&remote)
1000 .with_context(|| format!("remote \"{remote}\" not found"))?;
1001
1002 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1003
1004 let provider_registry =
1005 cx.update(GitHostingProviderRegistry::default_global)?;
1006
1007 let (provider, remote) =
1008 parse_git_remote_url(provider_registry, &origin_url)
1009 .context("parsing Git remote URL")?;
1010
1011 let path = repo_path.to_str().with_context(|| {
1012 format!("converting repo path {repo_path:?} to string")
1013 })?;
1014
1015 Ok(provider.build_permalink(
1016 remote,
1017 BuildPermalinkParams {
1018 sha: &sha,
1019 path,
1020 selection: Some(selection),
1021 },
1022 ))
1023 }
1024 RepositoryState::Remote { project_id, client } => {
1025 let response = client
1026 .request(proto::GetPermalinkToLine {
1027 project_id: project_id.to_proto(),
1028 buffer_id: buffer_id.into(),
1029 selection: Some(proto::Range {
1030 start: selection.start as u64,
1031 end: selection.end as u64,
1032 }),
1033 })
1034 .await?;
1035
1036 url::Url::parse(&response.permalink).context("failed to parse permalink")
1037 }
1038 }
1039 })
1040 });
1041 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1042 }
1043
1044 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1045 match &self.state {
1046 GitStoreState::Local {
1047 downstream: downstream_client,
1048 ..
1049 } => downstream_client
1050 .as_ref()
1051 .map(|state| (state.client.clone(), state.project_id)),
1052 GitStoreState::Ssh {
1053 downstream: downstream_client,
1054 ..
1055 } => downstream_client.clone(),
1056 GitStoreState::Remote { .. } => None,
1057 }
1058 }
1059
1060 fn upstream_client(&self) -> Option<AnyProtoClient> {
1061 match &self.state {
1062 GitStoreState::Local { .. } => None,
1063 GitStoreState::Ssh {
1064 upstream_client, ..
1065 }
1066 | GitStoreState::Remote {
1067 upstream_client, ..
1068 } => Some(upstream_client.clone()),
1069 }
1070 }
1071
1072 fn on_worktree_store_event(
1073 &mut self,
1074 worktree_store: Entity<WorktreeStore>,
1075 event: &WorktreeStoreEvent,
1076 cx: &mut Context<Self>,
1077 ) {
1078 let GitStoreState::Local {
1079 project_environment,
1080 downstream,
1081 next_repository_id,
1082 fs,
1083 } = &self.state
1084 else {
1085 return;
1086 };
1087
1088 match event {
1089 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1090 if let Some(worktree) = self
1091 .worktree_store
1092 .read(cx)
1093 .worktree_for_id(*worktree_id, cx)
1094 {
1095 let paths_by_git_repo =
1096 self.process_updated_entries(&worktree, updated_entries, cx);
1097 let downstream = downstream
1098 .as_ref()
1099 .map(|downstream| downstream.updates_tx.clone());
1100 cx.spawn(async move |_, cx| {
1101 let paths_by_git_repo = paths_by_git_repo.await;
1102 for (repo, paths) in paths_by_git_repo {
1103 repo.update(cx, |repo, cx| {
1104 repo.paths_changed(paths, downstream.clone(), cx);
1105 })
1106 .ok();
1107 }
1108 })
1109 .detach();
1110 }
1111 }
1112 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1113 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1114 else {
1115 return;
1116 };
1117 if !worktree.read(cx).is_visible() {
1118 log::debug!(
1119 "not adding repositories for local worktree {:?} because it's not visible",
1120 worktree.read(cx).abs_path()
1121 );
1122 return;
1123 }
1124 self.update_repositories_from_worktree(
1125 project_environment.clone(),
1126 next_repository_id.clone(),
1127 downstream
1128 .as_ref()
1129 .map(|downstream| downstream.updates_tx.clone()),
1130 changed_repos.clone(),
1131 fs.clone(),
1132 cx,
1133 );
1134 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1135 }
1136 _ => {}
1137 }
1138 }
1139
1140 fn on_repository_event(
1141 &mut self,
1142 repo: Entity<Repository>,
1143 event: &RepositoryEvent,
1144 cx: &mut Context<Self>,
1145 ) {
1146 let id = repo.read(cx).id;
1147 let repo_snapshot = repo.read(cx).snapshot.clone();
1148 for (buffer_id, diff) in self.diffs.iter() {
1149 if let Some((buffer_repo, repo_path)) =
1150 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1151 && buffer_repo == repo
1152 {
1153 diff.update(cx, |diff, cx| {
1154 if let Some(conflict_set) = &diff.conflict_set {
1155 let conflict_status_changed =
1156 conflict_set.update(cx, |conflict_set, cx| {
1157 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1158 conflict_set.set_has_conflict(has_conflict, cx)
1159 })?;
1160 if conflict_status_changed {
1161 let buffer_store = self.buffer_store.read(cx);
1162 if let Some(buffer) = buffer_store.get(*buffer_id) {
1163 let _ = diff
1164 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1165 }
1166 }
1167 }
1168 anyhow::Ok(())
1169 })
1170 .ok();
1171 }
1172 }
1173 cx.emit(GitStoreEvent::RepositoryUpdated(
1174 id,
1175 event.clone(),
1176 self.active_repo_id == Some(id),
1177 ))
1178 }
1179
1180 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1181 cx.emit(GitStoreEvent::JobsUpdated)
1182 }
1183
1184 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1185 fn update_repositories_from_worktree(
1186 &mut self,
1187 project_environment: Entity<ProjectEnvironment>,
1188 next_repository_id: Arc<AtomicU64>,
1189 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1190 updated_git_repositories: UpdatedGitRepositoriesSet,
1191 fs: Arc<dyn Fs>,
1192 cx: &mut Context<Self>,
1193 ) {
1194 let mut removed_ids = Vec::new();
1195 for update in updated_git_repositories.iter() {
1196 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1197 let existing_work_directory_abs_path =
1198 repo.read(cx).work_directory_abs_path.clone();
1199 Some(&existing_work_directory_abs_path)
1200 == update.old_work_directory_abs_path.as_ref()
1201 || Some(&existing_work_directory_abs_path)
1202 == update.new_work_directory_abs_path.as_ref()
1203 }) {
1204 if let Some(new_work_directory_abs_path) =
1205 update.new_work_directory_abs_path.clone()
1206 {
1207 existing.update(cx, |existing, cx| {
1208 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1209 existing.schedule_scan(updates_tx.clone(), cx);
1210 });
1211 } else {
1212 removed_ids.push(*id);
1213 }
1214 } else if let UpdatedGitRepository {
1215 new_work_directory_abs_path: Some(work_directory_abs_path),
1216 dot_git_abs_path: Some(dot_git_abs_path),
1217 repository_dir_abs_path: Some(repository_dir_abs_path),
1218 common_dir_abs_path: Some(common_dir_abs_path),
1219 ..
1220 } = update
1221 {
1222 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1223 let git_store = cx.weak_entity();
1224 let repo = cx.new(|cx| {
1225 let mut repo = Repository::local(
1226 id,
1227 work_directory_abs_path.clone(),
1228 dot_git_abs_path.clone(),
1229 repository_dir_abs_path.clone(),
1230 common_dir_abs_path.clone(),
1231 project_environment.downgrade(),
1232 fs.clone(),
1233 git_store,
1234 cx,
1235 );
1236 repo.schedule_scan(updates_tx.clone(), cx);
1237 repo
1238 });
1239 self._subscriptions
1240 .push(cx.subscribe(&repo, Self::on_repository_event));
1241 self._subscriptions
1242 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1243 self.repositories.insert(id, repo);
1244 cx.emit(GitStoreEvent::RepositoryAdded(id));
1245 self.active_repo_id.get_or_insert_with(|| {
1246 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1247 id
1248 });
1249 }
1250 }
1251
1252 for id in removed_ids {
1253 if self.active_repo_id == Some(id) {
1254 self.active_repo_id = None;
1255 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1256 }
1257 self.repositories.remove(&id);
1258 if let Some(updates_tx) = updates_tx.as_ref() {
1259 updates_tx
1260 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1261 .ok();
1262 }
1263 }
1264 }
1265
1266 fn on_buffer_store_event(
1267 &mut self,
1268 _: Entity<BufferStore>,
1269 event: &BufferStoreEvent,
1270 cx: &mut Context<Self>,
1271 ) {
1272 match event {
1273 BufferStoreEvent::BufferAdded(buffer) => {
1274 cx.subscribe(buffer, |this, buffer, event, cx| {
1275 if let BufferEvent::LanguageChanged = event {
1276 let buffer_id = buffer.read(cx).remote_id();
1277 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1278 diff_state.update(cx, |diff_state, cx| {
1279 diff_state.buffer_language_changed(buffer, cx);
1280 });
1281 }
1282 }
1283 })
1284 .detach();
1285 }
1286 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1287 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1288 diffs.remove(buffer_id);
1289 }
1290 }
1291 BufferStoreEvent::BufferDropped(buffer_id) => {
1292 self.diffs.remove(buffer_id);
1293 for diffs in self.shared_diffs.values_mut() {
1294 diffs.remove(buffer_id);
1295 }
1296 }
1297
1298 _ => {}
1299 }
1300 }
1301
1302 pub fn recalculate_buffer_diffs(
1303 &mut self,
1304 buffers: Vec<Entity<Buffer>>,
1305 cx: &mut Context<Self>,
1306 ) -> impl Future<Output = ()> + use<> {
1307 let mut futures = Vec::new();
1308 for buffer in buffers {
1309 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1310 let buffer = buffer.read(cx).text_snapshot();
1311 diff_state.update(cx, |diff_state, cx| {
1312 diff_state.recalculate_diffs(buffer.clone(), cx);
1313 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1314 });
1315 futures.push(diff_state.update(cx, |diff_state, cx| {
1316 diff_state
1317 .reparse_conflict_markers(buffer, cx)
1318 .map(|_| {})
1319 .boxed()
1320 }));
1321 }
1322 }
1323 async move {
1324 futures::future::join_all(futures).await;
1325 }
1326 }
1327
1328 fn on_buffer_diff_event(
1329 &mut self,
1330 diff: Entity<buffer_diff::BufferDiff>,
1331 event: &BufferDiffEvent,
1332 cx: &mut Context<Self>,
1333 ) {
1334 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1335 let buffer_id = diff.read(cx).buffer_id;
1336 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1337 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1338 diff_state.hunk_staging_operation_count += 1;
1339 diff_state.hunk_staging_operation_count
1340 });
1341 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1342 let recv = repo.update(cx, |repo, cx| {
1343 log::debug!("hunks changed for {}", path.display());
1344 repo.spawn_set_index_text_job(
1345 path,
1346 new_index_text.as_ref().map(|rope| rope.to_string()),
1347 Some(hunk_staging_operation_count),
1348 cx,
1349 )
1350 });
1351 let diff = diff.downgrade();
1352 cx.spawn(async move |this, cx| {
1353 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1354 diff.update(cx, |diff, cx| {
1355 diff.clear_pending_hunks(cx);
1356 })
1357 .ok();
1358 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1359 .ok();
1360 }
1361 })
1362 .detach();
1363 }
1364 }
1365 }
1366 }
1367
1368 fn local_worktree_git_repos_changed(
1369 &mut self,
1370 worktree: Entity<Worktree>,
1371 changed_repos: &UpdatedGitRepositoriesSet,
1372 cx: &mut Context<Self>,
1373 ) {
1374 log::debug!("local worktree repos changed");
1375 debug_assert!(worktree.read(cx).is_local());
1376
1377 for repository in self.repositories.values() {
1378 repository.update(cx, |repository, cx| {
1379 let repo_abs_path = &repository.work_directory_abs_path;
1380 if changed_repos.iter().any(|update| {
1381 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1382 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1383 }) {
1384 repository.reload_buffer_diff_bases(cx);
1385 }
1386 });
1387 }
1388 }
1389
1390 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1391 &self.repositories
1392 }
1393
1394 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1395 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1396 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1397 Some(status.status)
1398 }
1399
1400 pub fn repository_and_path_for_buffer_id(
1401 &self,
1402 buffer_id: BufferId,
1403 cx: &App,
1404 ) -> Option<(Entity<Repository>, RepoPath)> {
1405 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1406 let project_path = buffer.read(cx).project_path(cx)?;
1407 self.repository_and_path_for_project_path(&project_path, cx)
1408 }
1409
1410 pub fn repository_and_path_for_project_path(
1411 &self,
1412 path: &ProjectPath,
1413 cx: &App,
1414 ) -> Option<(Entity<Repository>, RepoPath)> {
1415 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1416 self.repositories
1417 .values()
1418 .filter_map(|repo| {
1419 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1420 Some((repo.clone(), repo_path))
1421 })
1422 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1423 }
1424
1425 pub fn git_init(
1426 &self,
1427 path: Arc<Path>,
1428 fallback_branch_name: String,
1429 cx: &App,
1430 ) -> Task<Result<()>> {
1431 match &self.state {
1432 GitStoreState::Local { fs, .. } => {
1433 let fs = fs.clone();
1434 cx.background_executor()
1435 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1436 }
1437 GitStoreState::Ssh {
1438 upstream_client,
1439 upstream_project_id: project_id,
1440 ..
1441 }
1442 | GitStoreState::Remote {
1443 upstream_client,
1444 upstream_project_id: project_id,
1445 ..
1446 } => {
1447 let client = upstream_client.clone();
1448 let project_id = *project_id;
1449 cx.background_executor().spawn(async move {
1450 client
1451 .request(proto::GitInit {
1452 project_id: project_id.0,
1453 abs_path: path.to_string_lossy().to_string(),
1454 fallback_branch_name,
1455 })
1456 .await?;
1457 Ok(())
1458 })
1459 }
1460 }
1461 }
1462
1463 pub fn git_clone(
1464 &self,
1465 repo: String,
1466 path: impl Into<Arc<std::path::Path>>,
1467 cx: &App,
1468 ) -> Task<Result<()>> {
1469 let path = path.into();
1470 match &self.state {
1471 GitStoreState::Local { fs, .. } => {
1472 let fs = fs.clone();
1473 cx.background_executor()
1474 .spawn(async move { fs.git_clone(&repo, &path).await })
1475 }
1476 GitStoreState::Ssh {
1477 upstream_client,
1478 upstream_project_id,
1479 ..
1480 } => {
1481 let request = upstream_client.request(proto::GitClone {
1482 project_id: upstream_project_id.0,
1483 abs_path: path.to_string_lossy().to_string(),
1484 remote_repo: repo,
1485 });
1486
1487 cx.background_spawn(async move {
1488 let result = request.await?;
1489
1490 match result.success {
1491 true => Ok(()),
1492 false => Err(anyhow!("Git Clone failed")),
1493 }
1494 })
1495 }
1496 GitStoreState::Remote { .. } => {
1497 Task::ready(Err(anyhow!("Git Clone isn't supported for remote users")))
1498 }
1499 }
1500 }
1501
1502 async fn handle_update_repository(
1503 this: Entity<Self>,
1504 envelope: TypedEnvelope<proto::UpdateRepository>,
1505 mut cx: AsyncApp,
1506 ) -> Result<()> {
1507 this.update(&mut cx, |this, cx| {
1508 let mut update = envelope.payload;
1509
1510 let id = RepositoryId::from_proto(update.id);
1511 let client = this
1512 .upstream_client()
1513 .context("no upstream client")?
1514 .clone();
1515
1516 let mut is_new = false;
1517 let repo = this.repositories.entry(id).or_insert_with(|| {
1518 is_new = true;
1519 let git_store = cx.weak_entity();
1520 cx.new(|cx| {
1521 Repository::remote(
1522 id,
1523 Path::new(&update.abs_path).into(),
1524 ProjectId(update.project_id),
1525 client,
1526 git_store,
1527 cx,
1528 )
1529 })
1530 });
1531 if is_new {
1532 this._subscriptions
1533 .push(cx.subscribe(repo, Self::on_repository_event))
1534 }
1535
1536 repo.update(cx, {
1537 let update = update.clone();
1538 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1539 })?;
1540
1541 this.active_repo_id.get_or_insert_with(|| {
1542 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1543 id
1544 });
1545
1546 if let Some((client, project_id)) = this.downstream_client() {
1547 update.project_id = project_id.to_proto();
1548 client.send(update).log_err();
1549 }
1550 Ok(())
1551 })?
1552 }
1553
1554 async fn handle_remove_repository(
1555 this: Entity<Self>,
1556 envelope: TypedEnvelope<proto::RemoveRepository>,
1557 mut cx: AsyncApp,
1558 ) -> Result<()> {
1559 this.update(&mut cx, |this, cx| {
1560 let mut update = envelope.payload;
1561 let id = RepositoryId::from_proto(update.id);
1562 this.repositories.remove(&id);
1563 if let Some((client, project_id)) = this.downstream_client() {
1564 update.project_id = project_id.to_proto();
1565 client.send(update).log_err();
1566 }
1567 if this.active_repo_id == Some(id) {
1568 this.active_repo_id = None;
1569 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1570 }
1571 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1572 })
1573 }
1574
1575 async fn handle_git_init(
1576 this: Entity<Self>,
1577 envelope: TypedEnvelope<proto::GitInit>,
1578 cx: AsyncApp,
1579 ) -> Result<proto::Ack> {
1580 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1581 let name = envelope.payload.fallback_branch_name;
1582 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1583 .await?;
1584
1585 Ok(proto::Ack {})
1586 }
1587
1588 async fn handle_git_clone(
1589 this: Entity<Self>,
1590 envelope: TypedEnvelope<proto::GitClone>,
1591 cx: AsyncApp,
1592 ) -> Result<proto::GitCloneResponse> {
1593 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1594 let repo_name = envelope.payload.remote_repo;
1595 let result = cx
1596 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1597 .await;
1598
1599 Ok(proto::GitCloneResponse {
1600 success: result.is_ok(),
1601 })
1602 }
1603
1604 async fn handle_fetch(
1605 this: Entity<Self>,
1606 envelope: TypedEnvelope<proto::Fetch>,
1607 mut cx: AsyncApp,
1608 ) -> Result<proto::RemoteMessageResponse> {
1609 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1610 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1611 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1612 let askpass_id = envelope.payload.askpass_id;
1613
1614 let askpass = make_remote_delegate(
1615 this,
1616 envelope.payload.project_id,
1617 repository_id,
1618 askpass_id,
1619 &mut cx,
1620 );
1621
1622 let remote_output = repository_handle
1623 .update(&mut cx, |repository_handle, cx| {
1624 repository_handle.fetch(fetch_options, askpass, cx)
1625 })?
1626 .await??;
1627
1628 Ok(proto::RemoteMessageResponse {
1629 stdout: remote_output.stdout,
1630 stderr: remote_output.stderr,
1631 })
1632 }
1633
1634 async fn handle_push(
1635 this: Entity<Self>,
1636 envelope: TypedEnvelope<proto::Push>,
1637 mut cx: AsyncApp,
1638 ) -> Result<proto::RemoteMessageResponse> {
1639 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1640 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1641
1642 let askpass_id = envelope.payload.askpass_id;
1643 let askpass = make_remote_delegate(
1644 this,
1645 envelope.payload.project_id,
1646 repository_id,
1647 askpass_id,
1648 &mut cx,
1649 );
1650
1651 let options = envelope
1652 .payload
1653 .options
1654 .as_ref()
1655 .map(|_| match envelope.payload.options() {
1656 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1657 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1658 });
1659
1660 let branch_name = envelope.payload.branch_name.into();
1661 let remote_name = envelope.payload.remote_name.into();
1662
1663 let remote_output = repository_handle
1664 .update(&mut cx, |repository_handle, cx| {
1665 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1666 })?
1667 .await??;
1668 Ok(proto::RemoteMessageResponse {
1669 stdout: remote_output.stdout,
1670 stderr: remote_output.stderr,
1671 })
1672 }
1673
1674 async fn handle_pull(
1675 this: Entity<Self>,
1676 envelope: TypedEnvelope<proto::Pull>,
1677 mut cx: AsyncApp,
1678 ) -> Result<proto::RemoteMessageResponse> {
1679 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1680 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1681 let askpass_id = envelope.payload.askpass_id;
1682 let askpass = make_remote_delegate(
1683 this,
1684 envelope.payload.project_id,
1685 repository_id,
1686 askpass_id,
1687 &mut cx,
1688 );
1689
1690 let branch_name = envelope.payload.branch_name.into();
1691 let remote_name = envelope.payload.remote_name.into();
1692
1693 let remote_message = repository_handle
1694 .update(&mut cx, |repository_handle, cx| {
1695 repository_handle.pull(branch_name, remote_name, askpass, cx)
1696 })?
1697 .await??;
1698
1699 Ok(proto::RemoteMessageResponse {
1700 stdout: remote_message.stdout,
1701 stderr: remote_message.stderr,
1702 })
1703 }
1704
1705 async fn handle_stage(
1706 this: Entity<Self>,
1707 envelope: TypedEnvelope<proto::Stage>,
1708 mut cx: AsyncApp,
1709 ) -> Result<proto::Ack> {
1710 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1711 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1712
1713 let entries = envelope
1714 .payload
1715 .paths
1716 .into_iter()
1717 .map(PathBuf::from)
1718 .map(RepoPath::new)
1719 .collect();
1720
1721 repository_handle
1722 .update(&mut cx, |repository_handle, cx| {
1723 repository_handle.stage_entries(entries, cx)
1724 })?
1725 .await?;
1726 Ok(proto::Ack {})
1727 }
1728
1729 async fn handle_unstage(
1730 this: Entity<Self>,
1731 envelope: TypedEnvelope<proto::Unstage>,
1732 mut cx: AsyncApp,
1733 ) -> Result<proto::Ack> {
1734 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1735 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1736
1737 let entries = envelope
1738 .payload
1739 .paths
1740 .into_iter()
1741 .map(PathBuf::from)
1742 .map(RepoPath::new)
1743 .collect();
1744
1745 repository_handle
1746 .update(&mut cx, |repository_handle, cx| {
1747 repository_handle.unstage_entries(entries, cx)
1748 })?
1749 .await?;
1750
1751 Ok(proto::Ack {})
1752 }
1753
1754 async fn handle_stash(
1755 this: Entity<Self>,
1756 envelope: TypedEnvelope<proto::Stash>,
1757 mut cx: AsyncApp,
1758 ) -> Result<proto::Ack> {
1759 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1760 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1761
1762 let entries = envelope
1763 .payload
1764 .paths
1765 .into_iter()
1766 .map(PathBuf::from)
1767 .map(RepoPath::new)
1768 .collect();
1769
1770 repository_handle
1771 .update(&mut cx, |repository_handle, cx| {
1772 repository_handle.stash_entries(entries, cx)
1773 })?
1774 .await?;
1775
1776 Ok(proto::Ack {})
1777 }
1778
1779 async fn handle_stash_pop(
1780 this: Entity<Self>,
1781 envelope: TypedEnvelope<proto::StashPop>,
1782 mut cx: AsyncApp,
1783 ) -> Result<proto::Ack> {
1784 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1785 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1786
1787 repository_handle
1788 .update(&mut cx, |repository_handle, cx| {
1789 repository_handle.stash_pop(cx)
1790 })?
1791 .await?;
1792
1793 Ok(proto::Ack {})
1794 }
1795
1796 async fn handle_set_index_text(
1797 this: Entity<Self>,
1798 envelope: TypedEnvelope<proto::SetIndexText>,
1799 mut cx: AsyncApp,
1800 ) -> Result<proto::Ack> {
1801 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1802 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1803 let repo_path = RepoPath::from_str(&envelope.payload.path);
1804
1805 repository_handle
1806 .update(&mut cx, |repository_handle, cx| {
1807 repository_handle.spawn_set_index_text_job(
1808 repo_path,
1809 envelope.payload.text,
1810 None,
1811 cx,
1812 )
1813 })?
1814 .await??;
1815 Ok(proto::Ack {})
1816 }
1817
1818 async fn handle_commit(
1819 this: Entity<Self>,
1820 envelope: TypedEnvelope<proto::Commit>,
1821 mut cx: AsyncApp,
1822 ) -> Result<proto::Ack> {
1823 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1824 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1825
1826 let message = SharedString::from(envelope.payload.message);
1827 let name = envelope.payload.name.map(SharedString::from);
1828 let email = envelope.payload.email.map(SharedString::from);
1829 let options = envelope.payload.options.unwrap_or_default();
1830
1831 repository_handle
1832 .update(&mut cx, |repository_handle, cx| {
1833 repository_handle.commit(
1834 message,
1835 name.zip(email),
1836 CommitOptions {
1837 amend: options.amend,
1838 signoff: options.signoff,
1839 },
1840 cx,
1841 )
1842 })?
1843 .await??;
1844 Ok(proto::Ack {})
1845 }
1846
1847 async fn handle_get_remotes(
1848 this: Entity<Self>,
1849 envelope: TypedEnvelope<proto::GetRemotes>,
1850 mut cx: AsyncApp,
1851 ) -> Result<proto::GetRemotesResponse> {
1852 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1853 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1854
1855 let branch_name = envelope.payload.branch_name;
1856
1857 let remotes = repository_handle
1858 .update(&mut cx, |repository_handle, _| {
1859 repository_handle.get_remotes(branch_name)
1860 })?
1861 .await??;
1862
1863 Ok(proto::GetRemotesResponse {
1864 remotes: remotes
1865 .into_iter()
1866 .map(|remotes| proto::get_remotes_response::Remote {
1867 name: remotes.name.to_string(),
1868 })
1869 .collect::<Vec<_>>(),
1870 })
1871 }
1872
1873 async fn handle_get_branches(
1874 this: Entity<Self>,
1875 envelope: TypedEnvelope<proto::GitGetBranches>,
1876 mut cx: AsyncApp,
1877 ) -> Result<proto::GitBranchesResponse> {
1878 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1879 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1880
1881 let branches = repository_handle
1882 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1883 .await??;
1884
1885 Ok(proto::GitBranchesResponse {
1886 branches: branches
1887 .into_iter()
1888 .map(|branch| branch_to_proto(&branch))
1889 .collect::<Vec<_>>(),
1890 })
1891 }
1892 async fn handle_get_default_branch(
1893 this: Entity<Self>,
1894 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1895 mut cx: AsyncApp,
1896 ) -> Result<proto::GetDefaultBranchResponse> {
1897 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1898 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1899
1900 let branch = repository_handle
1901 .update(&mut cx, |repository_handle, _| {
1902 repository_handle.default_branch()
1903 })?
1904 .await??
1905 .map(Into::into);
1906
1907 Ok(proto::GetDefaultBranchResponse { branch })
1908 }
1909 async fn handle_create_branch(
1910 this: Entity<Self>,
1911 envelope: TypedEnvelope<proto::GitCreateBranch>,
1912 mut cx: AsyncApp,
1913 ) -> Result<proto::Ack> {
1914 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1915 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1916 let branch_name = envelope.payload.branch_name;
1917
1918 repository_handle
1919 .update(&mut cx, |repository_handle, _| {
1920 repository_handle.create_branch(branch_name)
1921 })?
1922 .await??;
1923
1924 Ok(proto::Ack {})
1925 }
1926
1927 async fn handle_change_branch(
1928 this: Entity<Self>,
1929 envelope: TypedEnvelope<proto::GitChangeBranch>,
1930 mut cx: AsyncApp,
1931 ) -> Result<proto::Ack> {
1932 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1933 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1934 let branch_name = envelope.payload.branch_name;
1935
1936 repository_handle
1937 .update(&mut cx, |repository_handle, _| {
1938 repository_handle.change_branch(branch_name)
1939 })?
1940 .await??;
1941
1942 Ok(proto::Ack {})
1943 }
1944
1945 async fn handle_show(
1946 this: Entity<Self>,
1947 envelope: TypedEnvelope<proto::GitShow>,
1948 mut cx: AsyncApp,
1949 ) -> Result<proto::GitCommitDetails> {
1950 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1951 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1952
1953 let commit = repository_handle
1954 .update(&mut cx, |repository_handle, _| {
1955 repository_handle.show(envelope.payload.commit)
1956 })?
1957 .await??;
1958 Ok(proto::GitCommitDetails {
1959 sha: commit.sha.into(),
1960 message: commit.message.into(),
1961 commit_timestamp: commit.commit_timestamp,
1962 author_email: commit.author_email.into(),
1963 author_name: commit.author_name.into(),
1964 })
1965 }
1966
1967 async fn handle_load_commit_diff(
1968 this: Entity<Self>,
1969 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1970 mut cx: AsyncApp,
1971 ) -> Result<proto::LoadCommitDiffResponse> {
1972 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1973 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1974
1975 let commit_diff = repository_handle
1976 .update(&mut cx, |repository_handle, _| {
1977 repository_handle.load_commit_diff(envelope.payload.commit)
1978 })?
1979 .await??;
1980 Ok(proto::LoadCommitDiffResponse {
1981 files: commit_diff
1982 .files
1983 .into_iter()
1984 .map(|file| proto::CommitFile {
1985 path: file.path.to_string(),
1986 old_text: file.old_text,
1987 new_text: file.new_text,
1988 })
1989 .collect(),
1990 })
1991 }
1992
1993 async fn handle_reset(
1994 this: Entity<Self>,
1995 envelope: TypedEnvelope<proto::GitReset>,
1996 mut cx: AsyncApp,
1997 ) -> Result<proto::Ack> {
1998 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1999 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2000
2001 let mode = match envelope.payload.mode() {
2002 git_reset::ResetMode::Soft => ResetMode::Soft,
2003 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2004 };
2005
2006 repository_handle
2007 .update(&mut cx, |repository_handle, cx| {
2008 repository_handle.reset(envelope.payload.commit, mode, cx)
2009 })?
2010 .await??;
2011 Ok(proto::Ack {})
2012 }
2013
2014 async fn handle_checkout_files(
2015 this: Entity<Self>,
2016 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2017 mut cx: AsyncApp,
2018 ) -> Result<proto::Ack> {
2019 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2020 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2021 let paths = envelope
2022 .payload
2023 .paths
2024 .iter()
2025 .map(|s| RepoPath::from_str(s))
2026 .collect();
2027
2028 repository_handle
2029 .update(&mut cx, |repository_handle, cx| {
2030 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2031 })?
2032 .await??;
2033 Ok(proto::Ack {})
2034 }
2035
2036 async fn handle_open_commit_message_buffer(
2037 this: Entity<Self>,
2038 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2039 mut cx: AsyncApp,
2040 ) -> Result<proto::OpenBufferResponse> {
2041 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2042 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2043 let buffer = repository
2044 .update(&mut cx, |repository, cx| {
2045 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2046 })?
2047 .await?;
2048
2049 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2050 this.update(&mut cx, |this, cx| {
2051 this.buffer_store.update(cx, |buffer_store, cx| {
2052 buffer_store
2053 .create_buffer_for_peer(
2054 &buffer,
2055 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2056 cx,
2057 )
2058 .detach_and_log_err(cx);
2059 })
2060 })?;
2061
2062 Ok(proto::OpenBufferResponse {
2063 buffer_id: buffer_id.to_proto(),
2064 })
2065 }
2066
2067 async fn handle_askpass(
2068 this: Entity<Self>,
2069 envelope: TypedEnvelope<proto::AskPassRequest>,
2070 mut cx: AsyncApp,
2071 ) -> Result<proto::AskPassResponse> {
2072 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2073 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2074
2075 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2076 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2077 debug_panic!("no askpass found");
2078 anyhow::bail!("no askpass found");
2079 };
2080
2081 let response = askpass.ask_password(envelope.payload.prompt).await?;
2082
2083 delegates
2084 .lock()
2085 .insert(envelope.payload.askpass_id, askpass);
2086
2087 Ok(proto::AskPassResponse { response })
2088 }
2089
2090 async fn handle_check_for_pushed_commits(
2091 this: Entity<Self>,
2092 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2093 mut cx: AsyncApp,
2094 ) -> Result<proto::CheckForPushedCommitsResponse> {
2095 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2096 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2097
2098 let branches = repository_handle
2099 .update(&mut cx, |repository_handle, _| {
2100 repository_handle.check_for_pushed_commits()
2101 })?
2102 .await??;
2103 Ok(proto::CheckForPushedCommitsResponse {
2104 pushed_to: branches
2105 .into_iter()
2106 .map(|commit| commit.to_string())
2107 .collect(),
2108 })
2109 }
2110
2111 async fn handle_git_diff(
2112 this: Entity<Self>,
2113 envelope: TypedEnvelope<proto::GitDiff>,
2114 mut cx: AsyncApp,
2115 ) -> Result<proto::GitDiffResponse> {
2116 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2117 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2118 let diff_type = match envelope.payload.diff_type() {
2119 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2120 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2121 };
2122
2123 let mut diff = repository_handle
2124 .update(&mut cx, |repository_handle, cx| {
2125 repository_handle.diff(diff_type, cx)
2126 })?
2127 .await??;
2128 const ONE_MB: usize = 1_000_000;
2129 if diff.len() > ONE_MB {
2130 diff = diff.chars().take(ONE_MB).collect()
2131 }
2132
2133 Ok(proto::GitDiffResponse { diff })
2134 }
2135
2136 async fn handle_open_unstaged_diff(
2137 this: Entity<Self>,
2138 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2139 mut cx: AsyncApp,
2140 ) -> Result<proto::OpenUnstagedDiffResponse> {
2141 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2142 let diff = this
2143 .update(&mut cx, |this, cx| {
2144 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2145 Some(this.open_unstaged_diff(buffer, cx))
2146 })?
2147 .context("missing buffer")?
2148 .await?;
2149 this.update(&mut cx, |this, _| {
2150 let shared_diffs = this
2151 .shared_diffs
2152 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2153 .or_default();
2154 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2155 })?;
2156 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2157 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2158 }
2159
2160 async fn handle_open_uncommitted_diff(
2161 this: Entity<Self>,
2162 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2163 mut cx: AsyncApp,
2164 ) -> Result<proto::OpenUncommittedDiffResponse> {
2165 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2166 let diff = this
2167 .update(&mut cx, |this, cx| {
2168 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2169 Some(this.open_uncommitted_diff(buffer, cx))
2170 })?
2171 .context("missing buffer")?
2172 .await?;
2173 this.update(&mut cx, |this, _| {
2174 let shared_diffs = this
2175 .shared_diffs
2176 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2177 .or_default();
2178 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2179 })?;
2180 diff.read_with(&cx, |diff, cx| {
2181 use proto::open_uncommitted_diff_response::Mode;
2182
2183 let unstaged_diff = diff.secondary_diff();
2184 let index_snapshot = unstaged_diff.and_then(|diff| {
2185 let diff = diff.read(cx);
2186 diff.base_text_exists().then(|| diff.base_text())
2187 });
2188
2189 let mode;
2190 let staged_text;
2191 let committed_text;
2192 if diff.base_text_exists() {
2193 let committed_snapshot = diff.base_text();
2194 committed_text = Some(committed_snapshot.text());
2195 if let Some(index_text) = index_snapshot {
2196 if index_text.remote_id() == committed_snapshot.remote_id() {
2197 mode = Mode::IndexMatchesHead;
2198 staged_text = None;
2199 } else {
2200 mode = Mode::IndexAndHead;
2201 staged_text = Some(index_text.text());
2202 }
2203 } else {
2204 mode = Mode::IndexAndHead;
2205 staged_text = None;
2206 }
2207 } else {
2208 mode = Mode::IndexAndHead;
2209 committed_text = None;
2210 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2211 }
2212
2213 proto::OpenUncommittedDiffResponse {
2214 committed_text,
2215 staged_text,
2216 mode: mode.into(),
2217 }
2218 })
2219 }
2220
2221 async fn handle_update_diff_bases(
2222 this: Entity<Self>,
2223 request: TypedEnvelope<proto::UpdateDiffBases>,
2224 mut cx: AsyncApp,
2225 ) -> Result<()> {
2226 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2227 this.update(&mut cx, |this, cx| {
2228 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2229 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2230 {
2231 let buffer = buffer.read(cx).text_snapshot();
2232 diff_state.update(cx, |diff_state, cx| {
2233 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2234 })
2235 }
2236 })
2237 }
2238
2239 async fn handle_blame_buffer(
2240 this: Entity<Self>,
2241 envelope: TypedEnvelope<proto::BlameBuffer>,
2242 mut cx: AsyncApp,
2243 ) -> Result<proto::BlameBufferResponse> {
2244 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2245 let version = deserialize_version(&envelope.payload.version);
2246 let buffer = this.read_with(&cx, |this, cx| {
2247 this.buffer_store.read(cx).get_existing(buffer_id)
2248 })??;
2249 buffer
2250 .update(&mut cx, |buffer, _| {
2251 buffer.wait_for_version(version.clone())
2252 })?
2253 .await?;
2254 let blame = this
2255 .update(&mut cx, |this, cx| {
2256 this.blame_buffer(&buffer, Some(version), cx)
2257 })?
2258 .await?;
2259 Ok(serialize_blame_buffer_response(blame))
2260 }
2261
2262 async fn handle_get_permalink_to_line(
2263 this: Entity<Self>,
2264 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2265 mut cx: AsyncApp,
2266 ) -> Result<proto::GetPermalinkToLineResponse> {
2267 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2268 // let version = deserialize_version(&envelope.payload.version);
2269 let selection = {
2270 let proto_selection = envelope
2271 .payload
2272 .selection
2273 .context("no selection to get permalink for defined")?;
2274 proto_selection.start as u32..proto_selection.end as u32
2275 };
2276 let buffer = this.read_with(&cx, |this, cx| {
2277 this.buffer_store.read(cx).get_existing(buffer_id)
2278 })??;
2279 let permalink = this
2280 .update(&mut cx, |this, cx| {
2281 this.get_permalink_to_line(&buffer, selection, cx)
2282 })?
2283 .await?;
2284 Ok(proto::GetPermalinkToLineResponse {
2285 permalink: permalink.to_string(),
2286 })
2287 }
2288
2289 fn repository_for_request(
2290 this: &Entity<Self>,
2291 id: RepositoryId,
2292 cx: &mut AsyncApp,
2293 ) -> Result<Entity<Repository>> {
2294 this.read_with(cx, |this, _| {
2295 this.repositories
2296 .get(&id)
2297 .context("missing repository handle")
2298 .cloned()
2299 })?
2300 }
2301
2302 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2303 self.repositories
2304 .iter()
2305 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2306 .collect()
2307 }
2308
2309 fn process_updated_entries(
2310 &self,
2311 worktree: &Entity<Worktree>,
2312 updated_entries: &[(Arc<Path>, ProjectEntryId, PathChange)],
2313 cx: &mut App,
2314 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2315 let mut repo_paths = self
2316 .repositories
2317 .values()
2318 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2319 .collect::<Vec<_>>();
2320 let mut entries: Vec<_> = updated_entries
2321 .iter()
2322 .map(|(path, _, _)| path.clone())
2323 .collect();
2324 entries.sort();
2325 let worktree = worktree.read(cx);
2326
2327 let entries = entries
2328 .into_iter()
2329 .filter_map(|path| worktree.absolutize(&path).ok())
2330 .collect::<Arc<[_]>>();
2331
2332 let executor = cx.background_executor().clone();
2333 cx.background_executor().spawn(async move {
2334 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2335 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2336 let mut tasks = FuturesOrdered::new();
2337 for (repo_path, repo) in repo_paths.into_iter().rev() {
2338 let entries = entries.clone();
2339 let task = executor.spawn(async move {
2340 // Find all repository paths that belong to this repo
2341 let mut ix = entries.partition_point(|path| path < &*repo_path);
2342 if ix == entries.len() {
2343 return None;
2344 };
2345
2346 let mut paths = Vec::new();
2347 // All paths prefixed by a given repo will constitute a continuous range.
2348 while let Some(path) = entries.get(ix)
2349 && let Some(repo_path) =
2350 RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, path)
2351 {
2352 paths.push((repo_path, ix));
2353 ix += 1;
2354 }
2355 if paths.is_empty() {
2356 None
2357 } else {
2358 Some((repo, paths))
2359 }
2360 });
2361 tasks.push_back(task);
2362 }
2363
2364 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2365 let mut path_was_used = vec![false; entries.len()];
2366 let tasks = tasks.collect::<Vec<_>>().await;
2367 // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
2368 // We always want to assign a path to it's innermost repository.
2369 for t in tasks {
2370 let Some((repo, paths)) = t else {
2371 continue;
2372 };
2373 let entry = paths_by_git_repo.entry(repo).or_default();
2374 for (repo_path, ix) in paths {
2375 if path_was_used[ix] {
2376 continue;
2377 }
2378 path_was_used[ix] = true;
2379 entry.push(repo_path);
2380 }
2381 }
2382
2383 paths_by_git_repo
2384 })
2385 }
2386}
2387
2388impl BufferGitState {
2389 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2390 Self {
2391 unstaged_diff: Default::default(),
2392 uncommitted_diff: Default::default(),
2393 recalculate_diff_task: Default::default(),
2394 language: Default::default(),
2395 language_registry: Default::default(),
2396 recalculating_tx: postage::watch::channel_with(false).0,
2397 hunk_staging_operation_count: 0,
2398 hunk_staging_operation_count_as_of_write: 0,
2399 head_text: Default::default(),
2400 index_text: Default::default(),
2401 head_changed: Default::default(),
2402 index_changed: Default::default(),
2403 language_changed: Default::default(),
2404 conflict_updated_futures: Default::default(),
2405 conflict_set: Default::default(),
2406 reparse_conflict_markers_task: Default::default(),
2407 }
2408 }
2409
2410 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2411 self.language = buffer.read(cx).language().cloned();
2412 self.language_changed = true;
2413 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2414 }
2415
2416 fn reparse_conflict_markers(
2417 &mut self,
2418 buffer: text::BufferSnapshot,
2419 cx: &mut Context<Self>,
2420 ) -> oneshot::Receiver<()> {
2421 let (tx, rx) = oneshot::channel();
2422
2423 let Some(conflict_set) = self
2424 .conflict_set
2425 .as_ref()
2426 .and_then(|conflict_set| conflict_set.upgrade())
2427 else {
2428 return rx;
2429 };
2430
2431 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2432 if conflict_set.has_conflict {
2433 Some(conflict_set.snapshot())
2434 } else {
2435 None
2436 }
2437 });
2438
2439 if let Some(old_snapshot) = old_snapshot {
2440 self.conflict_updated_futures.push(tx);
2441 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2442 let (snapshot, changed_range) = cx
2443 .background_spawn(async move {
2444 let new_snapshot = ConflictSet::parse(&buffer);
2445 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2446 (new_snapshot, changed_range)
2447 })
2448 .await;
2449 this.update(cx, |this, cx| {
2450 if let Some(conflict_set) = &this.conflict_set {
2451 conflict_set
2452 .update(cx, |conflict_set, cx| {
2453 conflict_set.set_snapshot(snapshot, changed_range, cx);
2454 })
2455 .ok();
2456 }
2457 let futures = std::mem::take(&mut this.conflict_updated_futures);
2458 for tx in futures {
2459 tx.send(()).ok();
2460 }
2461 })
2462 }))
2463 }
2464
2465 rx
2466 }
2467
2468 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2469 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2470 }
2471
2472 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2473 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2474 }
2475
2476 fn handle_base_texts_updated(
2477 &mut self,
2478 buffer: text::BufferSnapshot,
2479 message: proto::UpdateDiffBases,
2480 cx: &mut Context<Self>,
2481 ) {
2482 use proto::update_diff_bases::Mode;
2483
2484 let Some(mode) = Mode::from_i32(message.mode) else {
2485 return;
2486 };
2487
2488 let diff_bases_change = match mode {
2489 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2490 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2491 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2492 Mode::IndexAndHead => DiffBasesChange::SetEach {
2493 index: message.staged_text,
2494 head: message.committed_text,
2495 },
2496 };
2497
2498 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2499 }
2500
2501 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2502 if *self.recalculating_tx.borrow() {
2503 let mut rx = self.recalculating_tx.subscribe();
2504 return Some(async move {
2505 loop {
2506 let is_recalculating = rx.recv().await;
2507 if is_recalculating != Some(true) {
2508 break;
2509 }
2510 }
2511 });
2512 } else {
2513 None
2514 }
2515 }
2516
2517 fn diff_bases_changed(
2518 &mut self,
2519 buffer: text::BufferSnapshot,
2520 diff_bases_change: Option<DiffBasesChange>,
2521 cx: &mut Context<Self>,
2522 ) {
2523 match diff_bases_change {
2524 Some(DiffBasesChange::SetIndex(index)) => {
2525 self.index_text = index.map(|mut index| {
2526 text::LineEnding::normalize(&mut index);
2527 Arc::new(index)
2528 });
2529 self.index_changed = true;
2530 }
2531 Some(DiffBasesChange::SetHead(head)) => {
2532 self.head_text = head.map(|mut head| {
2533 text::LineEnding::normalize(&mut head);
2534 Arc::new(head)
2535 });
2536 self.head_changed = true;
2537 }
2538 Some(DiffBasesChange::SetBoth(text)) => {
2539 let text = text.map(|mut text| {
2540 text::LineEnding::normalize(&mut text);
2541 Arc::new(text)
2542 });
2543 self.head_text = text.clone();
2544 self.index_text = text;
2545 self.head_changed = true;
2546 self.index_changed = true;
2547 }
2548 Some(DiffBasesChange::SetEach { index, head }) => {
2549 self.index_text = index.map(|mut index| {
2550 text::LineEnding::normalize(&mut index);
2551 Arc::new(index)
2552 });
2553 self.index_changed = true;
2554 self.head_text = head.map(|mut head| {
2555 text::LineEnding::normalize(&mut head);
2556 Arc::new(head)
2557 });
2558 self.head_changed = true;
2559 }
2560 None => {}
2561 }
2562
2563 self.recalculate_diffs(buffer, cx)
2564 }
2565
2566 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2567 *self.recalculating_tx.borrow_mut() = true;
2568
2569 let language = self.language.clone();
2570 let language_registry = self.language_registry.clone();
2571 let unstaged_diff = self.unstaged_diff();
2572 let uncommitted_diff = self.uncommitted_diff();
2573 let head = self.head_text.clone();
2574 let index = self.index_text.clone();
2575 let index_changed = self.index_changed;
2576 let head_changed = self.head_changed;
2577 let language_changed = self.language_changed;
2578 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2579 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2580 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2581 (None, None) => true,
2582 _ => false,
2583 };
2584 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2585 log::debug!(
2586 "start recalculating diffs for buffer {}",
2587 buffer.remote_id()
2588 );
2589
2590 let mut new_unstaged_diff = None;
2591 if let Some(unstaged_diff) = &unstaged_diff {
2592 new_unstaged_diff = Some(
2593 BufferDiff::update_diff(
2594 unstaged_diff.clone(),
2595 buffer.clone(),
2596 index,
2597 index_changed,
2598 language_changed,
2599 language.clone(),
2600 language_registry.clone(),
2601 cx,
2602 )
2603 .await?,
2604 );
2605 }
2606
2607 let mut new_uncommitted_diff = None;
2608 if let Some(uncommitted_diff) = &uncommitted_diff {
2609 new_uncommitted_diff = if index_matches_head {
2610 new_unstaged_diff.clone()
2611 } else {
2612 Some(
2613 BufferDiff::update_diff(
2614 uncommitted_diff.clone(),
2615 buffer.clone(),
2616 head,
2617 head_changed,
2618 language_changed,
2619 language.clone(),
2620 language_registry.clone(),
2621 cx,
2622 )
2623 .await?,
2624 )
2625 }
2626 }
2627
2628 let cancel = this.update(cx, |this, _| {
2629 // This checks whether all pending stage/unstage operations
2630 // have quiesced (i.e. both the corresponding write and the
2631 // read of that write have completed). If not, then we cancel
2632 // this recalculation attempt to avoid invalidating pending
2633 // state too quickly; another recalculation will come along
2634 // later and clear the pending state once the state of the index has settled.
2635 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2636 *this.recalculating_tx.borrow_mut() = false;
2637 true
2638 } else {
2639 false
2640 }
2641 })?;
2642 if cancel {
2643 log::debug!(
2644 concat!(
2645 "aborting recalculating diffs for buffer {}",
2646 "due to subsequent hunk operations",
2647 ),
2648 buffer.remote_id()
2649 );
2650 return Ok(());
2651 }
2652
2653 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2654 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2655 {
2656 unstaged_diff.update(cx, |diff, cx| {
2657 if language_changed {
2658 diff.language_changed(cx);
2659 }
2660 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2661 })?
2662 } else {
2663 None
2664 };
2665
2666 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2667 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2668 {
2669 uncommitted_diff.update(cx, |diff, cx| {
2670 if language_changed {
2671 diff.language_changed(cx);
2672 }
2673 diff.set_snapshot_with_secondary(
2674 new_uncommitted_diff,
2675 &buffer,
2676 unstaged_changed_range,
2677 true,
2678 cx,
2679 );
2680 })?;
2681 }
2682
2683 log::debug!(
2684 "finished recalculating diffs for buffer {}",
2685 buffer.remote_id()
2686 );
2687
2688 if let Some(this) = this.upgrade() {
2689 this.update(cx, |this, _| {
2690 this.index_changed = false;
2691 this.head_changed = false;
2692 this.language_changed = false;
2693 *this.recalculating_tx.borrow_mut() = false;
2694 })?;
2695 }
2696
2697 Ok(())
2698 }));
2699 }
2700}
2701
2702fn make_remote_delegate(
2703 this: Entity<GitStore>,
2704 project_id: u64,
2705 repository_id: RepositoryId,
2706 askpass_id: u64,
2707 cx: &mut AsyncApp,
2708) -> AskPassDelegate {
2709 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2710 this.update(cx, |this, cx| {
2711 let Some((client, _)) = this.downstream_client() else {
2712 return;
2713 };
2714 let response = client.request(proto::AskPassRequest {
2715 project_id,
2716 repository_id: repository_id.to_proto(),
2717 askpass_id,
2718 prompt,
2719 });
2720 cx.spawn(async move |_, _| {
2721 tx.send(response.await?.response).ok();
2722 anyhow::Ok(())
2723 })
2724 .detach_and_log_err(cx);
2725 })
2726 .log_err();
2727 })
2728}
2729
2730impl RepositoryId {
2731 pub fn to_proto(self) -> u64 {
2732 self.0
2733 }
2734
2735 pub fn from_proto(id: u64) -> Self {
2736 RepositoryId(id)
2737 }
2738}
2739
2740impl RepositorySnapshot {
2741 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2742 Self {
2743 id,
2744 statuses_by_path: Default::default(),
2745 work_directory_abs_path,
2746 branch: None,
2747 head_commit: None,
2748 scan_id: 0,
2749 merge: Default::default(),
2750 remote_origin_url: None,
2751 remote_upstream_url: None,
2752 }
2753 }
2754
2755 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2756 proto::UpdateRepository {
2757 branch_summary: self.branch.as_ref().map(branch_to_proto),
2758 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2759 updated_statuses: self
2760 .statuses_by_path
2761 .iter()
2762 .map(|entry| entry.to_proto())
2763 .collect(),
2764 removed_statuses: Default::default(),
2765 current_merge_conflicts: self
2766 .merge
2767 .conflicted_paths
2768 .iter()
2769 .map(|repo_path| repo_path.to_proto())
2770 .collect(),
2771 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2772 project_id,
2773 id: self.id.to_proto(),
2774 abs_path: self.work_directory_abs_path.to_proto(),
2775 entry_ids: vec![self.id.to_proto()],
2776 scan_id: self.scan_id,
2777 is_last_update: true,
2778 }
2779 }
2780
2781 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2782 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2783 let mut removed_statuses: Vec<String> = Vec::new();
2784
2785 let mut new_statuses = self.statuses_by_path.iter().peekable();
2786 let mut old_statuses = old.statuses_by_path.iter().peekable();
2787
2788 let mut current_new_entry = new_statuses.next();
2789 let mut current_old_entry = old_statuses.next();
2790 loop {
2791 match (current_new_entry, current_old_entry) {
2792 (Some(new_entry), Some(old_entry)) => {
2793 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2794 Ordering::Less => {
2795 updated_statuses.push(new_entry.to_proto());
2796 current_new_entry = new_statuses.next();
2797 }
2798 Ordering::Equal => {
2799 if new_entry.status != old_entry.status {
2800 updated_statuses.push(new_entry.to_proto());
2801 }
2802 current_old_entry = old_statuses.next();
2803 current_new_entry = new_statuses.next();
2804 }
2805 Ordering::Greater => {
2806 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2807 current_old_entry = old_statuses.next();
2808 }
2809 }
2810 }
2811 (None, Some(old_entry)) => {
2812 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2813 current_old_entry = old_statuses.next();
2814 }
2815 (Some(new_entry), None) => {
2816 updated_statuses.push(new_entry.to_proto());
2817 current_new_entry = new_statuses.next();
2818 }
2819 (None, None) => break,
2820 }
2821 }
2822
2823 proto::UpdateRepository {
2824 branch_summary: self.branch.as_ref().map(branch_to_proto),
2825 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2826 updated_statuses,
2827 removed_statuses,
2828 current_merge_conflicts: self
2829 .merge
2830 .conflicted_paths
2831 .iter()
2832 .map(|path| path.as_ref().to_proto())
2833 .collect(),
2834 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2835 project_id,
2836 id: self.id.to_proto(),
2837 abs_path: self.work_directory_abs_path.to_proto(),
2838 entry_ids: vec![],
2839 scan_id: self.scan_id,
2840 is_last_update: true,
2841 }
2842 }
2843
2844 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2845 self.statuses_by_path.iter().cloned()
2846 }
2847
2848 pub fn status_summary(&self) -> GitSummary {
2849 self.statuses_by_path.summary().item_summary
2850 }
2851
2852 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2853 self.statuses_by_path
2854 .get(&PathKey(path.0.clone()), &())
2855 .cloned()
2856 }
2857
2858 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2859 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path)
2860 }
2861
2862 #[inline]
2863 fn abs_path_to_repo_path_inner(
2864 work_directory_abs_path: &Path,
2865 abs_path: &Path,
2866 ) -> Option<RepoPath> {
2867 abs_path
2868 .strip_prefix(&work_directory_abs_path)
2869 .map(RepoPath::from)
2870 .ok()
2871 }
2872
2873 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2874 self.merge.conflicted_paths.contains(repo_path)
2875 }
2876
2877 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2878 let had_conflict_on_last_merge_head_change =
2879 self.merge.conflicted_paths.contains(repo_path);
2880 let has_conflict_currently = self
2881 .status_for_path(repo_path)
2882 .map_or(false, |entry| entry.status.is_conflicted());
2883 had_conflict_on_last_merge_head_change || has_conflict_currently
2884 }
2885
2886 /// This is the name that will be displayed in the repository selector for this repository.
2887 pub fn display_name(&self) -> SharedString {
2888 self.work_directory_abs_path
2889 .file_name()
2890 .unwrap_or_default()
2891 .to_string_lossy()
2892 .to_string()
2893 .into()
2894 }
2895}
2896
2897impl MergeDetails {
2898 async fn load(
2899 backend: &Arc<dyn GitRepository>,
2900 status: &SumTree<StatusEntry>,
2901 prev_snapshot: &RepositorySnapshot,
2902 ) -> Result<(MergeDetails, bool)> {
2903 log::debug!("load merge details");
2904 let message = backend.merge_message().await;
2905 let heads = backend
2906 .revparse_batch(vec![
2907 "MERGE_HEAD".into(),
2908 "CHERRY_PICK_HEAD".into(),
2909 "REBASE_HEAD".into(),
2910 "REVERT_HEAD".into(),
2911 "APPLY_HEAD".into(),
2912 ])
2913 .await
2914 .log_err()
2915 .unwrap_or_default()
2916 .into_iter()
2917 .map(|opt| opt.map(SharedString::from))
2918 .collect::<Vec<_>>();
2919 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2920 let conflicted_paths = if merge_heads_changed {
2921 let current_conflicted_paths = TreeSet::from_ordered_entries(
2922 status
2923 .iter()
2924 .filter(|entry| entry.status.is_conflicted())
2925 .map(|entry| entry.repo_path.clone()),
2926 );
2927
2928 // It can happen that we run a scan while a lengthy merge is in progress
2929 // that will eventually result in conflicts, but before those conflicts
2930 // are reported by `git status`. Since for the moment we only care about
2931 // the merge heads state for the purposes of tracking conflicts, don't update
2932 // this state until we see some conflicts.
2933 if heads.iter().any(Option::is_some)
2934 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
2935 && current_conflicted_paths.is_empty()
2936 {
2937 log::debug!("not updating merge heads because no conflicts found");
2938 return Ok((
2939 MergeDetails {
2940 message: message.map(SharedString::from),
2941 ..prev_snapshot.merge.clone()
2942 },
2943 false,
2944 ));
2945 }
2946
2947 current_conflicted_paths
2948 } else {
2949 prev_snapshot.merge.conflicted_paths.clone()
2950 };
2951 let details = MergeDetails {
2952 conflicted_paths,
2953 message: message.map(SharedString::from),
2954 heads,
2955 };
2956 Ok((details, merge_heads_changed))
2957 }
2958}
2959
2960impl Repository {
2961 pub fn snapshot(&self) -> RepositorySnapshot {
2962 self.snapshot.clone()
2963 }
2964
2965 fn local(
2966 id: RepositoryId,
2967 work_directory_abs_path: Arc<Path>,
2968 dot_git_abs_path: Arc<Path>,
2969 repository_dir_abs_path: Arc<Path>,
2970 common_dir_abs_path: Arc<Path>,
2971 project_environment: WeakEntity<ProjectEnvironment>,
2972 fs: Arc<dyn Fs>,
2973 git_store: WeakEntity<GitStore>,
2974 cx: &mut Context<Self>,
2975 ) -> Self {
2976 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2977 Repository {
2978 this: cx.weak_entity(),
2979 git_store,
2980 snapshot,
2981 commit_message_buffer: None,
2982 askpass_delegates: Default::default(),
2983 paths_needing_status_update: Default::default(),
2984 latest_askpass_id: 0,
2985 job_sender: Repository::spawn_local_git_worker(
2986 work_directory_abs_path,
2987 dot_git_abs_path,
2988 repository_dir_abs_path,
2989 common_dir_abs_path,
2990 project_environment,
2991 fs,
2992 cx,
2993 ),
2994 job_id: 0,
2995 active_jobs: Default::default(),
2996 }
2997 }
2998
2999 fn remote(
3000 id: RepositoryId,
3001 work_directory_abs_path: Arc<Path>,
3002 project_id: ProjectId,
3003 client: AnyProtoClient,
3004 git_store: WeakEntity<GitStore>,
3005 cx: &mut Context<Self>,
3006 ) -> Self {
3007 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
3008 Self {
3009 this: cx.weak_entity(),
3010 snapshot,
3011 commit_message_buffer: None,
3012 git_store,
3013 paths_needing_status_update: Default::default(),
3014 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3015 askpass_delegates: Default::default(),
3016 latest_askpass_id: 0,
3017 active_jobs: Default::default(),
3018 job_id: 0,
3019 }
3020 }
3021
3022 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3023 self.git_store.upgrade()
3024 }
3025
3026 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3027 let this = cx.weak_entity();
3028 let git_store = self.git_store.clone();
3029 let _ = self.send_keyed_job(
3030 Some(GitJobKey::ReloadBufferDiffBases),
3031 None,
3032 |state, mut cx| async move {
3033 let RepositoryState::Local { backend, .. } = state else {
3034 log::error!("tried to recompute diffs for a non-local repository");
3035 return Ok(());
3036 };
3037
3038 let Some(this) = this.upgrade() else {
3039 return Ok(());
3040 };
3041
3042 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3043 git_store.update(cx, |git_store, cx| {
3044 git_store
3045 .diffs
3046 .iter()
3047 .filter_map(|(buffer_id, diff_state)| {
3048 let buffer_store = git_store.buffer_store.read(cx);
3049 let buffer = buffer_store.get(*buffer_id)?;
3050 let file = File::from_dyn(buffer.read(cx).file())?;
3051 let abs_path =
3052 file.worktree.read(cx).absolutize(&file.path).ok()?;
3053 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3054 log::debug!(
3055 "start reload diff bases for repo path {}",
3056 repo_path.0.display()
3057 );
3058 diff_state.update(cx, |diff_state, _| {
3059 let has_unstaged_diff = diff_state
3060 .unstaged_diff
3061 .as_ref()
3062 .is_some_and(|diff| diff.is_upgradable());
3063 let has_uncommitted_diff = diff_state
3064 .uncommitted_diff
3065 .as_ref()
3066 .is_some_and(|set| set.is_upgradable());
3067
3068 Some((
3069 buffer,
3070 repo_path,
3071 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3072 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3073 ))
3074 })
3075 })
3076 .collect::<Vec<_>>()
3077 })
3078 })??;
3079
3080 let buffer_diff_base_changes = cx
3081 .background_spawn(async move {
3082 let mut changes = Vec::new();
3083 for (buffer, repo_path, current_index_text, current_head_text) in
3084 &repo_diff_state_updates
3085 {
3086 let index_text = if current_index_text.is_some() {
3087 backend.load_index_text(repo_path.clone()).await
3088 } else {
3089 None
3090 };
3091 let head_text = if current_head_text.is_some() {
3092 backend.load_committed_text(repo_path.clone()).await
3093 } else {
3094 None
3095 };
3096
3097 let change =
3098 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3099 (Some(current_index), Some(current_head)) => {
3100 let index_changed =
3101 index_text.as_ref() != current_index.as_deref();
3102 let head_changed =
3103 head_text.as_ref() != current_head.as_deref();
3104 if index_changed && head_changed {
3105 if index_text == head_text {
3106 Some(DiffBasesChange::SetBoth(head_text))
3107 } else {
3108 Some(DiffBasesChange::SetEach {
3109 index: index_text,
3110 head: head_text,
3111 })
3112 }
3113 } else if index_changed {
3114 Some(DiffBasesChange::SetIndex(index_text))
3115 } else if head_changed {
3116 Some(DiffBasesChange::SetHead(head_text))
3117 } else {
3118 None
3119 }
3120 }
3121 (Some(current_index), None) => {
3122 let index_changed =
3123 index_text.as_ref() != current_index.as_deref();
3124 index_changed
3125 .then_some(DiffBasesChange::SetIndex(index_text))
3126 }
3127 (None, Some(current_head)) => {
3128 let head_changed =
3129 head_text.as_ref() != current_head.as_deref();
3130 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3131 }
3132 (None, None) => None,
3133 };
3134
3135 changes.push((buffer.clone(), change))
3136 }
3137 changes
3138 })
3139 .await;
3140
3141 git_store.update(&mut cx, |git_store, cx| {
3142 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3143 let buffer_snapshot = buffer.read(cx).text_snapshot();
3144 let buffer_id = buffer_snapshot.remote_id();
3145 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3146 continue;
3147 };
3148
3149 let downstream_client = git_store.downstream_client();
3150 diff_state.update(cx, |diff_state, cx| {
3151 use proto::update_diff_bases::Mode;
3152
3153 if let Some((diff_bases_change, (client, project_id))) =
3154 diff_bases_change.clone().zip(downstream_client)
3155 {
3156 let (staged_text, committed_text, mode) = match diff_bases_change {
3157 DiffBasesChange::SetIndex(index) => {
3158 (index, None, Mode::IndexOnly)
3159 }
3160 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3161 DiffBasesChange::SetEach { index, head } => {
3162 (index, head, Mode::IndexAndHead)
3163 }
3164 DiffBasesChange::SetBoth(text) => {
3165 (None, text, Mode::IndexMatchesHead)
3166 }
3167 };
3168 client
3169 .send(proto::UpdateDiffBases {
3170 project_id: project_id.to_proto(),
3171 buffer_id: buffer_id.to_proto(),
3172 staged_text,
3173 committed_text,
3174 mode: mode as i32,
3175 })
3176 .log_err();
3177 }
3178
3179 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3180 });
3181 }
3182 })
3183 },
3184 );
3185 }
3186
3187 pub fn send_job<F, Fut, R>(
3188 &mut self,
3189 status: Option<SharedString>,
3190 job: F,
3191 ) -> oneshot::Receiver<R>
3192 where
3193 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3194 Fut: Future<Output = R> + 'static,
3195 R: Send + 'static,
3196 {
3197 self.send_keyed_job(None, status, job)
3198 }
3199
3200 fn send_keyed_job<F, Fut, R>(
3201 &mut self,
3202 key: Option<GitJobKey>,
3203 status: Option<SharedString>,
3204 job: F,
3205 ) -> oneshot::Receiver<R>
3206 where
3207 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3208 Fut: Future<Output = R> + 'static,
3209 R: Send + 'static,
3210 {
3211 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3212 let job_id = post_inc(&mut self.job_id);
3213 let this = self.this.clone();
3214 self.job_sender
3215 .unbounded_send(GitJob {
3216 key,
3217 job: Box::new(move |state, cx: &mut AsyncApp| {
3218 let job = job(state, cx.clone());
3219 cx.spawn(async move |cx| {
3220 if let Some(s) = status.clone() {
3221 this.update(cx, |this, cx| {
3222 this.active_jobs.insert(
3223 job_id,
3224 JobInfo {
3225 start: Instant::now(),
3226 message: s.clone(),
3227 },
3228 );
3229
3230 cx.notify();
3231 })
3232 .ok();
3233 }
3234 let result = job.await;
3235
3236 this.update(cx, |this, cx| {
3237 this.active_jobs.remove(&job_id);
3238 cx.notify();
3239 })
3240 .ok();
3241
3242 result_tx.send(result).ok();
3243 })
3244 }),
3245 })
3246 .ok();
3247 result_rx
3248 }
3249
3250 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3251 let Some(git_store) = self.git_store.upgrade() else {
3252 return;
3253 };
3254 let entity = cx.entity();
3255 git_store.update(cx, |git_store, cx| {
3256 let Some((&id, _)) = git_store
3257 .repositories
3258 .iter()
3259 .find(|(_, handle)| *handle == &entity)
3260 else {
3261 return;
3262 };
3263 git_store.active_repo_id = Some(id);
3264 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3265 });
3266 }
3267
3268 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3269 self.snapshot.status()
3270 }
3271
3272 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3273 let git_store = self.git_store.upgrade()?;
3274 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3275 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
3276 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3277 Some(ProjectPath {
3278 worktree_id: worktree.read(cx).id(),
3279 path: relative_path.into(),
3280 })
3281 }
3282
3283 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3284 let git_store = self.git_store.upgrade()?;
3285 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3286 let abs_path = worktree_store.absolutize(path, cx)?;
3287 self.snapshot.abs_path_to_repo_path(&abs_path)
3288 }
3289
3290 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3291 other
3292 .read(cx)
3293 .snapshot
3294 .work_directory_abs_path
3295 .starts_with(&self.snapshot.work_directory_abs_path)
3296 }
3297
3298 pub fn open_commit_buffer(
3299 &mut self,
3300 languages: Option<Arc<LanguageRegistry>>,
3301 buffer_store: Entity<BufferStore>,
3302 cx: &mut Context<Self>,
3303 ) -> Task<Result<Entity<Buffer>>> {
3304 let id = self.id;
3305 if let Some(buffer) = self.commit_message_buffer.clone() {
3306 return Task::ready(Ok(buffer));
3307 }
3308 let this = cx.weak_entity();
3309
3310 let rx = self.send_job(None, move |state, mut cx| async move {
3311 let Some(this) = this.upgrade() else {
3312 bail!("git store was dropped");
3313 };
3314 match state {
3315 RepositoryState::Local { .. } => {
3316 this.update(&mut cx, |_, cx| {
3317 Self::open_local_commit_buffer(languages, buffer_store, cx)
3318 })?
3319 .await
3320 }
3321 RepositoryState::Remote { project_id, client } => {
3322 let request = client.request(proto::OpenCommitMessageBuffer {
3323 project_id: project_id.0,
3324 repository_id: id.to_proto(),
3325 });
3326 let response = request.await.context("requesting to open commit buffer")?;
3327 let buffer_id = BufferId::new(response.buffer_id)?;
3328 let buffer = buffer_store
3329 .update(&mut cx, |buffer_store, cx| {
3330 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3331 })?
3332 .await?;
3333 if let Some(language_registry) = languages {
3334 let git_commit_language =
3335 language_registry.language_for_name("Git Commit").await?;
3336 buffer.update(&mut cx, |buffer, cx| {
3337 buffer.set_language(Some(git_commit_language), cx);
3338 })?;
3339 }
3340 this.update(&mut cx, |this, _| {
3341 this.commit_message_buffer = Some(buffer.clone());
3342 })?;
3343 Ok(buffer)
3344 }
3345 }
3346 });
3347
3348 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3349 }
3350
3351 fn open_local_commit_buffer(
3352 language_registry: Option<Arc<LanguageRegistry>>,
3353 buffer_store: Entity<BufferStore>,
3354 cx: &mut Context<Self>,
3355 ) -> Task<Result<Entity<Buffer>>> {
3356 cx.spawn(async move |repository, cx| {
3357 let buffer = buffer_store
3358 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
3359 .await?;
3360
3361 if let Some(language_registry) = language_registry {
3362 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3363 buffer.update(cx, |buffer, cx| {
3364 buffer.set_language(Some(git_commit_language), cx);
3365 })?;
3366 }
3367
3368 repository.update(cx, |repository, _| {
3369 repository.commit_message_buffer = Some(buffer.clone());
3370 })?;
3371 Ok(buffer)
3372 })
3373 }
3374
3375 pub fn checkout_files(
3376 &mut self,
3377 commit: &str,
3378 paths: Vec<RepoPath>,
3379 _cx: &mut App,
3380 ) -> oneshot::Receiver<Result<()>> {
3381 let commit = commit.to_string();
3382 let id = self.id;
3383
3384 self.send_job(
3385 Some(format!("git checkout {}", commit).into()),
3386 move |git_repo, _| async move {
3387 match git_repo {
3388 RepositoryState::Local {
3389 backend,
3390 environment,
3391 ..
3392 } => {
3393 backend
3394 .checkout_files(commit, paths, environment.clone())
3395 .await
3396 }
3397 RepositoryState::Remote { project_id, client } => {
3398 client
3399 .request(proto::GitCheckoutFiles {
3400 project_id: project_id.0,
3401 repository_id: id.to_proto(),
3402 commit,
3403 paths: paths
3404 .into_iter()
3405 .map(|p| p.to_string_lossy().to_string())
3406 .collect(),
3407 })
3408 .await?;
3409
3410 Ok(())
3411 }
3412 }
3413 },
3414 )
3415 }
3416
3417 pub fn reset(
3418 &mut self,
3419 commit: String,
3420 reset_mode: ResetMode,
3421 _cx: &mut App,
3422 ) -> oneshot::Receiver<Result<()>> {
3423 let commit = commit.to_string();
3424 let id = self.id;
3425
3426 self.send_job(None, move |git_repo, _| async move {
3427 match git_repo {
3428 RepositoryState::Local {
3429 backend,
3430 environment,
3431 ..
3432 } => backend.reset(commit, reset_mode, environment).await,
3433 RepositoryState::Remote { project_id, client } => {
3434 client
3435 .request(proto::GitReset {
3436 project_id: project_id.0,
3437 repository_id: id.to_proto(),
3438 commit,
3439 mode: match reset_mode {
3440 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3441 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3442 },
3443 })
3444 .await?;
3445
3446 Ok(())
3447 }
3448 }
3449 })
3450 }
3451
3452 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3453 let id = self.id;
3454 self.send_job(None, move |git_repo, _cx| async move {
3455 match git_repo {
3456 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3457 RepositoryState::Remote { project_id, client } => {
3458 let resp = client
3459 .request(proto::GitShow {
3460 project_id: project_id.0,
3461 repository_id: id.to_proto(),
3462 commit,
3463 })
3464 .await?;
3465
3466 Ok(CommitDetails {
3467 sha: resp.sha.into(),
3468 message: resp.message.into(),
3469 commit_timestamp: resp.commit_timestamp,
3470 author_email: resp.author_email.into(),
3471 author_name: resp.author_name.into(),
3472 })
3473 }
3474 }
3475 })
3476 }
3477
3478 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3479 let id = self.id;
3480 self.send_job(None, move |git_repo, cx| async move {
3481 match git_repo {
3482 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3483 RepositoryState::Remote {
3484 client, project_id, ..
3485 } => {
3486 let response = client
3487 .request(proto::LoadCommitDiff {
3488 project_id: project_id.0,
3489 repository_id: id.to_proto(),
3490 commit,
3491 })
3492 .await?;
3493 Ok(CommitDiff {
3494 files: response
3495 .files
3496 .into_iter()
3497 .map(|file| CommitFile {
3498 path: Path::new(&file.path).into(),
3499 old_text: file.old_text,
3500 new_text: file.new_text,
3501 })
3502 .collect(),
3503 })
3504 }
3505 }
3506 })
3507 }
3508
3509 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3510 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3511 }
3512
3513 pub fn stage_entries(
3514 &self,
3515 entries: Vec<RepoPath>,
3516 cx: &mut Context<Self>,
3517 ) -> Task<anyhow::Result<()>> {
3518 if entries.is_empty() {
3519 return Task::ready(Ok(()));
3520 }
3521 let id = self.id;
3522
3523 let mut save_futures = Vec::new();
3524 if let Some(buffer_store) = self.buffer_store(cx) {
3525 buffer_store.update(cx, |buffer_store, cx| {
3526 for path in &entries {
3527 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3528 continue;
3529 };
3530 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3531 && buffer
3532 .read(cx)
3533 .file()
3534 .map_or(false, |file| file.disk_state().exists())
3535 {
3536 save_futures.push(buffer_store.save_buffer(buffer, cx));
3537 }
3538 }
3539 })
3540 }
3541
3542 cx.spawn(async move |this, cx| {
3543 for save_future in save_futures {
3544 save_future.await?;
3545 }
3546
3547 this.update(cx, |this, _| {
3548 this.send_job(None, move |git_repo, _cx| async move {
3549 match git_repo {
3550 RepositoryState::Local {
3551 backend,
3552 environment,
3553 ..
3554 } => backend.stage_paths(entries, environment.clone()).await,
3555 RepositoryState::Remote { project_id, client } => {
3556 client
3557 .request(proto::Stage {
3558 project_id: project_id.0,
3559 repository_id: id.to_proto(),
3560 paths: entries
3561 .into_iter()
3562 .map(|repo_path| repo_path.as_ref().to_proto())
3563 .collect(),
3564 })
3565 .await
3566 .context("sending stage request")?;
3567
3568 Ok(())
3569 }
3570 }
3571 })
3572 })?
3573 .await??;
3574
3575 Ok(())
3576 })
3577 }
3578
3579 pub fn unstage_entries(
3580 &self,
3581 entries: Vec<RepoPath>,
3582 cx: &mut Context<Self>,
3583 ) -> Task<anyhow::Result<()>> {
3584 if entries.is_empty() {
3585 return Task::ready(Ok(()));
3586 }
3587 let id = self.id;
3588
3589 let mut save_futures = Vec::new();
3590 if let Some(buffer_store) = self.buffer_store(cx) {
3591 buffer_store.update(cx, |buffer_store, cx| {
3592 for path in &entries {
3593 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3594 continue;
3595 };
3596 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3597 && buffer
3598 .read(cx)
3599 .file()
3600 .map_or(false, |file| file.disk_state().exists())
3601 {
3602 save_futures.push(buffer_store.save_buffer(buffer, cx));
3603 }
3604 }
3605 })
3606 }
3607
3608 cx.spawn(async move |this, cx| {
3609 for save_future in save_futures {
3610 save_future.await?;
3611 }
3612
3613 this.update(cx, |this, _| {
3614 this.send_job(None, move |git_repo, _cx| async move {
3615 match git_repo {
3616 RepositoryState::Local {
3617 backend,
3618 environment,
3619 ..
3620 } => backend.unstage_paths(entries, environment).await,
3621 RepositoryState::Remote { project_id, client } => {
3622 client
3623 .request(proto::Unstage {
3624 project_id: project_id.0,
3625 repository_id: id.to_proto(),
3626 paths: entries
3627 .into_iter()
3628 .map(|repo_path| repo_path.as_ref().to_proto())
3629 .collect(),
3630 })
3631 .await
3632 .context("sending unstage request")?;
3633
3634 Ok(())
3635 }
3636 }
3637 })
3638 })?
3639 .await??;
3640
3641 Ok(())
3642 })
3643 }
3644
3645 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3646 let to_stage = self
3647 .cached_status()
3648 .filter(|entry| !entry.status.staging().is_fully_staged())
3649 .map(|entry| entry.repo_path.clone())
3650 .collect();
3651 self.stage_entries(to_stage, cx)
3652 }
3653
3654 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3655 let to_unstage = self
3656 .cached_status()
3657 .filter(|entry| entry.status.staging().has_staged())
3658 .map(|entry| entry.repo_path.clone())
3659 .collect();
3660 self.unstage_entries(to_unstage, cx)
3661 }
3662
3663 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3664 let to_stash = self
3665 .cached_status()
3666 .map(|entry| entry.repo_path.clone())
3667 .collect();
3668
3669 self.stash_entries(to_stash, cx)
3670 }
3671
3672 pub fn stash_entries(
3673 &mut self,
3674 entries: Vec<RepoPath>,
3675 cx: &mut Context<Self>,
3676 ) -> Task<anyhow::Result<()>> {
3677 let id = self.id;
3678
3679 cx.spawn(async move |this, cx| {
3680 this.update(cx, |this, _| {
3681 this.send_job(None, move |git_repo, _cx| async move {
3682 match git_repo {
3683 RepositoryState::Local {
3684 backend,
3685 environment,
3686 ..
3687 } => backend.stash_paths(entries, environment).await,
3688 RepositoryState::Remote { project_id, client } => {
3689 client
3690 .request(proto::Stash {
3691 project_id: project_id.0,
3692 repository_id: id.to_proto(),
3693 paths: entries
3694 .into_iter()
3695 .map(|repo_path| repo_path.as_ref().to_proto())
3696 .collect(),
3697 })
3698 .await
3699 .context("sending stash request")?;
3700 Ok(())
3701 }
3702 }
3703 })
3704 })?
3705 .await??;
3706 Ok(())
3707 })
3708 }
3709
3710 pub fn stash_pop(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3711 let id = self.id;
3712 cx.spawn(async move |this, cx| {
3713 this.update(cx, |this, _| {
3714 this.send_job(None, move |git_repo, _cx| async move {
3715 match git_repo {
3716 RepositoryState::Local {
3717 backend,
3718 environment,
3719 ..
3720 } => backend.stash_pop(environment).await,
3721 RepositoryState::Remote { project_id, client } => {
3722 client
3723 .request(proto::StashPop {
3724 project_id: project_id.0,
3725 repository_id: id.to_proto(),
3726 })
3727 .await
3728 .context("sending stash pop request")?;
3729 Ok(())
3730 }
3731 }
3732 })
3733 })?
3734 .await??;
3735 Ok(())
3736 })
3737 }
3738
3739 pub fn commit(
3740 &mut self,
3741 message: SharedString,
3742 name_and_email: Option<(SharedString, SharedString)>,
3743 options: CommitOptions,
3744 _cx: &mut App,
3745 ) -> oneshot::Receiver<Result<()>> {
3746 let id = self.id;
3747
3748 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3749 match git_repo {
3750 RepositoryState::Local {
3751 backend,
3752 environment,
3753 ..
3754 } => {
3755 backend
3756 .commit(message, name_and_email, options, environment)
3757 .await
3758 }
3759 RepositoryState::Remote { project_id, client } => {
3760 let (name, email) = name_and_email.unzip();
3761 client
3762 .request(proto::Commit {
3763 project_id: project_id.0,
3764 repository_id: id.to_proto(),
3765 message: String::from(message),
3766 name: name.map(String::from),
3767 email: email.map(String::from),
3768 options: Some(proto::commit::CommitOptions {
3769 amend: options.amend,
3770 signoff: options.signoff,
3771 }),
3772 })
3773 .await
3774 .context("sending commit request")?;
3775
3776 Ok(())
3777 }
3778 }
3779 })
3780 }
3781
3782 pub fn fetch(
3783 &mut self,
3784 fetch_options: FetchOptions,
3785 askpass: AskPassDelegate,
3786 _cx: &mut App,
3787 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3788 let askpass_delegates = self.askpass_delegates.clone();
3789 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3790 let id = self.id;
3791
3792 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3793 match git_repo {
3794 RepositoryState::Local {
3795 backend,
3796 environment,
3797 ..
3798 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3799 RepositoryState::Remote { project_id, client } => {
3800 askpass_delegates.lock().insert(askpass_id, askpass);
3801 let _defer = util::defer(|| {
3802 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3803 debug_assert!(askpass_delegate.is_some());
3804 });
3805
3806 let response = client
3807 .request(proto::Fetch {
3808 project_id: project_id.0,
3809 repository_id: id.to_proto(),
3810 askpass_id,
3811 remote: fetch_options.to_proto(),
3812 })
3813 .await
3814 .context("sending fetch request")?;
3815
3816 Ok(RemoteCommandOutput {
3817 stdout: response.stdout,
3818 stderr: response.stderr,
3819 })
3820 }
3821 }
3822 })
3823 }
3824
3825 pub fn push(
3826 &mut self,
3827 branch: SharedString,
3828 remote: SharedString,
3829 options: Option<PushOptions>,
3830 askpass: AskPassDelegate,
3831 cx: &mut Context<Self>,
3832 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3833 let askpass_delegates = self.askpass_delegates.clone();
3834 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3835 let id = self.id;
3836
3837 let args = options
3838 .map(|option| match option {
3839 PushOptions::SetUpstream => " --set-upstream",
3840 PushOptions::Force => " --force-with-lease",
3841 })
3842 .unwrap_or("");
3843
3844 let updates_tx = self
3845 .git_store()
3846 .and_then(|git_store| match &git_store.read(cx).state {
3847 GitStoreState::Local { downstream, .. } => downstream
3848 .as_ref()
3849 .map(|downstream| downstream.updates_tx.clone()),
3850 _ => None,
3851 });
3852
3853 let this = cx.weak_entity();
3854 self.send_job(
3855 Some(format!("git push {} {} {}", args, branch, remote).into()),
3856 move |git_repo, mut cx| async move {
3857 match git_repo {
3858 RepositoryState::Local {
3859 backend,
3860 environment,
3861 ..
3862 } => {
3863 let result = backend
3864 .push(
3865 branch.to_string(),
3866 remote.to_string(),
3867 options,
3868 askpass,
3869 environment.clone(),
3870 cx.clone(),
3871 )
3872 .await;
3873 if result.is_ok() {
3874 let branches = backend.branches().await?;
3875 let branch = branches.into_iter().find(|branch| branch.is_head);
3876 log::info!("head branch after scan is {branch:?}");
3877 let snapshot = this.update(&mut cx, |this, cx| {
3878 this.snapshot.branch = branch;
3879 let snapshot = this.snapshot.clone();
3880 cx.emit(RepositoryEvent::Updated {
3881 full_scan: false,
3882 new_instance: false,
3883 });
3884 snapshot
3885 })?;
3886 if let Some(updates_tx) = updates_tx {
3887 updates_tx
3888 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3889 .ok();
3890 }
3891 }
3892 result
3893 }
3894 RepositoryState::Remote { project_id, client } => {
3895 askpass_delegates.lock().insert(askpass_id, askpass);
3896 let _defer = util::defer(|| {
3897 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3898 debug_assert!(askpass_delegate.is_some());
3899 });
3900 let response = client
3901 .request(proto::Push {
3902 project_id: project_id.0,
3903 repository_id: id.to_proto(),
3904 askpass_id,
3905 branch_name: branch.to_string(),
3906 remote_name: remote.to_string(),
3907 options: options.map(|options| match options {
3908 PushOptions::Force => proto::push::PushOptions::Force,
3909 PushOptions::SetUpstream => {
3910 proto::push::PushOptions::SetUpstream
3911 }
3912 }
3913 as i32),
3914 })
3915 .await
3916 .context("sending push request")?;
3917
3918 Ok(RemoteCommandOutput {
3919 stdout: response.stdout,
3920 stderr: response.stderr,
3921 })
3922 }
3923 }
3924 },
3925 )
3926 }
3927
3928 pub fn pull(
3929 &mut self,
3930 branch: SharedString,
3931 remote: SharedString,
3932 askpass: AskPassDelegate,
3933 _cx: &mut App,
3934 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3935 let askpass_delegates = self.askpass_delegates.clone();
3936 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3937 let id = self.id;
3938
3939 self.send_job(
3940 Some(format!("git pull {} {}", remote, branch).into()),
3941 move |git_repo, cx| async move {
3942 match git_repo {
3943 RepositoryState::Local {
3944 backend,
3945 environment,
3946 ..
3947 } => {
3948 backend
3949 .pull(
3950 branch.to_string(),
3951 remote.to_string(),
3952 askpass,
3953 environment.clone(),
3954 cx,
3955 )
3956 .await
3957 }
3958 RepositoryState::Remote { project_id, client } => {
3959 askpass_delegates.lock().insert(askpass_id, askpass);
3960 let _defer = util::defer(|| {
3961 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3962 debug_assert!(askpass_delegate.is_some());
3963 });
3964 let response = client
3965 .request(proto::Pull {
3966 project_id: project_id.0,
3967 repository_id: id.to_proto(),
3968 askpass_id,
3969 branch_name: branch.to_string(),
3970 remote_name: remote.to_string(),
3971 })
3972 .await
3973 .context("sending pull request")?;
3974
3975 Ok(RemoteCommandOutput {
3976 stdout: response.stdout,
3977 stderr: response.stderr,
3978 })
3979 }
3980 }
3981 },
3982 )
3983 }
3984
3985 fn spawn_set_index_text_job(
3986 &mut self,
3987 path: RepoPath,
3988 content: Option<String>,
3989 hunk_staging_operation_count: Option<usize>,
3990 cx: &mut Context<Self>,
3991 ) -> oneshot::Receiver<anyhow::Result<()>> {
3992 let id = self.id;
3993 let this = cx.weak_entity();
3994 let git_store = self.git_store.clone();
3995 self.send_keyed_job(
3996 Some(GitJobKey::WriteIndex(path.clone())),
3997 None,
3998 move |git_repo, mut cx| async move {
3999 log::debug!("start updating index text for buffer {}", path.display());
4000 match git_repo {
4001 RepositoryState::Local {
4002 backend,
4003 environment,
4004 ..
4005 } => {
4006 backend
4007 .set_index_text(path.clone(), content, environment.clone())
4008 .await?;
4009 }
4010 RepositoryState::Remote { project_id, client } => {
4011 client
4012 .request(proto::SetIndexText {
4013 project_id: project_id.0,
4014 repository_id: id.to_proto(),
4015 path: path.as_ref().to_proto(),
4016 text: content,
4017 })
4018 .await?;
4019 }
4020 }
4021 log::debug!("finish updating index text for buffer {}", path.display());
4022
4023 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4024 let project_path = this
4025 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4026 .ok()
4027 .flatten();
4028 git_store.update(&mut cx, |git_store, cx| {
4029 let buffer_id = git_store
4030 .buffer_store
4031 .read(cx)
4032 .get_by_path(&project_path?)?
4033 .read(cx)
4034 .remote_id();
4035 let diff_state = git_store.diffs.get(&buffer_id)?;
4036 diff_state.update(cx, |diff_state, _| {
4037 diff_state.hunk_staging_operation_count_as_of_write =
4038 hunk_staging_operation_count;
4039 });
4040 Some(())
4041 })?;
4042 }
4043 Ok(())
4044 },
4045 )
4046 }
4047
4048 pub fn get_remotes(
4049 &mut self,
4050 branch_name: Option<String>,
4051 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4052 let id = self.id;
4053 self.send_job(None, move |repo, _cx| async move {
4054 match repo {
4055 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4056 RepositoryState::Remote { project_id, client } => {
4057 let response = client
4058 .request(proto::GetRemotes {
4059 project_id: project_id.0,
4060 repository_id: id.to_proto(),
4061 branch_name,
4062 })
4063 .await?;
4064
4065 let remotes = response
4066 .remotes
4067 .into_iter()
4068 .map(|remotes| git::repository::Remote {
4069 name: remotes.name.into(),
4070 })
4071 .collect();
4072
4073 Ok(remotes)
4074 }
4075 }
4076 })
4077 }
4078
4079 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4080 let id = self.id;
4081 self.send_job(None, move |repo, _| async move {
4082 match repo {
4083 RepositoryState::Local { backend, .. } => backend.branches().await,
4084 RepositoryState::Remote { project_id, client } => {
4085 let response = client
4086 .request(proto::GitGetBranches {
4087 project_id: project_id.0,
4088 repository_id: id.to_proto(),
4089 })
4090 .await?;
4091
4092 let branches = response
4093 .branches
4094 .into_iter()
4095 .map(|branch| proto_to_branch(&branch))
4096 .collect();
4097
4098 Ok(branches)
4099 }
4100 }
4101 })
4102 }
4103
4104 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4105 let id = self.id;
4106 self.send_job(None, move |repo, _| async move {
4107 match repo {
4108 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4109 RepositoryState::Remote { project_id, client } => {
4110 let response = client
4111 .request(proto::GetDefaultBranch {
4112 project_id: project_id.0,
4113 repository_id: id.to_proto(),
4114 })
4115 .await?;
4116
4117 anyhow::Ok(response.branch.map(SharedString::from))
4118 }
4119 }
4120 })
4121 }
4122
4123 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4124 let id = self.id;
4125 self.send_job(None, move |repo, _cx| async move {
4126 match repo {
4127 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4128 RepositoryState::Remote { project_id, client } => {
4129 let response = client
4130 .request(proto::GitDiff {
4131 project_id: project_id.0,
4132 repository_id: id.to_proto(),
4133 diff_type: match diff_type {
4134 DiffType::HeadToIndex => {
4135 proto::git_diff::DiffType::HeadToIndex.into()
4136 }
4137 DiffType::HeadToWorktree => {
4138 proto::git_diff::DiffType::HeadToWorktree.into()
4139 }
4140 },
4141 })
4142 .await?;
4143
4144 Ok(response.diff)
4145 }
4146 }
4147 })
4148 }
4149
4150 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4151 let id = self.id;
4152 self.send_job(
4153 Some(format!("git switch -c {branch_name}").into()),
4154 move |repo, _cx| async move {
4155 match repo {
4156 RepositoryState::Local { backend, .. } => {
4157 backend.create_branch(branch_name).await
4158 }
4159 RepositoryState::Remote { project_id, client } => {
4160 client
4161 .request(proto::GitCreateBranch {
4162 project_id: project_id.0,
4163 repository_id: id.to_proto(),
4164 branch_name,
4165 })
4166 .await?;
4167
4168 Ok(())
4169 }
4170 }
4171 },
4172 )
4173 }
4174
4175 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4176 let id = self.id;
4177 self.send_job(
4178 Some(format!("git switch {branch_name}").into()),
4179 move |repo, _cx| async move {
4180 match repo {
4181 RepositoryState::Local { backend, .. } => {
4182 backend.change_branch(branch_name).await
4183 }
4184 RepositoryState::Remote { project_id, client } => {
4185 client
4186 .request(proto::GitChangeBranch {
4187 project_id: project_id.0,
4188 repository_id: id.to_proto(),
4189 branch_name,
4190 })
4191 .await?;
4192
4193 Ok(())
4194 }
4195 }
4196 },
4197 )
4198 }
4199
4200 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4201 let id = self.id;
4202 self.send_job(None, move |repo, _cx| async move {
4203 match repo {
4204 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4205 RepositoryState::Remote { project_id, client } => {
4206 let response = client
4207 .request(proto::CheckForPushedCommits {
4208 project_id: project_id.0,
4209 repository_id: id.to_proto(),
4210 })
4211 .await?;
4212
4213 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4214
4215 Ok(branches)
4216 }
4217 }
4218 })
4219 }
4220
4221 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4222 self.send_job(None, |repo, _cx| async move {
4223 match repo {
4224 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4225 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4226 }
4227 })
4228 }
4229
4230 pub fn restore_checkpoint(
4231 &mut self,
4232 checkpoint: GitRepositoryCheckpoint,
4233 ) -> oneshot::Receiver<Result<()>> {
4234 self.send_job(None, move |repo, _cx| async move {
4235 match repo {
4236 RepositoryState::Local { backend, .. } => {
4237 backend.restore_checkpoint(checkpoint).await
4238 }
4239 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4240 }
4241 })
4242 }
4243
4244 pub(crate) fn apply_remote_update(
4245 &mut self,
4246 update: proto::UpdateRepository,
4247 is_new: bool,
4248 cx: &mut Context<Self>,
4249 ) -> Result<()> {
4250 let conflicted_paths = TreeSet::from_ordered_entries(
4251 update
4252 .current_merge_conflicts
4253 .into_iter()
4254 .map(|path| RepoPath(Path::new(&path).into())),
4255 );
4256 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4257 self.snapshot.head_commit = update
4258 .head_commit_details
4259 .as_ref()
4260 .map(proto_to_commit_details);
4261
4262 self.snapshot.merge.conflicted_paths = conflicted_paths;
4263 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4264
4265 let edits = update
4266 .removed_statuses
4267 .into_iter()
4268 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
4269 .chain(
4270 update
4271 .updated_statuses
4272 .into_iter()
4273 .filter_map(|updated_status| {
4274 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4275 }),
4276 )
4277 .collect::<Vec<_>>();
4278 self.snapshot.statuses_by_path.edit(edits, &());
4279 if update.is_last_update {
4280 self.snapshot.scan_id = update.scan_id;
4281 }
4282 cx.emit(RepositoryEvent::Updated {
4283 full_scan: true,
4284 new_instance: is_new,
4285 });
4286 Ok(())
4287 }
4288
4289 pub fn compare_checkpoints(
4290 &mut self,
4291 left: GitRepositoryCheckpoint,
4292 right: GitRepositoryCheckpoint,
4293 ) -> oneshot::Receiver<Result<bool>> {
4294 self.send_job(None, move |repo, _cx| async move {
4295 match repo {
4296 RepositoryState::Local { backend, .. } => {
4297 backend.compare_checkpoints(left, right).await
4298 }
4299 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4300 }
4301 })
4302 }
4303
4304 pub fn diff_checkpoints(
4305 &mut self,
4306 base_checkpoint: GitRepositoryCheckpoint,
4307 target_checkpoint: GitRepositoryCheckpoint,
4308 ) -> oneshot::Receiver<Result<String>> {
4309 self.send_job(None, move |repo, _cx| async move {
4310 match repo {
4311 RepositoryState::Local { backend, .. } => {
4312 backend
4313 .diff_checkpoints(base_checkpoint, target_checkpoint)
4314 .await
4315 }
4316 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4317 }
4318 })
4319 }
4320
4321 fn schedule_scan(
4322 &mut self,
4323 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4324 cx: &mut Context<Self>,
4325 ) {
4326 let this = cx.weak_entity();
4327 let _ = self.send_keyed_job(
4328 Some(GitJobKey::ReloadGitState),
4329 None,
4330 |state, mut cx| async move {
4331 log::debug!("run scheduled git status scan");
4332
4333 let Some(this) = this.upgrade() else {
4334 return Ok(());
4335 };
4336 let RepositoryState::Local { backend, .. } = state else {
4337 bail!("not a local repository")
4338 };
4339 let (snapshot, events) = this
4340 .update(&mut cx, |this, _| {
4341 this.paths_needing_status_update.clear();
4342 compute_snapshot(
4343 this.id,
4344 this.work_directory_abs_path.clone(),
4345 this.snapshot.clone(),
4346 backend.clone(),
4347 )
4348 })?
4349 .await?;
4350 this.update(&mut cx, |this, cx| {
4351 this.snapshot = snapshot.clone();
4352 for event in events {
4353 cx.emit(event);
4354 }
4355 })?;
4356 if let Some(updates_tx) = updates_tx {
4357 updates_tx
4358 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4359 .ok();
4360 }
4361 Ok(())
4362 },
4363 );
4364 }
4365
4366 fn spawn_local_git_worker(
4367 work_directory_abs_path: Arc<Path>,
4368 dot_git_abs_path: Arc<Path>,
4369 _repository_dir_abs_path: Arc<Path>,
4370 _common_dir_abs_path: Arc<Path>,
4371 project_environment: WeakEntity<ProjectEnvironment>,
4372 fs: Arc<dyn Fs>,
4373 cx: &mut Context<Self>,
4374 ) -> mpsc::UnboundedSender<GitJob> {
4375 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4376
4377 cx.spawn(async move |_, cx| {
4378 let environment = project_environment
4379 .upgrade()
4380 .context("missing project environment")?
4381 .update(cx, |project_environment, cx| {
4382 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4383 })?
4384 .await
4385 .unwrap_or_else(|| {
4386 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4387 HashMap::default()
4388 });
4389 let backend = cx
4390 .background_spawn(async move {
4391 fs.open_repo(&dot_git_abs_path)
4392 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4393 })
4394 .await?;
4395
4396 if let Some(git_hosting_provider_registry) =
4397 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4398 {
4399 git_hosting_providers::register_additional_providers(
4400 git_hosting_provider_registry,
4401 backend.clone(),
4402 );
4403 }
4404
4405 let state = RepositoryState::Local {
4406 backend,
4407 environment: Arc::new(environment),
4408 };
4409 let mut jobs = VecDeque::new();
4410 loop {
4411 while let Ok(Some(next_job)) = job_rx.try_next() {
4412 jobs.push_back(next_job);
4413 }
4414
4415 if let Some(job) = jobs.pop_front() {
4416 if let Some(current_key) = &job.key
4417 && jobs
4418 .iter()
4419 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4420 {
4421 continue;
4422 }
4423 (job.job)(state.clone(), cx).await;
4424 } else if let Some(job) = job_rx.next().await {
4425 jobs.push_back(job);
4426 } else {
4427 break;
4428 }
4429 }
4430 anyhow::Ok(())
4431 })
4432 .detach_and_log_err(cx);
4433
4434 job_tx
4435 }
4436
4437 fn spawn_remote_git_worker(
4438 project_id: ProjectId,
4439 client: AnyProtoClient,
4440 cx: &mut Context<Self>,
4441 ) -> mpsc::UnboundedSender<GitJob> {
4442 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4443
4444 cx.spawn(async move |_, cx| {
4445 let state = RepositoryState::Remote { project_id, client };
4446 let mut jobs = VecDeque::new();
4447 loop {
4448 while let Ok(Some(next_job)) = job_rx.try_next() {
4449 jobs.push_back(next_job);
4450 }
4451
4452 if let Some(job) = jobs.pop_front() {
4453 if let Some(current_key) = &job.key
4454 && jobs
4455 .iter()
4456 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4457 {
4458 continue;
4459 }
4460 (job.job)(state.clone(), cx).await;
4461 } else if let Some(job) = job_rx.next().await {
4462 jobs.push_back(job);
4463 } else {
4464 break;
4465 }
4466 }
4467 anyhow::Ok(())
4468 })
4469 .detach_and_log_err(cx);
4470
4471 job_tx
4472 }
4473
4474 fn load_staged_text(
4475 &mut self,
4476 buffer_id: BufferId,
4477 repo_path: RepoPath,
4478 cx: &App,
4479 ) -> Task<Result<Option<String>>> {
4480 let rx = self.send_job(None, move |state, _| async move {
4481 match state {
4482 RepositoryState::Local { backend, .. } => {
4483 anyhow::Ok(backend.load_index_text(repo_path).await)
4484 }
4485 RepositoryState::Remote { project_id, client } => {
4486 let response = client
4487 .request(proto::OpenUnstagedDiff {
4488 project_id: project_id.to_proto(),
4489 buffer_id: buffer_id.to_proto(),
4490 })
4491 .await?;
4492 Ok(response.staged_text)
4493 }
4494 }
4495 });
4496 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4497 }
4498
4499 fn load_committed_text(
4500 &mut self,
4501 buffer_id: BufferId,
4502 repo_path: RepoPath,
4503 cx: &App,
4504 ) -> Task<Result<DiffBasesChange>> {
4505 let rx = self.send_job(None, move |state, _| async move {
4506 match state {
4507 RepositoryState::Local { backend, .. } => {
4508 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4509 let staged_text = backend.load_index_text(repo_path).await;
4510 let diff_bases_change = if committed_text == staged_text {
4511 DiffBasesChange::SetBoth(committed_text)
4512 } else {
4513 DiffBasesChange::SetEach {
4514 index: staged_text,
4515 head: committed_text,
4516 }
4517 };
4518 anyhow::Ok(diff_bases_change)
4519 }
4520 RepositoryState::Remote { project_id, client } => {
4521 use proto::open_uncommitted_diff_response::Mode;
4522
4523 let response = client
4524 .request(proto::OpenUncommittedDiff {
4525 project_id: project_id.to_proto(),
4526 buffer_id: buffer_id.to_proto(),
4527 })
4528 .await?;
4529 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4530 let bases = match mode {
4531 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4532 Mode::IndexAndHead => DiffBasesChange::SetEach {
4533 head: response.committed_text,
4534 index: response.staged_text,
4535 },
4536 };
4537 Ok(bases)
4538 }
4539 }
4540 });
4541
4542 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4543 }
4544
4545 fn paths_changed(
4546 &mut self,
4547 paths: Vec<RepoPath>,
4548 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4549 cx: &mut Context<Self>,
4550 ) {
4551 self.paths_needing_status_update.extend(paths);
4552
4553 let this = cx.weak_entity();
4554 let _ = self.send_keyed_job(
4555 Some(GitJobKey::RefreshStatuses),
4556 None,
4557 |state, mut cx| async move {
4558 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4559 (
4560 this.snapshot.clone(),
4561 mem::take(&mut this.paths_needing_status_update),
4562 )
4563 })?;
4564 let RepositoryState::Local { backend, .. } = state else {
4565 bail!("not a local repository")
4566 };
4567
4568 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4569 if paths.is_empty() {
4570 return Ok(());
4571 }
4572 let statuses = backend.status(&paths).await?;
4573
4574 let changed_path_statuses = cx
4575 .background_spawn(async move {
4576 let mut changed_path_statuses = Vec::new();
4577 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4578 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4579
4580 for (repo_path, status) in &*statuses.entries {
4581 changed_paths.remove(repo_path);
4582 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4583 && cursor.item().is_some_and(|entry| entry.status == *status)
4584 {
4585 continue;
4586 }
4587
4588 changed_path_statuses.push(Edit::Insert(StatusEntry {
4589 repo_path: repo_path.clone(),
4590 status: *status,
4591 }));
4592 }
4593 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4594 for path in changed_paths.into_iter() {
4595 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4596 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4597 }
4598 }
4599 changed_path_statuses
4600 })
4601 .await;
4602
4603 this.update(&mut cx, |this, cx| {
4604 if !changed_path_statuses.is_empty() {
4605 this.snapshot
4606 .statuses_by_path
4607 .edit(changed_path_statuses, &());
4608 this.snapshot.scan_id += 1;
4609 if let Some(updates_tx) = updates_tx {
4610 updates_tx
4611 .unbounded_send(DownstreamUpdate::UpdateRepository(
4612 this.snapshot.clone(),
4613 ))
4614 .ok();
4615 }
4616 }
4617 cx.emit(RepositoryEvent::Updated {
4618 full_scan: false,
4619 new_instance: false,
4620 });
4621 })
4622 },
4623 );
4624 }
4625
4626 /// currently running git command and when it started
4627 pub fn current_job(&self) -> Option<JobInfo> {
4628 self.active_jobs.values().next().cloned()
4629 }
4630
4631 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4632 self.send_job(None, |_, _| async {})
4633 }
4634}
4635
4636fn get_permalink_in_rust_registry_src(
4637 provider_registry: Arc<GitHostingProviderRegistry>,
4638 path: PathBuf,
4639 selection: Range<u32>,
4640) -> Result<url::Url> {
4641 #[derive(Deserialize)]
4642 struct CargoVcsGit {
4643 sha1: String,
4644 }
4645
4646 #[derive(Deserialize)]
4647 struct CargoVcsInfo {
4648 git: CargoVcsGit,
4649 path_in_vcs: String,
4650 }
4651
4652 #[derive(Deserialize)]
4653 struct CargoPackage {
4654 repository: String,
4655 }
4656
4657 #[derive(Deserialize)]
4658 struct CargoToml {
4659 package: CargoPackage,
4660 }
4661
4662 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4663 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4664 Some((dir, json))
4665 }) else {
4666 bail!("No .cargo_vcs_info.json found in parent directories")
4667 };
4668 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4669 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4670 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4671 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4672 .context("parsing package.repository field of manifest")?;
4673 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4674 let permalink = provider.build_permalink(
4675 remote,
4676 BuildPermalinkParams {
4677 sha: &cargo_vcs_info.git.sha1,
4678 path: &path.to_string_lossy(),
4679 selection: Some(selection),
4680 },
4681 );
4682 Ok(permalink)
4683}
4684
4685fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4686 let Some(blame) = blame else {
4687 return proto::BlameBufferResponse {
4688 blame_response: None,
4689 };
4690 };
4691
4692 let entries = blame
4693 .entries
4694 .into_iter()
4695 .map(|entry| proto::BlameEntry {
4696 sha: entry.sha.as_bytes().into(),
4697 start_line: entry.range.start,
4698 end_line: entry.range.end,
4699 original_line_number: entry.original_line_number,
4700 author: entry.author,
4701 author_mail: entry.author_mail,
4702 author_time: entry.author_time,
4703 author_tz: entry.author_tz,
4704 committer: entry.committer_name,
4705 committer_mail: entry.committer_email,
4706 committer_time: entry.committer_time,
4707 committer_tz: entry.committer_tz,
4708 summary: entry.summary,
4709 previous: entry.previous,
4710 filename: entry.filename,
4711 })
4712 .collect::<Vec<_>>();
4713
4714 let messages = blame
4715 .messages
4716 .into_iter()
4717 .map(|(oid, message)| proto::CommitMessage {
4718 oid: oid.as_bytes().into(),
4719 message,
4720 })
4721 .collect::<Vec<_>>();
4722
4723 proto::BlameBufferResponse {
4724 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4725 entries,
4726 messages,
4727 remote_url: blame.remote_url,
4728 }),
4729 }
4730}
4731
4732fn deserialize_blame_buffer_response(
4733 response: proto::BlameBufferResponse,
4734) -> Option<git::blame::Blame> {
4735 let response = response.blame_response?;
4736 let entries = response
4737 .entries
4738 .into_iter()
4739 .filter_map(|entry| {
4740 Some(git::blame::BlameEntry {
4741 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4742 range: entry.start_line..entry.end_line,
4743 original_line_number: entry.original_line_number,
4744 committer_name: entry.committer,
4745 committer_time: entry.committer_time,
4746 committer_tz: entry.committer_tz,
4747 committer_email: entry.committer_mail,
4748 author: entry.author,
4749 author_mail: entry.author_mail,
4750 author_time: entry.author_time,
4751 author_tz: entry.author_tz,
4752 summary: entry.summary,
4753 previous: entry.previous,
4754 filename: entry.filename,
4755 })
4756 })
4757 .collect::<Vec<_>>();
4758
4759 let messages = response
4760 .messages
4761 .into_iter()
4762 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4763 .collect::<HashMap<_, _>>();
4764
4765 Some(Blame {
4766 entries,
4767 messages,
4768 remote_url: response.remote_url,
4769 })
4770}
4771
4772fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4773 proto::Branch {
4774 is_head: branch.is_head,
4775 ref_name: branch.ref_name.to_string(),
4776 unix_timestamp: branch
4777 .most_recent_commit
4778 .as_ref()
4779 .map(|commit| commit.commit_timestamp as u64),
4780 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4781 ref_name: upstream.ref_name.to_string(),
4782 tracking: upstream
4783 .tracking
4784 .status()
4785 .map(|upstream| proto::UpstreamTracking {
4786 ahead: upstream.ahead as u64,
4787 behind: upstream.behind as u64,
4788 }),
4789 }),
4790 most_recent_commit: branch
4791 .most_recent_commit
4792 .as_ref()
4793 .map(|commit| proto::CommitSummary {
4794 sha: commit.sha.to_string(),
4795 subject: commit.subject.to_string(),
4796 commit_timestamp: commit.commit_timestamp,
4797 }),
4798 }
4799}
4800
4801fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
4802 git::repository::Branch {
4803 is_head: proto.is_head,
4804 ref_name: proto.ref_name.clone().into(),
4805 upstream: proto
4806 .upstream
4807 .as_ref()
4808 .map(|upstream| git::repository::Upstream {
4809 ref_name: upstream.ref_name.to_string().into(),
4810 tracking: upstream
4811 .tracking
4812 .as_ref()
4813 .map(|tracking| {
4814 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
4815 ahead: tracking.ahead as u32,
4816 behind: tracking.behind as u32,
4817 })
4818 })
4819 .unwrap_or(git::repository::UpstreamTracking::Gone),
4820 }),
4821 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
4822 git::repository::CommitSummary {
4823 sha: commit.sha.to_string().into(),
4824 subject: commit.subject.to_string().into(),
4825 commit_timestamp: commit.commit_timestamp,
4826 has_parent: true,
4827 }
4828 }),
4829 }
4830}
4831
4832fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
4833 proto::GitCommitDetails {
4834 sha: commit.sha.to_string(),
4835 message: commit.message.to_string(),
4836 commit_timestamp: commit.commit_timestamp,
4837 author_email: commit.author_email.to_string(),
4838 author_name: commit.author_name.to_string(),
4839 }
4840}
4841
4842fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
4843 CommitDetails {
4844 sha: proto.sha.clone().into(),
4845 message: proto.message.clone().into(),
4846 commit_timestamp: proto.commit_timestamp,
4847 author_email: proto.author_email.clone().into(),
4848 author_name: proto.author_name.clone().into(),
4849 }
4850}
4851
4852async fn compute_snapshot(
4853 id: RepositoryId,
4854 work_directory_abs_path: Arc<Path>,
4855 prev_snapshot: RepositorySnapshot,
4856 backend: Arc<dyn GitRepository>,
4857) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
4858 let mut events = Vec::new();
4859 let branches = backend.branches().await?;
4860 let branch = branches.into_iter().find(|branch| branch.is_head);
4861 let statuses = backend
4862 .status(std::slice::from_ref(&WORK_DIRECTORY_REPO_PATH))
4863 .await?;
4864 let statuses_by_path = SumTree::from_iter(
4865 statuses
4866 .entries
4867 .iter()
4868 .map(|(repo_path, status)| StatusEntry {
4869 repo_path: repo_path.clone(),
4870 status: *status,
4871 }),
4872 &(),
4873 );
4874 let (merge_details, merge_heads_changed) =
4875 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
4876 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
4877
4878 if merge_heads_changed
4879 || branch != prev_snapshot.branch
4880 || statuses_by_path != prev_snapshot.statuses_by_path
4881 {
4882 events.push(RepositoryEvent::Updated {
4883 full_scan: true,
4884 new_instance: false,
4885 });
4886 }
4887
4888 // Cache merge conflict paths so they don't change from staging/unstaging,
4889 // until the merge heads change (at commit time, etc.).
4890 if merge_heads_changed {
4891 events.push(RepositoryEvent::MergeHeadsChanged);
4892 }
4893
4894 // Useful when branch is None in detached head state
4895 let head_commit = match backend.head_sha().await {
4896 Some(head_sha) => backend.show(head_sha).await.log_err(),
4897 None => None,
4898 };
4899
4900 // Used by edit prediction data collection
4901 let remote_origin_url = backend.remote_url("origin");
4902 let remote_upstream_url = backend.remote_url("upstream");
4903
4904 let snapshot = RepositorySnapshot {
4905 id,
4906 statuses_by_path,
4907 work_directory_abs_path,
4908 scan_id: prev_snapshot.scan_id + 1,
4909 branch,
4910 head_commit,
4911 merge: merge_details,
4912 remote_origin_url,
4913 remote_upstream_url,
4914 };
4915
4916 Ok((snapshot, events))
4917}
4918
4919fn status_from_proto(
4920 simple_status: i32,
4921 status: Option<proto::GitFileStatus>,
4922) -> anyhow::Result<FileStatus> {
4923 use proto::git_file_status::Variant;
4924
4925 let Some(variant) = status.and_then(|status| status.variant) else {
4926 let code = proto::GitStatus::from_i32(simple_status)
4927 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
4928 let result = match code {
4929 proto::GitStatus::Added => TrackedStatus {
4930 worktree_status: StatusCode::Added,
4931 index_status: StatusCode::Unmodified,
4932 }
4933 .into(),
4934 proto::GitStatus::Modified => TrackedStatus {
4935 worktree_status: StatusCode::Modified,
4936 index_status: StatusCode::Unmodified,
4937 }
4938 .into(),
4939 proto::GitStatus::Conflict => UnmergedStatus {
4940 first_head: UnmergedStatusCode::Updated,
4941 second_head: UnmergedStatusCode::Updated,
4942 }
4943 .into(),
4944 proto::GitStatus::Deleted => TrackedStatus {
4945 worktree_status: StatusCode::Deleted,
4946 index_status: StatusCode::Unmodified,
4947 }
4948 .into(),
4949 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
4950 };
4951 return Ok(result);
4952 };
4953
4954 let result = match variant {
4955 Variant::Untracked(_) => FileStatus::Untracked,
4956 Variant::Ignored(_) => FileStatus::Ignored,
4957 Variant::Unmerged(unmerged) => {
4958 let [first_head, second_head] =
4959 [unmerged.first_head, unmerged.second_head].map(|head| {
4960 let code = proto::GitStatus::from_i32(head)
4961 .with_context(|| format!("Invalid git status code: {head}"))?;
4962 let result = match code {
4963 proto::GitStatus::Added => UnmergedStatusCode::Added,
4964 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
4965 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
4966 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
4967 };
4968 Ok(result)
4969 });
4970 let [first_head, second_head] = [first_head?, second_head?];
4971 UnmergedStatus {
4972 first_head,
4973 second_head,
4974 }
4975 .into()
4976 }
4977 Variant::Tracked(tracked) => {
4978 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
4979 .map(|status| {
4980 let code = proto::GitStatus::from_i32(status)
4981 .with_context(|| format!("Invalid git status code: {status}"))?;
4982 let result = match code {
4983 proto::GitStatus::Modified => StatusCode::Modified,
4984 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
4985 proto::GitStatus::Added => StatusCode::Added,
4986 proto::GitStatus::Deleted => StatusCode::Deleted,
4987 proto::GitStatus::Renamed => StatusCode::Renamed,
4988 proto::GitStatus::Copied => StatusCode::Copied,
4989 proto::GitStatus::Unmodified => StatusCode::Unmodified,
4990 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
4991 };
4992 Ok(result)
4993 });
4994 let [index_status, worktree_status] = [index_status?, worktree_status?];
4995 TrackedStatus {
4996 index_status,
4997 worktree_status,
4998 }
4999 .into()
5000 }
5001 };
5002 Ok(result)
5003}
5004
5005fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5006 use proto::git_file_status::{Tracked, Unmerged, Variant};
5007
5008 let variant = match status {
5009 FileStatus::Untracked => Variant::Untracked(Default::default()),
5010 FileStatus::Ignored => Variant::Ignored(Default::default()),
5011 FileStatus::Unmerged(UnmergedStatus {
5012 first_head,
5013 second_head,
5014 }) => Variant::Unmerged(Unmerged {
5015 first_head: unmerged_status_to_proto(first_head),
5016 second_head: unmerged_status_to_proto(second_head),
5017 }),
5018 FileStatus::Tracked(TrackedStatus {
5019 index_status,
5020 worktree_status,
5021 }) => Variant::Tracked(Tracked {
5022 index_status: tracked_status_to_proto(index_status),
5023 worktree_status: tracked_status_to_proto(worktree_status),
5024 }),
5025 };
5026 proto::GitFileStatus {
5027 variant: Some(variant),
5028 }
5029}
5030
5031fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5032 match code {
5033 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5034 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5035 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5036 }
5037}
5038
5039fn tracked_status_to_proto(code: StatusCode) -> i32 {
5040 match code {
5041 StatusCode::Added => proto::GitStatus::Added as _,
5042 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5043 StatusCode::Modified => proto::GitStatus::Modified as _,
5044 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5045 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5046 StatusCode::Copied => proto::GitStatus::Copied as _,
5047 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5048 }
5049}