1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::AskPassDelegate;
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 status::{
32 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
33 },
34};
35use gpui::{
36 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
37 WeakEntity,
38};
39use language::{
40 Buffer, BufferEvent, Language, LanguageRegistry,
41 proto::{deserialize_version, serialize_version},
42};
43use parking_lot::Mutex;
44use postage::stream::Stream as _;
45use rpc::{
46 AnyProtoClient, TypedEnvelope,
47 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
48};
49use serde::Deserialize;
50use std::{
51 cmp::Ordering,
52 collections::{BTreeSet, VecDeque},
53 future::Future,
54 mem,
55 ops::Range,
56 path::{Path, PathBuf},
57 sync::{
58 Arc,
59 atomic::{self, AtomicU64},
60 },
61 time::Instant,
62};
63use sum_tree::{Edit, SumTree, TreeSet};
64use text::{Bias, BufferId};
65use util::{ResultExt, debug_panic, post_inc};
66use worktree::{
67 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
68 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
69};
70
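/// Tracks the git state of a project: the repositories discovered in its
/// worktrees, per-buffer diffs against the index and HEAD, and per-buffer
/// conflict sets. When the project is shared, repository updates are forwarded
/// to downstream collaborators.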
71pub struct GitStore {
72 state: GitStoreState,
73 buffer_store: Entity<BufferStore>,
74 worktree_store: Entity<WorktreeStore>,
75 repositories: HashMap<RepositoryId, Entity<Repository>>,
76 active_repo_id: Option<RepositoryId>,
77 #[allow(clippy::type_complexity)]
78 loading_diffs:
79 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
80 diffs: HashMap<BufferId, Entity<BufferGitState>>,
81 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
82 _subscriptions: Vec<Subscription>,
83}
84
85#[derive(Default)]
86struct SharedDiffs {
87 unstaged: Option<Entity<BufferDiff>>,
88 uncommitted: Option<Entity<BufferDiff>>,
89}
90
91struct BufferGitState {
92 unstaged_diff: Option<WeakEntity<BufferDiff>>,
93 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
94 conflict_set: Option<WeakEntity<ConflictSet>>,
95 recalculate_diff_task: Option<Task<Result<()>>>,
96 reparse_conflict_markers_task: Option<Task<Result<()>>>,
97 language: Option<Arc<Language>>,
98 language_registry: Option<Arc<LanguageRegistry>>,
99 conflict_updated_futures: Vec<oneshot::Sender<()>>,
100 recalculating_tx: postage::watch::Sender<bool>,
101
102 /// These operation counts are used to ensure that head and index text
103 /// values read from the git repository are up-to-date with any hunk staging
104 /// operations that have been performed on the BufferDiff.
105 ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, `hunk_staging_operation_count_as_of_write` is updated to
    /// reflect the operation count that prompted the write.
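    ///
    /// Illustrative example: if two hunks are staged in quick succession,
    /// `hunk_staging_operation_count` becomes 2 immediately, while
    /// `hunk_staging_operation_count_as_of_write` only reaches 2 once the index
    /// write for the second operation finishes; index text read while the two
    /// counts differ may not yet reflect the pending operations.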
110 hunk_staging_operation_count: usize,
111 hunk_staging_operation_count_as_of_write: usize,
112
113 head_text: Option<Arc<String>>,
114 index_text: Option<Arc<String>>,
115 head_changed: bool,
116 index_changed: bool,
117 language_changed: bool,
118}
119
120#[derive(Clone, Debug)]
121enum DiffBasesChange {
122 SetIndex(Option<String>),
123 SetHead(Option<String>),
124 SetEach {
125 index: Option<String>,
126 head: Option<String>,
127 },
128 SetBoth(Option<String>),
129}
130
131#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
132enum DiffKind {
133 Unstaged,
134 Uncommitted,
135}
136
137enum GitStoreState {
138 Local {
139 next_repository_id: Arc<AtomicU64>,
140 downstream: Option<LocalDownstreamState>,
141 project_environment: Entity<ProjectEnvironment>,
142 fs: Arc<dyn Fs>,
143 },
144 Ssh {
145 upstream_client: AnyProtoClient,
146 upstream_project_id: ProjectId,
147 downstream: Option<(AnyProtoClient, ProjectId)>,
148 },
149 Remote {
150 upstream_client: AnyProtoClient,
151 upstream_project_id: ProjectId,
152 },
153}
154
155enum DownstreamUpdate {
156 UpdateRepository(RepositorySnapshot),
157 RemoveRepository(RepositoryId),
158}
159
160struct LocalDownstreamState {
161 client: AnyProtoClient,
162 project_id: ProjectId,
163 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
164 _task: Task<Result<()>>,
165}
166
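/// A snapshot of the state of every repository in a [`GitStore`], keyed by
/// working-directory path. Produced by [`GitStore::checkpoint`] and consumed by
/// [`GitStore::restore_checkpoint`] and [`GitStore::compare_checkpoints`].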
167#[derive(Clone, Debug)]
168pub struct GitStoreCheckpoint {
169 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
170}
171
172#[derive(Clone, Debug, PartialEq, Eq)]
173pub struct StatusEntry {
174 pub repo_path: RepoPath,
175 pub status: FileStatus,
176}
177
178impl StatusEntry {
179 fn to_proto(&self) -> proto::StatusEntry {
180 let simple_status = match self.status {
181 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
182 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
183 FileStatus::Tracked(TrackedStatus {
184 index_status,
185 worktree_status,
186 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
187 worktree_status
188 } else {
189 index_status
190 }),
191 };
192
193 proto::StatusEntry {
194 repo_path: self.repo_path.as_ref().to_proto(),
195 simple_status,
196 status: Some(status_to_proto(self.status)),
197 }
198 }
199}
200
201impl TryFrom<proto::StatusEntry> for StatusEntry {
202 type Error = anyhow::Error;
203
204 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
205 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
206 let status = status_from_proto(value.simple_status, value.status)?;
207 Ok(Self { repo_path, status })
208 }
209}
210
211impl sum_tree::Item for StatusEntry {
212 type Summary = PathSummary<GitSummary>;
213
214 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
215 PathSummary {
216 max_path: self.repo_path.0.clone(),
217 item_summary: self.status.summary(),
218 }
219 }
220}
221
222impl sum_tree::KeyedItem for StatusEntry {
223 type Key = PathKey;
224
225 fn key(&self) -> Self::Key {
226 PathKey(self.repo_path.0.clone())
227 }
228}
229
230#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
231pub struct RepositoryId(pub u64);
232
233#[derive(Clone, Debug, Default, PartialEq, Eq)]
234pub struct MergeDetails {
235 pub conflicted_paths: TreeSet<RepoPath>,
236 pub message: Option<SharedString>,
237 pub heads: Vec<Option<SharedString>>,
238}
239
240#[derive(Clone, Debug, PartialEq, Eq)]
241pub struct RepositorySnapshot {
242 pub id: RepositoryId,
243 pub statuses_by_path: SumTree<StatusEntry>,
244 pub work_directory_abs_path: Arc<Path>,
245 pub branch: Option<Branch>,
246 pub head_commit: Option<CommitDetails>,
247 pub scan_id: u64,
248 pub merge: MergeDetails,
249 pub remote_origin_url: Option<String>,
250 pub remote_upstream_url: Option<String>,
251}
252
253type JobId = u64;
254
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct JobInfo {
257 pub start: Instant,
258 pub message: SharedString,
259}
260
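/// A single git repository tracked by the [`GitStore`].
///
/// Dereferences to its latest [`RepositorySnapshot`]. Git operations are queued
/// as jobs; for local projects they run against a [`GitRepository`] backend,
/// while for remote projects they are proxied to the upstream host over RPC.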
261pub struct Repository {
262 this: WeakEntity<Self>,
263 snapshot: RepositorySnapshot,
264 commit_message_buffer: Option<Entity<Buffer>>,
265 git_store: WeakEntity<GitStore>,
266 // For a local repository, holds paths that have had worktree events since the last status scan completed,
267 // and that should be examined during the next status scan.
268 paths_needing_status_update: BTreeSet<RepoPath>,
269 job_sender: mpsc::UnboundedSender<GitJob>,
270 active_jobs: HashMap<JobId, JobInfo>,
271 job_id: JobId,
272 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
273 latest_askpass_id: u64,
274}
275
276impl std::ops::Deref for Repository {
277 type Target = RepositorySnapshot;
278
279 fn deref(&self) -> &Self::Target {
280 &self.snapshot
281 }
282}
283
284#[derive(Clone)]
285pub enum RepositoryState {
286 Local {
287 backend: Arc<dyn GitRepository>,
288 environment: Arc<HashMap<String, String>>,
289 },
290 Remote {
291 project_id: ProjectId,
292 client: AnyProtoClient,
293 },
294}
295
296#[derive(Clone, Debug)]
297pub enum RepositoryEvent {
298 Updated { full_scan: bool, new_instance: bool },
299 MergeHeadsChanged,
300}
301
302#[derive(Clone, Debug)]
303pub struct JobsUpdated;
304
305#[derive(Debug)]
306pub enum GitStoreEvent {
307 ActiveRepositoryChanged(Option<RepositoryId>),
308 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
309 RepositoryAdded(RepositoryId),
310 RepositoryRemoved(RepositoryId),
311 IndexWriteError(anyhow::Error),
312 JobsUpdated,
313 ConflictsUpdated,
314}
315
316impl EventEmitter<RepositoryEvent> for Repository {}
317impl EventEmitter<JobsUpdated> for Repository {}
318impl EventEmitter<GitStoreEvent> for GitStore {}
319
320pub struct GitJob {
321 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
322 key: Option<GitJobKey>,
323}
324
325#[derive(PartialEq, Eq)]
326enum GitJobKey {
327 WriteIndex(RepoPath),
328 ReloadBufferDiffBases,
329 RefreshStatuses,
330 ReloadGitState,
331}
332
333impl GitStore {
334 pub fn local(
335 worktree_store: &Entity<WorktreeStore>,
336 buffer_store: Entity<BufferStore>,
337 environment: Entity<ProjectEnvironment>,
338 fs: Arc<dyn Fs>,
339 cx: &mut Context<Self>,
340 ) -> Self {
341 Self::new(
342 worktree_store.clone(),
343 buffer_store,
344 GitStoreState::Local {
345 next_repository_id: Arc::new(AtomicU64::new(1)),
346 downstream: None,
347 project_environment: environment,
348 fs,
349 },
350 cx,
351 )
352 }
353
354 pub fn remote(
355 worktree_store: &Entity<WorktreeStore>,
356 buffer_store: Entity<BufferStore>,
357 upstream_client: AnyProtoClient,
358 project_id: ProjectId,
359 cx: &mut Context<Self>,
360 ) -> Self {
361 Self::new(
362 worktree_store.clone(),
363 buffer_store,
364 GitStoreState::Remote {
365 upstream_client,
366 upstream_project_id: project_id,
367 },
368 cx,
369 )
370 }
371
372 pub fn ssh(
373 worktree_store: &Entity<WorktreeStore>,
374 buffer_store: Entity<BufferStore>,
375 upstream_client: AnyProtoClient,
376 cx: &mut Context<Self>,
377 ) -> Self {
378 Self::new(
379 worktree_store.clone(),
380 buffer_store,
381 GitStoreState::Ssh {
382 upstream_client,
383 upstream_project_id: ProjectId(SSH_PROJECT_ID),
384 downstream: None,
385 },
386 cx,
387 )
388 }
389
390 fn new(
391 worktree_store: Entity<WorktreeStore>,
392 buffer_store: Entity<BufferStore>,
393 state: GitStoreState,
394 cx: &mut Context<Self>,
395 ) -> Self {
396 let _subscriptions = vec![
397 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
398 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
399 ];
400
401 GitStore {
402 state,
403 buffer_store,
404 worktree_store,
405 repositories: HashMap::default(),
406 active_repo_id: None,
407 _subscriptions,
408 loading_diffs: HashMap::default(),
409 shared_diffs: HashMap::default(),
410 diffs: HashMap::default(),
411 }
412 }
413
414 pub fn init(client: &AnyProtoClient) {
415 client.add_entity_request_handler(Self::handle_get_remotes);
416 client.add_entity_request_handler(Self::handle_get_branches);
417 client.add_entity_request_handler(Self::handle_get_default_branch);
418 client.add_entity_request_handler(Self::handle_change_branch);
419 client.add_entity_request_handler(Self::handle_create_branch);
420 client.add_entity_request_handler(Self::handle_git_init);
421 client.add_entity_request_handler(Self::handle_push);
422 client.add_entity_request_handler(Self::handle_pull);
423 client.add_entity_request_handler(Self::handle_fetch);
424 client.add_entity_request_handler(Self::handle_stage);
425 client.add_entity_request_handler(Self::handle_unstage);
426 client.add_entity_request_handler(Self::handle_stash);
427 client.add_entity_request_handler(Self::handle_stash_pop);
428 client.add_entity_request_handler(Self::handle_commit);
429 client.add_entity_request_handler(Self::handle_reset);
430 client.add_entity_request_handler(Self::handle_show);
431 client.add_entity_request_handler(Self::handle_load_commit_diff);
432 client.add_entity_request_handler(Self::handle_checkout_files);
433 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
434 client.add_entity_request_handler(Self::handle_set_index_text);
435 client.add_entity_request_handler(Self::handle_askpass);
436 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
437 client.add_entity_request_handler(Self::handle_git_diff);
438 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
439 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
440 client.add_entity_message_handler(Self::handle_update_diff_bases);
441 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
442 client.add_entity_request_handler(Self::handle_blame_buffer);
443 client.add_entity_message_handler(Self::handle_update_repository);
444 client.add_entity_message_handler(Self::handle_remove_repository);
445 client.add_entity_request_handler(Self::handle_git_clone);
446 }
447
448 pub fn is_local(&self) -> bool {
449 matches!(self.state, GitStoreState::Local { .. })
450 }
451
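    /// Begins sharing this store's repositories with a downstream client:
    /// sends an initial snapshot of each repository and forwards subsequent
    /// updates until [`GitStore::unshared`] is called.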
452 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
453 match &mut self.state {
454 GitStoreState::Ssh {
455 downstream: downstream_client,
456 ..
457 } => {
458 for repo in self.repositories.values() {
459 let update = repo.read(cx).snapshot.initial_update(project_id);
460 for update in split_repository_update(update) {
461 client.send(update).log_err();
462 }
463 }
464 *downstream_client = Some((client, ProjectId(project_id)));
465 }
466 GitStoreState::Local {
467 downstream: downstream_client,
468 ..
469 } => {
470 let mut snapshots = HashMap::default();
471 let (updates_tx, mut updates_rx) = mpsc::unbounded();
472 for repo in self.repositories.values() {
473 updates_tx
474 .unbounded_send(DownstreamUpdate::UpdateRepository(
475 repo.read(cx).snapshot.clone(),
476 ))
477 .ok();
478 }
479 *downstream_client = Some(LocalDownstreamState {
480 client: client.clone(),
481 project_id: ProjectId(project_id),
482 updates_tx,
483 _task: cx.spawn(async move |this, cx| {
484 cx.background_spawn(async move {
485 while let Some(update) = updates_rx.next().await {
486 match update {
487 DownstreamUpdate::UpdateRepository(snapshot) => {
488 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
489 {
490 let update =
491 snapshot.build_update(old_snapshot, project_id);
492 *old_snapshot = snapshot;
493 for update in split_repository_update(update) {
494 client.send(update)?;
495 }
496 } else {
497 let update = snapshot.initial_update(project_id);
498 for update in split_repository_update(update) {
499 client.send(update)?;
500 }
501 snapshots.insert(snapshot.id, snapshot);
502 }
503 }
504 DownstreamUpdate::RemoveRepository(id) => {
505 client.send(proto::RemoveRepository {
506 project_id,
507 id: id.to_proto(),
508 })?;
509 }
510 }
511 }
512 anyhow::Ok(())
513 })
514 .await
515 .ok();
516 this.update(cx, |this, _| {
517 if let GitStoreState::Local {
518 downstream: downstream_client,
519 ..
520 } = &mut this.state
521 {
522 downstream_client.take();
523 } else {
524 unreachable!("unshared called on remote store");
525 }
526 })
527 }),
528 });
529 }
530 GitStoreState::Remote { .. } => {
531 debug_panic!("shared called on remote store");
532 }
533 }
534 }
535
536 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
537 match &mut self.state {
538 GitStoreState::Local {
539 downstream: downstream_client,
540 ..
541 } => {
542 downstream_client.take();
543 }
544 GitStoreState::Ssh {
545 downstream: downstream_client,
546 ..
547 } => {
548 downstream_client.take();
549 }
550 GitStoreState::Remote { .. } => {
551 debug_panic!("unshared called on remote store");
552 }
553 }
554 self.shared_diffs.clear();
555 }
556
557 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
558 self.shared_diffs.remove(peer_id);
559 }
560
561 pub fn active_repository(&self) -> Option<Entity<Repository>> {
562 self.active_repo_id
563 .as_ref()
564 .map(|id| self.repositories[id].clone())
565 }
566
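    /// Returns the unstaged diff (buffer contents vs. the git index) for the
    /// given buffer, loading and caching it if necessary. Concurrent requests
    /// for the same buffer share a single loading task.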
567 pub fn open_unstaged_diff(
568 &mut self,
569 buffer: Entity<Buffer>,
570 cx: &mut Context<Self>,
571 ) -> Task<Result<Entity<BufferDiff>>> {
572 let buffer_id = buffer.read(cx).remote_id();
573 if let Some(diff_state) = self.diffs.get(&buffer_id)
574 && let Some(unstaged_diff) = diff_state
575 .read(cx)
576 .unstaged_diff
577 .as_ref()
578 .and_then(|weak| weak.upgrade())
579 {
580 if let Some(task) =
581 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
582 {
583 return cx.background_executor().spawn(async move {
584 task.await;
585 Ok(unstaged_diff)
586 });
587 }
588 return Task::ready(Ok(unstaged_diff));
589 }
590
591 let Some((repo, repo_path)) =
592 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
593 else {
594 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
595 };
596
597 let task = self
598 .loading_diffs
599 .entry((buffer_id, DiffKind::Unstaged))
600 .or_insert_with(|| {
601 let staged_text = repo.update(cx, |repo, cx| {
602 repo.load_staged_text(buffer_id, repo_path, cx)
603 });
604 cx.spawn(async move |this, cx| {
605 Self::open_diff_internal(
606 this,
607 DiffKind::Unstaged,
608 staged_text.await.map(DiffBasesChange::SetIndex),
609 buffer,
610 cx,
611 )
612 .await
613 .map_err(Arc::new)
614 })
615 .shared()
616 })
617 .clone();
618
619 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
620 }
621
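    /// Returns the uncommitted diff (buffer contents vs. HEAD) for the given
    /// buffer, loading and caching it if necessary. The buffer's unstaged diff
    /// is attached as the secondary diff, so staged and unstaged changes can be
    /// distinguished.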
622 pub fn open_uncommitted_diff(
623 &mut self,
624 buffer: Entity<Buffer>,
625 cx: &mut Context<Self>,
626 ) -> Task<Result<Entity<BufferDiff>>> {
627 let buffer_id = buffer.read(cx).remote_id();
628
629 if let Some(diff_state) = self.diffs.get(&buffer_id)
630 && let Some(uncommitted_diff) = diff_state
631 .read(cx)
632 .uncommitted_diff
633 .as_ref()
634 .and_then(|weak| weak.upgrade())
635 {
636 if let Some(task) =
637 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
638 {
639 return cx.background_executor().spawn(async move {
640 task.await;
641 Ok(uncommitted_diff)
642 });
643 }
644 return Task::ready(Ok(uncommitted_diff));
645 }
646
647 let Some((repo, repo_path)) =
648 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
649 else {
650 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
651 };
652
653 let task = self
654 .loading_diffs
655 .entry((buffer_id, DiffKind::Uncommitted))
656 .or_insert_with(|| {
657 let changes = repo.update(cx, |repo, cx| {
658 repo.load_committed_text(buffer_id, repo_path, cx)
659 });
660
661 cx.spawn(async move |this, cx| {
662 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
663 .await
664 .map_err(Arc::new)
665 })
666 .shared()
667 })
668 .clone();
669
670 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
671 }
672
673 async fn open_diff_internal(
674 this: WeakEntity<Self>,
675 kind: DiffKind,
676 texts: Result<DiffBasesChange>,
677 buffer_entity: Entity<Buffer>,
678 cx: &mut AsyncApp,
679 ) -> Result<Entity<BufferDiff>> {
680 let diff_bases_change = match texts {
681 Err(e) => {
682 this.update(cx, |this, cx| {
683 let buffer = buffer_entity.read(cx);
684 let buffer_id = buffer.remote_id();
685 this.loading_diffs.remove(&(buffer_id, kind));
686 })?;
687 return Err(e);
688 }
689 Ok(change) => change,
690 };
691
692 this.update(cx, |this, cx| {
693 let buffer = buffer_entity.read(cx);
694 let buffer_id = buffer.remote_id();
695 let language = buffer.language().cloned();
696 let language_registry = buffer.language_registry();
697 let text_snapshot = buffer.text_snapshot();
698 this.loading_diffs.remove(&(buffer_id, kind));
699
700 let git_store = cx.weak_entity();
701 let diff_state = this
702 .diffs
703 .entry(buffer_id)
704 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
705
706 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
707
708 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
709 diff_state.update(cx, |diff_state, cx| {
710 diff_state.language = language;
711 diff_state.language_registry = language_registry;
712
713 match kind {
714 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
715 DiffKind::Uncommitted => {
716 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
717 diff
718 } else {
719 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
720 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
721 unstaged_diff
722 };
723
724 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
725 diff_state.uncommitted_diff = Some(diff.downgrade())
726 }
727 }
728
729 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
730 let rx = diff_state.wait_for_recalculation();
731
732 anyhow::Ok(async move {
733 if let Some(rx) = rx {
734 rx.await;
735 }
736 Ok(diff)
737 })
738 })
739 })??
740 .await
741 }
742
743 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
744 let diff_state = self.diffs.get(&buffer_id)?;
745 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
746 }
747
748 pub fn get_uncommitted_diff(
749 &self,
750 buffer_id: BufferId,
751 cx: &App,
752 ) -> Option<Entity<BufferDiff>> {
753 let diff_state = self.diffs.get(&buffer_id)?;
754 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
755 }
756
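    /// Returns the [`ConflictSet`] for the given buffer, creating it if it
    /// doesn't exist yet, and schedules a re-parse of the buffer's conflict
    /// markers.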
757 pub fn open_conflict_set(
758 &mut self,
759 buffer: Entity<Buffer>,
760 cx: &mut Context<Self>,
761 ) -> Entity<ConflictSet> {
762 log::debug!("open conflict set");
763 let buffer_id = buffer.read(cx).remote_id();
764
765 if let Some(git_state) = self.diffs.get(&buffer_id)
766 && let Some(conflict_set) = git_state
767 .read(cx)
768 .conflict_set
769 .as_ref()
770 .and_then(|weak| weak.upgrade())
771 {
772 let conflict_set = conflict_set.clone();
773 let buffer_snapshot = buffer.read(cx).text_snapshot();
774
775 git_state.update(cx, |state, cx| {
776 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
777 });
778
779 return conflict_set;
780 }
781
782 let is_unmerged = self
783 .repository_and_path_for_buffer_id(buffer_id, cx)
784 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
785 let git_store = cx.weak_entity();
786 let buffer_git_state = self
787 .diffs
788 .entry(buffer_id)
789 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
790 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
791
792 self._subscriptions
793 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
794 cx.emit(GitStoreEvent::ConflictsUpdated);
795 }));
796
797 buffer_git_state.update(cx, |state, cx| {
798 state.conflict_set = Some(conflict_set.downgrade());
799 let buffer_snapshot = buffer.read(cx).text_snapshot();
800 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
801 });
802
803 conflict_set
804 }
805
806 pub fn project_path_git_status(
807 &self,
808 project_path: &ProjectPath,
809 cx: &App,
810 ) -> Option<FileStatus> {
811 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
812 Some(repo.read(cx).status_for_path(&repo_path)?.status)
813 }
814
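    /// Captures a checkpoint of every repository in the store, keyed by
    /// working-directory path.
    ///
    /// A minimal usage sketch (illustrative only; assumes an existing
    /// `git_store: Entity<GitStore>` and an async context in which
    /// `Entity::update` returns a `Result`):
    ///
    /// ```ignore
    /// // Capture the current state of all repositories...
    /// let checkpoint = git_store
    ///     .update(cx, |store, cx| store.checkpoint(cx))?
    ///     .await?;
    /// // ...and later roll every repository back to that state.
    /// git_store
    ///     .update(cx, |store, cx| store.restore_checkpoint(checkpoint, cx))?
    ///     .await?;
    /// ```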
815 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
816 let mut work_directory_abs_paths = Vec::new();
817 let mut checkpoints = Vec::new();
818 for repository in self.repositories.values() {
819 repository.update(cx, |repository, _| {
820 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
821 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
822 });
823 }
824
825 cx.background_executor().spawn(async move {
826 let checkpoints = future::try_join_all(checkpoints).await?;
827 Ok(GitStoreCheckpoint {
828 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
829 .into_iter()
830 .zip(checkpoints)
831 .collect(),
832 })
833 })
834 }
835
836 pub fn restore_checkpoint(
837 &self,
838 checkpoint: GitStoreCheckpoint,
839 cx: &mut App,
840 ) -> Task<Result<()>> {
841 let repositories_by_work_dir_abs_path = self
842 .repositories
843 .values()
844 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
845 .collect::<HashMap<_, _>>();
846
847 let mut tasks = Vec::new();
848 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
849 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
850 let restore = repository.update(cx, |repository, _| {
851 repository.restore_checkpoint(checkpoint)
852 });
853 tasks.push(async move { restore.await? });
854 }
855 }
856 cx.background_spawn(async move {
857 future::try_join_all(tasks).await?;
858 Ok(())
859 })
860 }
861
862 /// Compares two checkpoints, returning true if they are equal.
863 pub fn compare_checkpoints(
864 &self,
865 left: GitStoreCheckpoint,
866 mut right: GitStoreCheckpoint,
867 cx: &mut App,
868 ) -> Task<Result<bool>> {
869 let repositories_by_work_dir_abs_path = self
870 .repositories
871 .values()
872 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
873 .collect::<HashMap<_, _>>();
874
875 let mut tasks = Vec::new();
876 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
877 if let Some(right_checkpoint) = right
878 .checkpoints_by_work_dir_abs_path
879 .remove(&work_dir_abs_path)
880 {
881 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
882 {
883 let compare = repository.update(cx, |repository, _| {
884 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
885 });
886
887 tasks.push(async move { compare.await? });
888 }
889 } else {
890 return Task::ready(Ok(false));
891 }
892 }
893 cx.background_spawn(async move {
894 Ok(future::try_join_all(tasks)
895 .await?
896 .into_iter()
897 .all(|result| result))
898 })
899 }
900
901 /// Blames a buffer.
902 pub fn blame_buffer(
903 &self,
904 buffer: &Entity<Buffer>,
905 version: Option<clock::Global>,
906 cx: &mut App,
907 ) -> Task<Result<Option<Blame>>> {
908 let buffer = buffer.read(cx);
909 let Some((repo, repo_path)) =
910 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
911 else {
912 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
913 };
914 let content = match &version {
915 Some(version) => buffer.rope_for_version(version).clone(),
916 None => buffer.as_rope().clone(),
917 };
918 let version = version.unwrap_or(buffer.version());
919 let buffer_id = buffer.remote_id();
920
921 let rx = repo.update(cx, |repo, _| {
922 repo.send_job(None, move |state, _| async move {
923 match state {
924 RepositoryState::Local { backend, .. } => backend
925 .blame(repo_path.clone(), content)
926 .await
927 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
928 .map(Some),
929 RepositoryState::Remote { project_id, client } => {
930 let response = client
931 .request(proto::BlameBuffer {
932 project_id: project_id.to_proto(),
933 buffer_id: buffer_id.into(),
934 version: serialize_version(&version),
935 })
936 .await?;
937 Ok(deserialize_blame_buffer_response(response))
938 }
939 }
940 })
941 });
942
943 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
944 }
945
946 pub fn get_permalink_to_line(
947 &self,
948 buffer: &Entity<Buffer>,
949 selection: Range<u32>,
950 cx: &mut App,
951 ) -> Task<Result<url::Url>> {
952 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
953 return Task::ready(Err(anyhow!("buffer has no file")));
954 };
955
956 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
957 &(file.worktree.read(cx).id(), file.path.clone()).into(),
958 cx,
959 ) else {
960 // If we're not in a Git repo, check whether this is a Rust source
961 // file in the Cargo registry (presumably opened with go-to-definition
962 // from a normal Rust file). If so, we can put together a permalink
963 // using crate metadata.
964 if buffer
965 .read(cx)
966 .language()
967 .is_none_or(|lang| lang.name() != "Rust".into())
968 {
969 return Task::ready(Err(anyhow!("no permalink available")));
970 }
971 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
972 return Task::ready(Err(anyhow!("no permalink available")));
973 };
974 return cx.spawn(async move |cx| {
975 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
976 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
977 .context("no permalink available")
978 });
979
980 // TODO remote case
981 };
982
983 let buffer_id = buffer.read(cx).remote_id();
984 let branch = repo.read(cx).branch.clone();
985 let remote = branch
986 .as_ref()
987 .and_then(|b| b.upstream.as_ref())
988 .and_then(|b| b.remote_name())
989 .unwrap_or("origin")
990 .to_string();
991
992 let rx = repo.update(cx, |repo, _| {
993 repo.send_job(None, move |state, cx| async move {
994 match state {
995 RepositoryState::Local { backend, .. } => {
996 let origin_url = backend
997 .remote_url(&remote)
998 .with_context(|| format!("remote \"{remote}\" not found"))?;
999
1000 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1001
1002 let provider_registry =
1003 cx.update(GitHostingProviderRegistry::default_global)?;
1004
1005 let (provider, remote) =
1006 parse_git_remote_url(provider_registry, &origin_url)
1007 .context("parsing Git remote URL")?;
1008
1009 let path = repo_path.to_str().with_context(|| {
1010 format!("converting repo path {repo_path:?} to string")
1011 })?;
1012
1013 Ok(provider.build_permalink(
1014 remote,
1015 BuildPermalinkParams {
1016 sha: &sha,
1017 path,
1018 selection: Some(selection),
1019 },
1020 ))
1021 }
1022 RepositoryState::Remote { project_id, client } => {
1023 let response = client
1024 .request(proto::GetPermalinkToLine {
1025 project_id: project_id.to_proto(),
1026 buffer_id: buffer_id.into(),
1027 selection: Some(proto::Range {
1028 start: selection.start as u64,
1029 end: selection.end as u64,
1030 }),
1031 })
1032 .await?;
1033
1034 url::Url::parse(&response.permalink).context("failed to parse permalink")
1035 }
1036 }
1037 })
1038 });
1039 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1040 }
1041
1042 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1043 match &self.state {
1044 GitStoreState::Local {
1045 downstream: downstream_client,
1046 ..
1047 } => downstream_client
1048 .as_ref()
1049 .map(|state| (state.client.clone(), state.project_id)),
1050 GitStoreState::Ssh {
1051 downstream: downstream_client,
1052 ..
1053 } => downstream_client.clone(),
1054 GitStoreState::Remote { .. } => None,
1055 }
1056 }
1057
1058 fn upstream_client(&self) -> Option<AnyProtoClient> {
1059 match &self.state {
1060 GitStoreState::Local { .. } => None,
1061 GitStoreState::Ssh {
1062 upstream_client, ..
1063 }
1064 | GitStoreState::Remote {
1065 upstream_client, ..
1066 } => Some(upstream_client.clone()),
1067 }
1068 }
1069
1070 fn on_worktree_store_event(
1071 &mut self,
1072 worktree_store: Entity<WorktreeStore>,
1073 event: &WorktreeStoreEvent,
1074 cx: &mut Context<Self>,
1075 ) {
1076 let GitStoreState::Local {
1077 project_environment,
1078 downstream,
1079 next_repository_id,
1080 fs,
1081 } = &self.state
1082 else {
1083 return;
1084 };
1085
1086 match event {
1087 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1088 if let Some(worktree) = self
1089 .worktree_store
1090 .read(cx)
1091 .worktree_for_id(*worktree_id, cx)
1092 {
1093 let paths_by_git_repo =
1094 self.process_updated_entries(&worktree, updated_entries, cx);
1095 let downstream = downstream
1096 .as_ref()
1097 .map(|downstream| downstream.updates_tx.clone());
1098 cx.spawn(async move |_, cx| {
1099 let paths_by_git_repo = paths_by_git_repo.await;
1100 for (repo, paths) in paths_by_git_repo {
1101 repo.update(cx, |repo, cx| {
1102 repo.paths_changed(paths, downstream.clone(), cx);
1103 })
1104 .ok();
1105 }
1106 })
1107 .detach();
1108 }
1109 }
1110 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1111 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1112 else {
1113 return;
1114 };
1115 if !worktree.read(cx).is_visible() {
1116 log::debug!(
1117 "not adding repositories for local worktree {:?} because it's not visible",
1118 worktree.read(cx).abs_path()
1119 );
1120 return;
1121 }
1122 self.update_repositories_from_worktree(
1123 project_environment.clone(),
1124 next_repository_id.clone(),
1125 downstream
1126 .as_ref()
1127 .map(|downstream| downstream.updates_tx.clone()),
1128 changed_repos.clone(),
1129 fs.clone(),
1130 cx,
1131 );
1132 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1133 }
1134 _ => {}
1135 }
1136 }
1137
1138 fn on_repository_event(
1139 &mut self,
1140 repo: Entity<Repository>,
1141 event: &RepositoryEvent,
1142 cx: &mut Context<Self>,
1143 ) {
1144 let id = repo.read(cx).id;
1145 let repo_snapshot = repo.read(cx).snapshot.clone();
1146 for (buffer_id, diff) in self.diffs.iter() {
1147 if let Some((buffer_repo, repo_path)) =
1148 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1149 && buffer_repo == repo
1150 {
1151 diff.update(cx, |diff, cx| {
1152 if let Some(conflict_set) = &diff.conflict_set {
1153 let conflict_status_changed =
1154 conflict_set.update(cx, |conflict_set, cx| {
1155 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1156 conflict_set.set_has_conflict(has_conflict, cx)
1157 })?;
1158 if conflict_status_changed {
1159 let buffer_store = self.buffer_store.read(cx);
1160 if let Some(buffer) = buffer_store.get(*buffer_id) {
1161 let _ = diff
1162 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1163 }
1164 }
1165 }
1166 anyhow::Ok(())
1167 })
1168 .ok();
1169 }
1170 }
1171 cx.emit(GitStoreEvent::RepositoryUpdated(
1172 id,
1173 event.clone(),
1174 self.active_repo_id == Some(id),
1175 ))
1176 }
1177
1178 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1179 cx.emit(GitStoreEvent::JobsUpdated)
1180 }
1181
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1183 fn update_repositories_from_worktree(
1184 &mut self,
1185 project_environment: Entity<ProjectEnvironment>,
1186 next_repository_id: Arc<AtomicU64>,
1187 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1188 updated_git_repositories: UpdatedGitRepositoriesSet,
1189 fs: Arc<dyn Fs>,
1190 cx: &mut Context<Self>,
1191 ) {
1192 let mut removed_ids = Vec::new();
1193 for update in updated_git_repositories.iter() {
1194 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1195 let existing_work_directory_abs_path =
1196 repo.read(cx).work_directory_abs_path.clone();
1197 Some(&existing_work_directory_abs_path)
1198 == update.old_work_directory_abs_path.as_ref()
1199 || Some(&existing_work_directory_abs_path)
1200 == update.new_work_directory_abs_path.as_ref()
1201 }) {
1202 if let Some(new_work_directory_abs_path) =
1203 update.new_work_directory_abs_path.clone()
1204 {
1205 existing.update(cx, |existing, cx| {
1206 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1207 existing.schedule_scan(updates_tx.clone(), cx);
1208 });
1209 } else {
1210 removed_ids.push(*id);
1211 }
1212 } else if let UpdatedGitRepository {
1213 new_work_directory_abs_path: Some(work_directory_abs_path),
1214 dot_git_abs_path: Some(dot_git_abs_path),
1215 repository_dir_abs_path: Some(repository_dir_abs_path),
1216 common_dir_abs_path: Some(common_dir_abs_path),
1217 ..
1218 } = update
1219 {
1220 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1221 let git_store = cx.weak_entity();
1222 let repo = cx.new(|cx| {
1223 let mut repo = Repository::local(
1224 id,
1225 work_directory_abs_path.clone(),
1226 dot_git_abs_path.clone(),
1227 repository_dir_abs_path.clone(),
1228 common_dir_abs_path.clone(),
1229 project_environment.downgrade(),
1230 fs.clone(),
1231 git_store,
1232 cx,
1233 );
1234 repo.schedule_scan(updates_tx.clone(), cx);
1235 repo
1236 });
1237 self._subscriptions
1238 .push(cx.subscribe(&repo, Self::on_repository_event));
1239 self._subscriptions
1240 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1241 self.repositories.insert(id, repo);
1242 cx.emit(GitStoreEvent::RepositoryAdded(id));
1243 self.active_repo_id.get_or_insert_with(|| {
1244 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1245 id
1246 });
1247 }
1248 }
1249
1250 for id in removed_ids {
1251 if self.active_repo_id == Some(id) {
1252 self.active_repo_id = None;
1253 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1254 }
1255 self.repositories.remove(&id);
1256 if let Some(updates_tx) = updates_tx.as_ref() {
1257 updates_tx
1258 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1259 .ok();
1260 }
1261 }
1262 }
1263
1264 fn on_buffer_store_event(
1265 &mut self,
1266 _: Entity<BufferStore>,
1267 event: &BufferStoreEvent,
1268 cx: &mut Context<Self>,
1269 ) {
1270 match event {
1271 BufferStoreEvent::BufferAdded(buffer) => {
1272 cx.subscribe(buffer, |this, buffer, event, cx| {
1273 if let BufferEvent::LanguageChanged = event {
1274 let buffer_id = buffer.read(cx).remote_id();
1275 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1276 diff_state.update(cx, |diff_state, cx| {
1277 diff_state.buffer_language_changed(buffer, cx);
1278 });
1279 }
1280 }
1281 })
1282 .detach();
1283 }
1284 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1285 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1286 diffs.remove(buffer_id);
1287 }
1288 }
1289 BufferStoreEvent::BufferDropped(buffer_id) => {
1290 self.diffs.remove(buffer_id);
1291 for diffs in self.shared_diffs.values_mut() {
1292 diffs.remove(buffer_id);
1293 }
1294 }
1295
1296 _ => {}
1297 }
1298 }
1299
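    /// Recalculates the git diffs and re-parses the conflict markers for the
    /// given buffers, returning a future that resolves once all of the
    /// resulting work has finished.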
1300 pub fn recalculate_buffer_diffs(
1301 &mut self,
1302 buffers: Vec<Entity<Buffer>>,
1303 cx: &mut Context<Self>,
1304 ) -> impl Future<Output = ()> + use<> {
1305 let mut futures = Vec::new();
1306 for buffer in buffers {
1307 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1308 let buffer = buffer.read(cx).text_snapshot();
1309 diff_state.update(cx, |diff_state, cx| {
1310 diff_state.recalculate_diffs(buffer.clone(), cx);
1311 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1312 });
1313 futures.push(diff_state.update(cx, |diff_state, cx| {
1314 diff_state
1315 .reparse_conflict_markers(buffer, cx)
1316 .map(|_| {})
1317 .boxed()
1318 }));
1319 }
1320 }
1321 async move {
1322 futures::future::join_all(futures).await;
1323 }
1324 }
1325
1326 fn on_buffer_diff_event(
1327 &mut self,
1328 diff: Entity<buffer_diff::BufferDiff>,
1329 event: &BufferDiffEvent,
1330 cx: &mut Context<Self>,
1331 ) {
1332 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1333 let buffer_id = diff.read(cx).buffer_id;
1334 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1335 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1336 diff_state.hunk_staging_operation_count += 1;
1337 diff_state.hunk_staging_operation_count
1338 });
1339 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1340 let recv = repo.update(cx, |repo, cx| {
1341 log::debug!("hunks changed for {}", path.display());
1342 repo.spawn_set_index_text_job(
1343 path,
1344 new_index_text.as_ref().map(|rope| rope.to_string()),
1345 Some(hunk_staging_operation_count),
1346 cx,
1347 )
1348 });
1349 let diff = diff.downgrade();
1350 cx.spawn(async move |this, cx| {
1351 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1352 diff.update(cx, |diff, cx| {
1353 diff.clear_pending_hunks(cx);
1354 })
1355 .ok();
1356 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1357 .ok();
1358 }
1359 })
1360 .detach();
1361 }
1362 }
1363 }
1364 }
1365
1366 fn local_worktree_git_repos_changed(
1367 &mut self,
1368 worktree: Entity<Worktree>,
1369 changed_repos: &UpdatedGitRepositoriesSet,
1370 cx: &mut Context<Self>,
1371 ) {
1372 log::debug!("local worktree repos changed");
1373 debug_assert!(worktree.read(cx).is_local());
1374
1375 for repository in self.repositories.values() {
1376 repository.update(cx, |repository, cx| {
1377 let repo_abs_path = &repository.work_directory_abs_path;
1378 if changed_repos.iter().any(|update| {
1379 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1380 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1381 }) {
1382 repository.reload_buffer_diff_bases(cx);
1383 }
1384 });
1385 }
1386 }
1387
1388 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1389 &self.repositories
1390 }
1391
1392 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1393 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1394 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1395 Some(status.status)
1396 }
1397
1398 pub fn repository_and_path_for_buffer_id(
1399 &self,
1400 buffer_id: BufferId,
1401 cx: &App,
1402 ) -> Option<(Entity<Repository>, RepoPath)> {
1403 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1404 let project_path = buffer.read(cx).project_path(cx)?;
1405 self.repository_and_path_for_project_path(&project_path, cx)
1406 }
1407
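    /// Returns the repository containing the given project path, along with the
    /// path relative to that repository's working directory. When repositories
    /// are nested, the innermost repository containing the path wins.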
1408 pub fn repository_and_path_for_project_path(
1409 &self,
1410 path: &ProjectPath,
1411 cx: &App,
1412 ) -> Option<(Entity<Repository>, RepoPath)> {
1413 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1414 self.repositories
1415 .values()
1416 .filter_map(|repo| {
1417 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1418 Some((repo.clone(), repo_path))
1419 })
1420 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1421 }
1422
1423 pub fn git_init(
1424 &self,
1425 path: Arc<Path>,
1426 fallback_branch_name: String,
1427 cx: &App,
1428 ) -> Task<Result<()>> {
1429 match &self.state {
1430 GitStoreState::Local { fs, .. } => {
1431 let fs = fs.clone();
1432 cx.background_executor()
1433 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1434 }
1435 GitStoreState::Ssh {
1436 upstream_client,
1437 upstream_project_id: project_id,
1438 ..
1439 }
1440 | GitStoreState::Remote {
1441 upstream_client,
1442 upstream_project_id: project_id,
1443 ..
1444 } => {
1445 let client = upstream_client.clone();
1446 let project_id = *project_id;
1447 cx.background_executor().spawn(async move {
1448 client
1449 .request(proto::GitInit {
1450 project_id: project_id.0,
1451 abs_path: path.to_string_lossy().to_string(),
1452 fallback_branch_name,
1453 })
1454 .await?;
1455 Ok(())
1456 })
1457 }
1458 }
1459 }
1460
1461 pub fn git_clone(
1462 &self,
1463 repo: String,
1464 path: impl Into<Arc<std::path::Path>>,
1465 cx: &App,
1466 ) -> Task<Result<()>> {
1467 let path = path.into();
1468 match &self.state {
1469 GitStoreState::Local { fs, .. } => {
1470 let fs = fs.clone();
1471 cx.background_executor()
1472 .spawn(async move { fs.git_clone(&repo, &path).await })
1473 }
1474 GitStoreState::Ssh {
1475 upstream_client,
1476 upstream_project_id,
1477 ..
1478 } => {
1479 let request = upstream_client.request(proto::GitClone {
1480 project_id: upstream_project_id.0,
1481 abs_path: path.to_string_lossy().to_string(),
1482 remote_repo: repo,
1483 });
1484
1485 cx.background_spawn(async move {
1486 let result = request.await?;
1487
1488 match result.success {
1489 true => Ok(()),
1490 false => Err(anyhow!("Git Clone failed")),
1491 }
1492 })
1493 }
1494 GitStoreState::Remote { .. } => {
1495 Task::ready(Err(anyhow!("Git Clone isn't supported for remote users")))
1496 }
1497 }
1498 }
1499
1500 async fn handle_update_repository(
1501 this: Entity<Self>,
1502 envelope: TypedEnvelope<proto::UpdateRepository>,
1503 mut cx: AsyncApp,
1504 ) -> Result<()> {
1505 this.update(&mut cx, |this, cx| {
1506 let mut update = envelope.payload;
1507
1508 let id = RepositoryId::from_proto(update.id);
1509 let client = this
1510 .upstream_client()
1511 .context("no upstream client")?
1512 .clone();
1513
1514 let mut is_new = false;
1515 let repo = this.repositories.entry(id).or_insert_with(|| {
1516 is_new = true;
1517 let git_store = cx.weak_entity();
1518 cx.new(|cx| {
1519 Repository::remote(
1520 id,
1521 Path::new(&update.abs_path).into(),
1522 ProjectId(update.project_id),
1523 client,
1524 git_store,
1525 cx,
1526 )
1527 })
1528 });
1529 if is_new {
1530 this._subscriptions
1531 .push(cx.subscribe(repo, Self::on_repository_event))
1532 }
1533
1534 repo.update(cx, {
1535 let update = update.clone();
1536 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1537 })?;
1538
1539 this.active_repo_id.get_or_insert_with(|| {
1540 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1541 id
1542 });
1543
1544 if let Some((client, project_id)) = this.downstream_client() {
1545 update.project_id = project_id.to_proto();
1546 client.send(update).log_err();
1547 }
1548 Ok(())
1549 })?
1550 }
1551
1552 async fn handle_remove_repository(
1553 this: Entity<Self>,
1554 envelope: TypedEnvelope<proto::RemoveRepository>,
1555 mut cx: AsyncApp,
1556 ) -> Result<()> {
1557 this.update(&mut cx, |this, cx| {
1558 let mut update = envelope.payload;
1559 let id = RepositoryId::from_proto(update.id);
1560 this.repositories.remove(&id);
1561 if let Some((client, project_id)) = this.downstream_client() {
1562 update.project_id = project_id.to_proto();
1563 client.send(update).log_err();
1564 }
1565 if this.active_repo_id == Some(id) {
1566 this.active_repo_id = None;
1567 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1568 }
1569 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1570 })
1571 }
1572
1573 async fn handle_git_init(
1574 this: Entity<Self>,
1575 envelope: TypedEnvelope<proto::GitInit>,
1576 cx: AsyncApp,
1577 ) -> Result<proto::Ack> {
1578 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1579 let name = envelope.payload.fallback_branch_name;
1580 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1581 .await?;
1582
1583 Ok(proto::Ack {})
1584 }
1585
1586 async fn handle_git_clone(
1587 this: Entity<Self>,
1588 envelope: TypedEnvelope<proto::GitClone>,
1589 cx: AsyncApp,
1590 ) -> Result<proto::GitCloneResponse> {
1591 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1592 let repo_name = envelope.payload.remote_repo;
1593 let result = cx
1594 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1595 .await;
1596
1597 Ok(proto::GitCloneResponse {
1598 success: result.is_ok(),
1599 })
1600 }
1601
1602 async fn handle_fetch(
1603 this: Entity<Self>,
1604 envelope: TypedEnvelope<proto::Fetch>,
1605 mut cx: AsyncApp,
1606 ) -> Result<proto::RemoteMessageResponse> {
1607 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1608 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1609 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1610 let askpass_id = envelope.payload.askpass_id;
1611
1612 let askpass = make_remote_delegate(
1613 this,
1614 envelope.payload.project_id,
1615 repository_id,
1616 askpass_id,
1617 &mut cx,
1618 );
1619
1620 let remote_output = repository_handle
1621 .update(&mut cx, |repository_handle, cx| {
1622 repository_handle.fetch(fetch_options, askpass, cx)
1623 })?
1624 .await??;
1625
1626 Ok(proto::RemoteMessageResponse {
1627 stdout: remote_output.stdout,
1628 stderr: remote_output.stderr,
1629 })
1630 }
1631
1632 async fn handle_push(
1633 this: Entity<Self>,
1634 envelope: TypedEnvelope<proto::Push>,
1635 mut cx: AsyncApp,
1636 ) -> Result<proto::RemoteMessageResponse> {
1637 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1638 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1639
1640 let askpass_id = envelope.payload.askpass_id;
1641 let askpass = make_remote_delegate(
1642 this,
1643 envelope.payload.project_id,
1644 repository_id,
1645 askpass_id,
1646 &mut cx,
1647 );
1648
1649 let options = envelope
1650 .payload
1651 .options
1652 .as_ref()
1653 .map(|_| match envelope.payload.options() {
1654 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1655 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1656 });
1657
1658 let branch_name = envelope.payload.branch_name.into();
1659 let remote_name = envelope.payload.remote_name.into();
1660
1661 let remote_output = repository_handle
1662 .update(&mut cx, |repository_handle, cx| {
1663 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1664 })?
1665 .await??;
1666 Ok(proto::RemoteMessageResponse {
1667 stdout: remote_output.stdout,
1668 stderr: remote_output.stderr,
1669 })
1670 }
1671
1672 async fn handle_pull(
1673 this: Entity<Self>,
1674 envelope: TypedEnvelope<proto::Pull>,
1675 mut cx: AsyncApp,
1676 ) -> Result<proto::RemoteMessageResponse> {
1677 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1678 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1679 let askpass_id = envelope.payload.askpass_id;
1680 let askpass = make_remote_delegate(
1681 this,
1682 envelope.payload.project_id,
1683 repository_id,
1684 askpass_id,
1685 &mut cx,
1686 );
1687
1688 let branch_name = envelope.payload.branch_name.into();
1689 let remote_name = envelope.payload.remote_name.into();
1690
1691 let remote_message = repository_handle
1692 .update(&mut cx, |repository_handle, cx| {
1693 repository_handle.pull(branch_name, remote_name, askpass, cx)
1694 })?
1695 .await??;
1696
1697 Ok(proto::RemoteMessageResponse {
1698 stdout: remote_message.stdout,
1699 stderr: remote_message.stderr,
1700 })
1701 }
1702
1703 async fn handle_stage(
1704 this: Entity<Self>,
1705 envelope: TypedEnvelope<proto::Stage>,
1706 mut cx: AsyncApp,
1707 ) -> Result<proto::Ack> {
1708 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1709 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1710
1711 let entries = envelope
1712 .payload
1713 .paths
1714 .into_iter()
1715 .map(PathBuf::from)
1716 .map(RepoPath::new)
1717 .collect();
1718
1719 repository_handle
1720 .update(&mut cx, |repository_handle, cx| {
1721 repository_handle.stage_entries(entries, cx)
1722 })?
1723 .await?;
1724 Ok(proto::Ack {})
1725 }
1726
1727 async fn handle_unstage(
1728 this: Entity<Self>,
1729 envelope: TypedEnvelope<proto::Unstage>,
1730 mut cx: AsyncApp,
1731 ) -> Result<proto::Ack> {
1732 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1733 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1734
1735 let entries = envelope
1736 .payload
1737 .paths
1738 .into_iter()
1739 .map(PathBuf::from)
1740 .map(RepoPath::new)
1741 .collect();
1742
1743 repository_handle
1744 .update(&mut cx, |repository_handle, cx| {
1745 repository_handle.unstage_entries(entries, cx)
1746 })?
1747 .await?;
1748
1749 Ok(proto::Ack {})
1750 }
1751
1752 async fn handle_stash(
1753 this: Entity<Self>,
1754 envelope: TypedEnvelope<proto::Stash>,
1755 mut cx: AsyncApp,
1756 ) -> Result<proto::Ack> {
1757 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1758 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1759
1760 let entries = envelope
1761 .payload
1762 .paths
1763 .into_iter()
1764 .map(PathBuf::from)
1765 .map(RepoPath::new)
1766 .collect();
1767
1768 repository_handle
1769 .update(&mut cx, |repository_handle, cx| {
1770 repository_handle.stash_entries(entries, cx)
1771 })?
1772 .await?;
1773
1774 Ok(proto::Ack {})
1775 }
1776
1777 async fn handle_stash_pop(
1778 this: Entity<Self>,
1779 envelope: TypedEnvelope<proto::StashPop>,
1780 mut cx: AsyncApp,
1781 ) -> Result<proto::Ack> {
1782 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1783 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1784
1785 repository_handle
1786 .update(&mut cx, |repository_handle, cx| {
1787 repository_handle.stash_pop(cx)
1788 })?
1789 .await?;
1790
1791 Ok(proto::Ack {})
1792 }
1793
1794 async fn handle_set_index_text(
1795 this: Entity<Self>,
1796 envelope: TypedEnvelope<proto::SetIndexText>,
1797 mut cx: AsyncApp,
1798 ) -> Result<proto::Ack> {
1799 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1800 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1801 let repo_path = RepoPath::from_str(&envelope.payload.path);
1802
1803 repository_handle
1804 .update(&mut cx, |repository_handle, cx| {
1805 repository_handle.spawn_set_index_text_job(
1806 repo_path,
1807 envelope.payload.text,
1808 None,
1809 cx,
1810 )
1811 })?
1812 .await??;
1813 Ok(proto::Ack {})
1814 }
1815
1816 async fn handle_commit(
1817 this: Entity<Self>,
1818 envelope: TypedEnvelope<proto::Commit>,
1819 mut cx: AsyncApp,
1820 ) -> Result<proto::Ack> {
1821 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1822 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1823
1824 let message = SharedString::from(envelope.payload.message);
1825 let name = envelope.payload.name.map(SharedString::from);
1826 let email = envelope.payload.email.map(SharedString::from);
1827 let options = envelope.payload.options.unwrap_or_default();
1828
1829 repository_handle
1830 .update(&mut cx, |repository_handle, cx| {
1831 repository_handle.commit(
1832 message,
1833 name.zip(email),
1834 CommitOptions {
1835 amend: options.amend,
1836 signoff: options.signoff,
1837 },
1838 cx,
1839 )
1840 })?
1841 .await??;
1842 Ok(proto::Ack {})
1843 }
1844
1845 async fn handle_get_remotes(
1846 this: Entity<Self>,
1847 envelope: TypedEnvelope<proto::GetRemotes>,
1848 mut cx: AsyncApp,
1849 ) -> Result<proto::GetRemotesResponse> {
1850 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1851 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1852
1853 let branch_name = envelope.payload.branch_name;
1854
1855 let remotes = repository_handle
1856 .update(&mut cx, |repository_handle, _| {
1857 repository_handle.get_remotes(branch_name)
1858 })?
1859 .await??;
1860
1861 Ok(proto::GetRemotesResponse {
1862 remotes: remotes
1863 .into_iter()
1864 .map(|remotes| proto::get_remotes_response::Remote {
1865 name: remotes.name.to_string(),
1866 })
1867 .collect::<Vec<_>>(),
1868 })
1869 }
1870
1871 async fn handle_get_branches(
1872 this: Entity<Self>,
1873 envelope: TypedEnvelope<proto::GitGetBranches>,
1874 mut cx: AsyncApp,
1875 ) -> Result<proto::GitBranchesResponse> {
1876 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1877 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1878
1879 let branches = repository_handle
1880 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1881 .await??;
1882
1883 Ok(proto::GitBranchesResponse {
1884 branches: branches
1885 .into_iter()
1886 .map(|branch| branch_to_proto(&branch))
1887 .collect::<Vec<_>>(),
1888 })
    }

1890 async fn handle_get_default_branch(
1891 this: Entity<Self>,
1892 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1893 mut cx: AsyncApp,
1894 ) -> Result<proto::GetDefaultBranchResponse> {
1895 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1896 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1897
1898 let branch = repository_handle
1899 .update(&mut cx, |repository_handle, _| {
1900 repository_handle.default_branch()
1901 })?
1902 .await??
1903 .map(Into::into);
1904
1905 Ok(proto::GetDefaultBranchResponse { branch })
    }

1907 async fn handle_create_branch(
1908 this: Entity<Self>,
1909 envelope: TypedEnvelope<proto::GitCreateBranch>,
1910 mut cx: AsyncApp,
1911 ) -> Result<proto::Ack> {
1912 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1913 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1914 let branch_name = envelope.payload.branch_name;
1915
1916 repository_handle
1917 .update(&mut cx, |repository_handle, _| {
1918 repository_handle.create_branch(branch_name)
1919 })?
1920 .await??;
1921
1922 Ok(proto::Ack {})
1923 }
1924
1925 async fn handle_change_branch(
1926 this: Entity<Self>,
1927 envelope: TypedEnvelope<proto::GitChangeBranch>,
1928 mut cx: AsyncApp,
1929 ) -> Result<proto::Ack> {
1930 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1931 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1932 let branch_name = envelope.payload.branch_name;
1933
1934 repository_handle
1935 .update(&mut cx, |repository_handle, _| {
1936 repository_handle.change_branch(branch_name)
1937 })?
1938 .await??;
1939
1940 Ok(proto::Ack {})
1941 }
1942
1943 async fn handle_show(
1944 this: Entity<Self>,
1945 envelope: TypedEnvelope<proto::GitShow>,
1946 mut cx: AsyncApp,
1947 ) -> Result<proto::GitCommitDetails> {
1948 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1949 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1950
1951 let commit = repository_handle
1952 .update(&mut cx, |repository_handle, _| {
1953 repository_handle.show(envelope.payload.commit)
1954 })?
1955 .await??;
1956 Ok(proto::GitCommitDetails {
1957 sha: commit.sha.into(),
1958 message: commit.message.into(),
1959 commit_timestamp: commit.commit_timestamp,
1960 author_email: commit.author_email.into(),
1961 author_name: commit.author_name.into(),
1962 })
1963 }
1964
1965 async fn handle_load_commit_diff(
1966 this: Entity<Self>,
1967 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1968 mut cx: AsyncApp,
1969 ) -> Result<proto::LoadCommitDiffResponse> {
1970 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1971 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1972
1973 let commit_diff = repository_handle
1974 .update(&mut cx, |repository_handle, _| {
1975 repository_handle.load_commit_diff(envelope.payload.commit)
1976 })?
1977 .await??;
1978 Ok(proto::LoadCommitDiffResponse {
1979 files: commit_diff
1980 .files
1981 .into_iter()
1982 .map(|file| proto::CommitFile {
1983 path: file.path.to_string(),
1984 old_text: file.old_text,
1985 new_text: file.new_text,
1986 })
1987 .collect(),
1988 })
1989 }
1990
1991 async fn handle_reset(
1992 this: Entity<Self>,
1993 envelope: TypedEnvelope<proto::GitReset>,
1994 mut cx: AsyncApp,
1995 ) -> Result<proto::Ack> {
1996 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1997 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1998
1999 let mode = match envelope.payload.mode() {
2000 git_reset::ResetMode::Soft => ResetMode::Soft,
2001 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2002 };
2003
2004 repository_handle
2005 .update(&mut cx, |repository_handle, cx| {
2006 repository_handle.reset(envelope.payload.commit, mode, cx)
2007 })?
2008 .await??;
2009 Ok(proto::Ack {})
2010 }
2011
2012 async fn handle_checkout_files(
2013 this: Entity<Self>,
2014 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2015 mut cx: AsyncApp,
2016 ) -> Result<proto::Ack> {
2017 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2018 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2019 let paths = envelope
2020 .payload
2021 .paths
2022 .iter()
2023 .map(|s| RepoPath::from_str(s))
2024 .collect();
2025
2026 repository_handle
2027 .update(&mut cx, |repository_handle, cx| {
2028 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2029 })?
2030 .await??;
2031 Ok(proto::Ack {})
2032 }
2033
2034 async fn handle_open_commit_message_buffer(
2035 this: Entity<Self>,
2036 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2037 mut cx: AsyncApp,
2038 ) -> Result<proto::OpenBufferResponse> {
2039 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2040 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2041 let buffer = repository
2042 .update(&mut cx, |repository, cx| {
2043 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2044 })?
2045 .await?;
2046
2047 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2048 this.update(&mut cx, |this, cx| {
2049 this.buffer_store.update(cx, |buffer_store, cx| {
2050 buffer_store
2051 .create_buffer_for_peer(
2052 &buffer,
2053 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2054 cx,
2055 )
2056 .detach_and_log_err(cx);
2057 })
2058 })?;
2059
2060 Ok(proto::OpenBufferResponse {
2061 buffer_id: buffer_id.to_proto(),
2062 })
2063 }
2064
2065 async fn handle_askpass(
2066 this: Entity<Self>,
2067 envelope: TypedEnvelope<proto::AskPassRequest>,
2068 mut cx: AsyncApp,
2069 ) -> Result<proto::AskPassResponse> {
2070 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2071 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2072
2073 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2074 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2075 debug_panic!("no askpass found");
2076 anyhow::bail!("no askpass found");
2077 };
2078
2079 let response = askpass.ask_password(envelope.payload.prompt).await?;
2080
2081 delegates
2082 .lock()
2083 .insert(envelope.payload.askpass_id, askpass);
2084
2085 Ok(proto::AskPassResponse { response })
2086 }
2087
2088 async fn handle_check_for_pushed_commits(
2089 this: Entity<Self>,
2090 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2091 mut cx: AsyncApp,
2092 ) -> Result<proto::CheckForPushedCommitsResponse> {
2093 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2094 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2095
2096 let branches = repository_handle
2097 .update(&mut cx, |repository_handle, _| {
2098 repository_handle.check_for_pushed_commits()
2099 })?
2100 .await??;
2101 Ok(proto::CheckForPushedCommitsResponse {
2102 pushed_to: branches
2103 .into_iter()
2104 .map(|commit| commit.to_string())
2105 .collect(),
2106 })
2107 }
2108
2109 async fn handle_git_diff(
2110 this: Entity<Self>,
2111 envelope: TypedEnvelope<proto::GitDiff>,
2112 mut cx: AsyncApp,
2113 ) -> Result<proto::GitDiffResponse> {
2114 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2115 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2116 let diff_type = match envelope.payload.diff_type() {
2117 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2118 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2119 };
2120
2121 let mut diff = repository_handle
2122 .update(&mut cx, |repository_handle, cx| {
2123 repository_handle.diff(diff_type, cx)
2124 })?
2125 .await??;
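        // Best-effort cap: truncate very large diffs (by character count) so the
        // response sent back over RPC stays bounded.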
2126 const ONE_MB: usize = 1_000_000;
2127 if diff.len() > ONE_MB {
2128 diff = diff.chars().take(ONE_MB).collect()
2129 }
2130
2131 Ok(proto::GitDiffResponse { diff })
2132 }
2133
2134 async fn handle_open_unstaged_diff(
2135 this: Entity<Self>,
2136 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2137 mut cx: AsyncApp,
2138 ) -> Result<proto::OpenUnstagedDiffResponse> {
2139 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2140 let diff = this
2141 .update(&mut cx, |this, cx| {
2142 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2143 Some(this.open_unstaged_diff(buffer, cx))
2144 })?
2145 .context("missing buffer")?
2146 .await?;
2147 this.update(&mut cx, |this, _| {
2148 let shared_diffs = this
2149 .shared_diffs
2150 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2151 .or_default();
2152 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2153 })?;
2154 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2155 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2156 }
2157
2158 async fn handle_open_uncommitted_diff(
2159 this: Entity<Self>,
2160 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2161 mut cx: AsyncApp,
2162 ) -> Result<proto::OpenUncommittedDiffResponse> {
2163 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2164 let diff = this
2165 .update(&mut cx, |this, cx| {
2166 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2167 Some(this.open_uncommitted_diff(buffer, cx))
2168 })?
2169 .context("missing buffer")?
2170 .await?;
2171 this.update(&mut cx, |this, _| {
2172 let shared_diffs = this
2173 .shared_diffs
2174 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2175 .or_default();
2176 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2177 })?;
2178 diff.read_with(&cx, |diff, cx| {
2179 use proto::open_uncommitted_diff_response::Mode;
2180
2181 let unstaged_diff = diff.secondary_diff();
2182 let index_snapshot = unstaged_diff.and_then(|diff| {
2183 let diff = diff.read(cx);
2184 diff.base_text_exists().then(|| diff.base_text())
2185 });
2186
2187 let mode;
2188 let staged_text;
2189 let committed_text;
2190 if diff.base_text_exists() {
2191 let committed_snapshot = diff.base_text();
2192 committed_text = Some(committed_snapshot.text());
2193 if let Some(index_text) = index_snapshot {
2194 if index_text.remote_id() == committed_snapshot.remote_id() {
2195 mode = Mode::IndexMatchesHead;
2196 staged_text = None;
2197 } else {
2198 mode = Mode::IndexAndHead;
2199 staged_text = Some(index_text.text());
2200 }
2201 } else {
2202 mode = Mode::IndexAndHead;
2203 staged_text = None;
2204 }
2205 } else {
2206 mode = Mode::IndexAndHead;
2207 committed_text = None;
2208 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2209 }
2210
2211 proto::OpenUncommittedDiffResponse {
2212 committed_text,
2213 staged_text,
2214 mode: mode.into(),
2215 }
2216 })
2217 }
2218
2219 async fn handle_update_diff_bases(
2220 this: Entity<Self>,
2221 request: TypedEnvelope<proto::UpdateDiffBases>,
2222 mut cx: AsyncApp,
2223 ) -> Result<()> {
2224 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2225 this.update(&mut cx, |this, cx| {
2226 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2227 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2228 {
2229 let buffer = buffer.read(cx).text_snapshot();
2230 diff_state.update(cx, |diff_state, cx| {
2231 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2232 })
2233 }
2234 })
2235 }
2236
2237 async fn handle_blame_buffer(
2238 this: Entity<Self>,
2239 envelope: TypedEnvelope<proto::BlameBuffer>,
2240 mut cx: AsyncApp,
2241 ) -> Result<proto::BlameBufferResponse> {
2242 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2243 let version = deserialize_version(&envelope.payload.version);
2244 let buffer = this.read_with(&cx, |this, cx| {
2245 this.buffer_store.read(cx).get_existing(buffer_id)
2246 })??;
2247 buffer
2248 .update(&mut cx, |buffer, _| {
2249 buffer.wait_for_version(version.clone())
2250 })?
2251 .await?;
2252 let blame = this
2253 .update(&mut cx, |this, cx| {
2254 this.blame_buffer(&buffer, Some(version), cx)
2255 })?
2256 .await?;
2257 Ok(serialize_blame_buffer_response(blame))
2258 }
2259
2260 async fn handle_get_permalink_to_line(
2261 this: Entity<Self>,
2262 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2263 mut cx: AsyncApp,
2264 ) -> Result<proto::GetPermalinkToLineResponse> {
2265 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2266 // let version = deserialize_version(&envelope.payload.version);
2267 let selection = {
2268 let proto_selection = envelope
2269 .payload
2270 .selection
2271                .context("no selection provided to get permalink for")?;
2272 proto_selection.start as u32..proto_selection.end as u32
2273 };
2274 let buffer = this.read_with(&cx, |this, cx| {
2275 this.buffer_store.read(cx).get_existing(buffer_id)
2276 })??;
2277 let permalink = this
2278 .update(&mut cx, |this, cx| {
2279 this.get_permalink_to_line(&buffer, selection, cx)
2280 })?
2281 .await?;
2282 Ok(proto::GetPermalinkToLineResponse {
2283 permalink: permalink.to_string(),
2284 })
2285 }
2286
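    /// Resolves the repository referenced by an incoming RPC request, returning
    /// an error if no repository with the given id is known.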
2287 fn repository_for_request(
2288 this: &Entity<Self>,
2289 id: RepositoryId,
2290 cx: &mut AsyncApp,
2291 ) -> Result<Entity<Repository>> {
2292 this.read_with(cx, |this, _| {
2293 this.repositories
2294 .get(&id)
2295 .context("missing repository handle")
2296 .cloned()
2297 })?
2298 }
2299
2300 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2301 self.repositories
2302 .iter()
2303 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2304 .collect()
2305 }
2306
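    /// Groups a batch of updated worktree entries by the repository that contains
    /// them, converting each absolute path into a repository-relative `RepoPath`.
    /// When repositories are nested, a path is attributed only to its innermost
    /// containing repository.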
2307 fn process_updated_entries(
2308 &self,
2309 worktree: &Entity<Worktree>,
2310 updated_entries: &[(Arc<Path>, ProjectEntryId, PathChange)],
2311 cx: &mut App,
2312 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2313 let mut repo_paths = self
2314 .repositories
2315 .values()
2316 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2317 .collect::<Vec<_>>();
2318 let mut entries: Vec<_> = updated_entries
2319 .iter()
2320 .map(|(path, _, _)| path.clone())
2321 .collect();
2322 entries.sort();
2323 let worktree = worktree.read(cx);
2324
2325 let entries = entries
2326 .into_iter()
2327 .filter_map(|path| worktree.absolutize(&path).ok())
2328 .collect::<Arc<[_]>>();
2329
2330 let executor = cx.background_executor().clone();
2331 cx.background_executor().spawn(async move {
2332 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2333 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2334 let mut tasks = FuturesOrdered::new();
2335 for (repo_path, repo) in repo_paths.into_iter().rev() {
2336 let entries = entries.clone();
2337 let task = executor.spawn(async move {
2338 // Find all repository paths that belong to this repo
2339 let mut ix = entries.partition_point(|path| path < &*repo_path);
2340 if ix == entries.len() {
2341 return None;
2342 };
2343
2344 let mut paths = Vec::new();
2345 // All paths prefixed by a given repo will constitute a continuous range.
2346 while let Some(path) = entries.get(ix)
2347 && let Some(repo_path) =
2348 RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, path)
2349 {
2350 paths.push((repo_path, ix));
2351 ix += 1;
2352 }
2353 if paths.is_empty() {
2354 None
2355 } else {
2356 Some((repo, paths))
2357 }
2358 });
2359 tasks.push_back(task);
2360 }
2361
2362 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2363 let mut path_was_used = vec![false; entries.len()];
2364 let tasks = tasks.collect::<Vec<_>>().await;
2365            // The tasks were created from repositories in reverse-sorted order, so more-specific work directories come first.
2366            // We always want to assign a path to its innermost repository.
2367 for t in tasks {
2368 let Some((repo, paths)) = t else {
2369 continue;
2370 };
2371 let entry = paths_by_git_repo.entry(repo).or_default();
2372 for (repo_path, ix) in paths {
2373 if path_was_used[ix] {
2374 continue;
2375 }
2376 path_was_used[ix] = true;
2377 entry.push(repo_path);
2378 }
2379 }
2380
2381 paths_by_git_repo
2382 })
2383 }
2384}
2385
2386impl BufferGitState {
2387 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2388 Self {
2389 unstaged_diff: Default::default(),
2390 uncommitted_diff: Default::default(),
2391 recalculate_diff_task: Default::default(),
2392 language: Default::default(),
2393 language_registry: Default::default(),
2394 recalculating_tx: postage::watch::channel_with(false).0,
2395 hunk_staging_operation_count: 0,
2396 hunk_staging_operation_count_as_of_write: 0,
2397 head_text: Default::default(),
2398 index_text: Default::default(),
2399 head_changed: Default::default(),
2400 index_changed: Default::default(),
2401 language_changed: Default::default(),
2402 conflict_updated_futures: Default::default(),
2403 conflict_set: Default::default(),
2404 reparse_conflict_markers_task: Default::default(),
2405 }
2406 }
2407
2408 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2409 self.language = buffer.read(cx).language().cloned();
2410 self.language_changed = true;
2411 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2412 }
2413
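    /// Re-parses merge conflict markers for the given buffer snapshot on a
    /// background task and updates the associated `ConflictSet`. The returned
    /// receiver fires once the conflict set has been updated; if there is no
    /// conflict set with active conflicts, no work is scheduled and the sender
    /// side is dropped.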
2414 fn reparse_conflict_markers(
2415 &mut self,
2416 buffer: text::BufferSnapshot,
2417 cx: &mut Context<Self>,
2418 ) -> oneshot::Receiver<()> {
2419 let (tx, rx) = oneshot::channel();
2420
2421 let Some(conflict_set) = self
2422 .conflict_set
2423 .as_ref()
2424 .and_then(|conflict_set| conflict_set.upgrade())
2425 else {
2426 return rx;
2427 };
2428
2429 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2430 if conflict_set.has_conflict {
2431 Some(conflict_set.snapshot())
2432 } else {
2433 None
2434 }
2435 });
2436
2437 if let Some(old_snapshot) = old_snapshot {
2438 self.conflict_updated_futures.push(tx);
2439 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2440 let (snapshot, changed_range) = cx
2441 .background_spawn(async move {
2442 let new_snapshot = ConflictSet::parse(&buffer);
2443 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2444 (new_snapshot, changed_range)
2445 })
2446 .await;
2447 this.update(cx, |this, cx| {
2448 if let Some(conflict_set) = &this.conflict_set {
2449 conflict_set
2450 .update(cx, |conflict_set, cx| {
2451 conflict_set.set_snapshot(snapshot, changed_range, cx);
2452 })
2453 .ok();
2454 }
2455 let futures = std::mem::take(&mut this.conflict_updated_futures);
2456 for tx in futures {
2457 tx.send(()).ok();
2458 }
2459 })
2460 }))
2461 }
2462
2463 rx
2464 }
2465
2466 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2467 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2468 }
2469
2470 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2471 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2472 }
2473
2474 fn handle_base_texts_updated(
2475 &mut self,
2476 buffer: text::BufferSnapshot,
2477 message: proto::UpdateDiffBases,
2478 cx: &mut Context<Self>,
2479 ) {
2480 use proto::update_diff_bases::Mode;
2481
2482 let Some(mode) = Mode::from_i32(message.mode) else {
2483 return;
2484 };
2485
2486 let diff_bases_change = match mode {
2487 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2488 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2489 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2490 Mode::IndexAndHead => DiffBasesChange::SetEach {
2491 index: message.staged_text,
2492 head: message.committed_text,
2493 },
2494 };
2495
2496 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2497 }
2498
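    /// If a diff recalculation is currently in progress, returns a future that
    /// resolves once it finishes (or is cancelled); otherwise returns `None`.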
2499 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2500 if *self.recalculating_tx.borrow() {
2501 let mut rx = self.recalculating_tx.subscribe();
2502 Some(async move {
2503 loop {
2504 let is_recalculating = rx.recv().await;
2505 if is_recalculating != Some(true) {
2506 break;
2507 }
2508 }
2509 })
2510 } else {
2511 None
2512 }
2513 }
2514
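    /// Records new index and/or HEAD base texts (normalizing their line endings),
    /// marks which bases changed, and kicks off a diff recalculation.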
2515 fn diff_bases_changed(
2516 &mut self,
2517 buffer: text::BufferSnapshot,
2518 diff_bases_change: Option<DiffBasesChange>,
2519 cx: &mut Context<Self>,
2520 ) {
2521 match diff_bases_change {
2522 Some(DiffBasesChange::SetIndex(index)) => {
2523 self.index_text = index.map(|mut index| {
2524 text::LineEnding::normalize(&mut index);
2525 Arc::new(index)
2526 });
2527 self.index_changed = true;
2528 }
2529 Some(DiffBasesChange::SetHead(head)) => {
2530 self.head_text = head.map(|mut head| {
2531 text::LineEnding::normalize(&mut head);
2532 Arc::new(head)
2533 });
2534 self.head_changed = true;
2535 }
2536 Some(DiffBasesChange::SetBoth(text)) => {
2537 let text = text.map(|mut text| {
2538 text::LineEnding::normalize(&mut text);
2539 Arc::new(text)
2540 });
2541 self.head_text = text.clone();
2542 self.index_text = text;
2543 self.head_changed = true;
2544 self.index_changed = true;
2545 }
2546 Some(DiffBasesChange::SetEach { index, head }) => {
2547 self.index_text = index.map(|mut index| {
2548 text::LineEnding::normalize(&mut index);
2549 Arc::new(index)
2550 });
2551 self.index_changed = true;
2552 self.head_text = head.map(|mut head| {
2553 text::LineEnding::normalize(&mut head);
2554 Arc::new(head)
2555 });
2556 self.head_changed = true;
2557 }
2558 None => {}
2559 }
2560
2561 self.recalculate_diffs(buffer, cx)
2562 }
2563
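    /// Recomputes the unstaged and uncommitted diffs for the buffer on a
    /// background task. When the index and HEAD texts are identical (they share
    /// the same `Arc`), the uncommitted diff reuses the freshly computed unstaged
    /// diff rather than being recomputed. The update is abandoned if new hunk
    /// staging operations began while it was running, since a later recalculation
    /// will pick up the settled index state.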
2564 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2565 *self.recalculating_tx.borrow_mut() = true;
2566
2567 let language = self.language.clone();
2568 let language_registry = self.language_registry.clone();
2569 let unstaged_diff = self.unstaged_diff();
2570 let uncommitted_diff = self.uncommitted_diff();
2571 let head = self.head_text.clone();
2572 let index = self.index_text.clone();
2573 let index_changed = self.index_changed;
2574 let head_changed = self.head_changed;
2575 let language_changed = self.language_changed;
2576 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2577 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2578 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2579 (None, None) => true,
2580 _ => false,
2581 };
2582 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2583 log::debug!(
2584 "start recalculating diffs for buffer {}",
2585 buffer.remote_id()
2586 );
2587
2588 let mut new_unstaged_diff = None;
2589 if let Some(unstaged_diff) = &unstaged_diff {
2590 new_unstaged_diff = Some(
2591 BufferDiff::update_diff(
2592 unstaged_diff.clone(),
2593 buffer.clone(),
2594 index,
2595 index_changed,
2596 language_changed,
2597 language.clone(),
2598 language_registry.clone(),
2599 cx,
2600 )
2601 .await?,
2602 );
2603 }
2604
2605 let mut new_uncommitted_diff = None;
2606 if let Some(uncommitted_diff) = &uncommitted_diff {
2607 new_uncommitted_diff = if index_matches_head {
2608 new_unstaged_diff.clone()
2609 } else {
2610 Some(
2611 BufferDiff::update_diff(
2612 uncommitted_diff.clone(),
2613 buffer.clone(),
2614 head,
2615 head_changed,
2616 language_changed,
2617 language.clone(),
2618 language_registry.clone(),
2619 cx,
2620 )
2621 .await?,
2622 )
2623 }
2624 }
2625
2626 let cancel = this.update(cx, |this, _| {
2627 // This checks whether all pending stage/unstage operations
2628 // have quiesced (i.e. both the corresponding write and the
2629 // read of that write have completed). If not, then we cancel
2630 // this recalculation attempt to avoid invalidating pending
2631 // state too quickly; another recalculation will come along
2632 // later and clear the pending state once the state of the index has settled.
2633 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2634 *this.recalculating_tx.borrow_mut() = false;
2635 true
2636 } else {
2637 false
2638 }
2639 })?;
2640 if cancel {
2641 log::debug!(
2642 concat!(
2643 "aborting recalculating diffs for buffer {}",
2644                        " due to subsequent hunk operations",
2645 ),
2646 buffer.remote_id()
2647 );
2648 return Ok(());
2649 }
2650
2651 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2652 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2653 {
2654 unstaged_diff.update(cx, |diff, cx| {
2655 if language_changed {
2656 diff.language_changed(cx);
2657 }
2658 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2659 })?
2660 } else {
2661 None
2662 };
2663
2664 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2665 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2666 {
2667 uncommitted_diff.update(cx, |diff, cx| {
2668 if language_changed {
2669 diff.language_changed(cx);
2670 }
2671 diff.set_snapshot_with_secondary(
2672 new_uncommitted_diff,
2673 &buffer,
2674 unstaged_changed_range,
2675 true,
2676 cx,
2677 );
2678 })?;
2679 }
2680
2681 log::debug!(
2682 "finished recalculating diffs for buffer {}",
2683 buffer.remote_id()
2684 );
2685
2686 if let Some(this) = this.upgrade() {
2687 this.update(cx, |this, _| {
2688 this.index_changed = false;
2689 this.head_changed = false;
2690 this.language_changed = false;
2691 *this.recalculating_tx.borrow_mut() = false;
2692 })?;
2693 }
2694
2695 Ok(())
2696 }));
2697 }
2698}
2699
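/// Builds an `AskPassDelegate` that forwards authentication prompts from a local
/// git operation to the downstream client as a `proto::AskPassRequest` and relays
/// the response back.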
2700fn make_remote_delegate(
2701 this: Entity<GitStore>,
2702 project_id: u64,
2703 repository_id: RepositoryId,
2704 askpass_id: u64,
2705 cx: &mut AsyncApp,
2706) -> AskPassDelegate {
2707 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2708 this.update(cx, |this, cx| {
2709 let Some((client, _)) = this.downstream_client() else {
2710 return;
2711 };
2712 let response = client.request(proto::AskPassRequest {
2713 project_id,
2714 repository_id: repository_id.to_proto(),
2715 askpass_id,
2716 prompt,
2717 });
2718 cx.spawn(async move |_, _| {
2719 tx.send(response.await?.response).ok();
2720 anyhow::Ok(())
2721 })
2722 .detach_and_log_err(cx);
2723 })
2724 .log_err();
2725 })
2726}
2727
2728impl RepositoryId {
2729 pub fn to_proto(self) -> u64 {
2730 self.0
2731 }
2732
2733 pub fn from_proto(id: u64) -> Self {
2734 RepositoryId(id)
2735 }
2736}
2737
2738impl RepositorySnapshot {
2739 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2740 Self {
2741 id,
2742 statuses_by_path: Default::default(),
2743 work_directory_abs_path,
2744 branch: None,
2745 head_commit: None,
2746 scan_id: 0,
2747 merge: Default::default(),
2748 remote_origin_url: None,
2749 remote_upstream_url: None,
2750 }
2751 }
2752
2753 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2754 proto::UpdateRepository {
2755 branch_summary: self.branch.as_ref().map(branch_to_proto),
2756 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2757 updated_statuses: self
2758 .statuses_by_path
2759 .iter()
2760 .map(|entry| entry.to_proto())
2761 .collect(),
2762 removed_statuses: Default::default(),
2763 current_merge_conflicts: self
2764 .merge
2765 .conflicted_paths
2766 .iter()
2767 .map(|repo_path| repo_path.to_proto())
2768 .collect(),
2769 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2770 project_id,
2771 id: self.id.to_proto(),
2772 abs_path: self.work_directory_abs_path.to_proto(),
2773 entry_ids: vec![self.id.to_proto()],
2774 scan_id: self.scan_id,
2775 is_last_update: true,
2776 }
2777 }
2778
2779 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2780 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2781 let mut removed_statuses: Vec<String> = Vec::new();
2782
2783 let mut new_statuses = self.statuses_by_path.iter().peekable();
2784 let mut old_statuses = old.statuses_by_path.iter().peekable();
2785
2786 let mut current_new_entry = new_statuses.next();
2787 let mut current_old_entry = old_statuses.next();
2788 loop {
2789 match (current_new_entry, current_old_entry) {
2790 (Some(new_entry), Some(old_entry)) => {
2791 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2792 Ordering::Less => {
2793 updated_statuses.push(new_entry.to_proto());
2794 current_new_entry = new_statuses.next();
2795 }
2796 Ordering::Equal => {
2797 if new_entry.status != old_entry.status {
2798 updated_statuses.push(new_entry.to_proto());
2799 }
2800 current_old_entry = old_statuses.next();
2801 current_new_entry = new_statuses.next();
2802 }
2803 Ordering::Greater => {
2804 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2805 current_old_entry = old_statuses.next();
2806 }
2807 }
2808 }
2809 (None, Some(old_entry)) => {
2810 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2811 current_old_entry = old_statuses.next();
2812 }
2813 (Some(new_entry), None) => {
2814 updated_statuses.push(new_entry.to_proto());
2815 current_new_entry = new_statuses.next();
2816 }
2817 (None, None) => break,
2818 }
2819 }
2820
2821 proto::UpdateRepository {
2822 branch_summary: self.branch.as_ref().map(branch_to_proto),
2823 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2824 updated_statuses,
2825 removed_statuses,
2826 current_merge_conflicts: self
2827 .merge
2828 .conflicted_paths
2829 .iter()
2830 .map(|path| path.as_ref().to_proto())
2831 .collect(),
2832 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2833 project_id,
2834 id: self.id.to_proto(),
2835 abs_path: self.work_directory_abs_path.to_proto(),
2836 entry_ids: vec![],
2837 scan_id: self.scan_id,
2838 is_last_update: true,
2839 }
2840 }
2841
2842 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2843 self.statuses_by_path.iter().cloned()
2844 }
2845
2846 pub fn status_summary(&self) -> GitSummary {
2847 self.statuses_by_path.summary().item_summary
2848 }
2849
2850 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2851 self.statuses_by_path
2852 .get(&PathKey(path.0.clone()), &())
2853 .cloned()
2854 }
2855
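    /// Converts an absolute filesystem path into a path relative to this
    /// repository's working directory, returning `None` if the path lies outside
    /// the working directory.
    ///
    /// For example (hypothetical paths), with a working directory of
    /// `/home/user/project`, the absolute path `/home/user/project/src/main.rs`
    /// maps to the repo path `src/main.rs`.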
2856 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2857 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path)
2858 }
2859
2860 #[inline]
2861 fn abs_path_to_repo_path_inner(
2862 work_directory_abs_path: &Path,
2863 abs_path: &Path,
2864 ) -> Option<RepoPath> {
2865 abs_path
2866 .strip_prefix(&work_directory_abs_path)
2867 .map(RepoPath::from)
2868 .ok()
2869 }
2870
2871 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2872 self.merge.conflicted_paths.contains(repo_path)
2873 }
2874
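    /// Returns true if the path either was conflicted when the merge heads last
    /// changed or is currently reported as conflicted in the cached status.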
2875 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2876 let had_conflict_on_last_merge_head_change =
2877 self.merge.conflicted_paths.contains(repo_path);
2878 let has_conflict_currently = self
2879 .status_for_path(repo_path)
2880 .is_some_and(|entry| entry.status.is_conflicted());
2881 had_conflict_on_last_merge_head_change || has_conflict_currently
2882 }
2883
2884 /// This is the name that will be displayed in the repository selector for this repository.
2885 pub fn display_name(&self) -> SharedString {
2886 self.work_directory_abs_path
2887 .file_name()
2888 .unwrap_or_default()
2889 .to_string_lossy()
2890 .to_string()
2891 .into()
2892 }
2893}
2894
2895impl MergeDetails {
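    /// Loads the current merge state (merge message, in-progress operation heads,
    /// and conflicted paths) from the repository, returning the new details along
    /// with whether the set of merge heads changed relative to `prev_snapshot`.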
2896 async fn load(
2897 backend: &Arc<dyn GitRepository>,
2898 status: &SumTree<StatusEntry>,
2899 prev_snapshot: &RepositorySnapshot,
2900 ) -> Result<(MergeDetails, bool)> {
2901 log::debug!("load merge details");
2902 let message = backend.merge_message().await;
2903 let heads = backend
2904 .revparse_batch(vec![
2905 "MERGE_HEAD".into(),
2906 "CHERRY_PICK_HEAD".into(),
2907 "REBASE_HEAD".into(),
2908 "REVERT_HEAD".into(),
2909 "APPLY_HEAD".into(),
2910 ])
2911 .await
2912 .log_err()
2913 .unwrap_or_default()
2914 .into_iter()
2915 .map(|opt| opt.map(SharedString::from))
2916 .collect::<Vec<_>>();
2917 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2918 let conflicted_paths = if merge_heads_changed {
2919 let current_conflicted_paths = TreeSet::from_ordered_entries(
2920 status
2921 .iter()
2922 .filter(|entry| entry.status.is_conflicted())
2923 .map(|entry| entry.repo_path.clone()),
2924 );
2925
2926 // It can happen that we run a scan while a lengthy merge is in progress
2927 // that will eventually result in conflicts, but before those conflicts
2928 // are reported by `git status`. Since for the moment we only care about
2929 // the merge heads state for the purposes of tracking conflicts, don't update
2930 // this state until we see some conflicts.
2931 if heads.iter().any(Option::is_some)
2932 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
2933 && current_conflicted_paths.is_empty()
2934 {
2935 log::debug!("not updating merge heads because no conflicts found");
2936 return Ok((
2937 MergeDetails {
2938 message: message.map(SharedString::from),
2939 ..prev_snapshot.merge.clone()
2940 },
2941 false,
2942 ));
2943 }
2944
2945 current_conflicted_paths
2946 } else {
2947 prev_snapshot.merge.conflicted_paths.clone()
2948 };
2949 let details = MergeDetails {
2950 conflicted_paths,
2951 message: message.map(SharedString::from),
2952 heads,
2953 };
2954 Ok((details, merge_heads_changed))
2955 }
2956}
2957
2958impl Repository {
2959 pub fn snapshot(&self) -> RepositorySnapshot {
2960 self.snapshot.clone()
2961 }
2962
2963 fn local(
2964 id: RepositoryId,
2965 work_directory_abs_path: Arc<Path>,
2966 dot_git_abs_path: Arc<Path>,
2967 repository_dir_abs_path: Arc<Path>,
2968 common_dir_abs_path: Arc<Path>,
2969 project_environment: WeakEntity<ProjectEnvironment>,
2970 fs: Arc<dyn Fs>,
2971 git_store: WeakEntity<GitStore>,
2972 cx: &mut Context<Self>,
2973 ) -> Self {
2974 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2975 Repository {
2976 this: cx.weak_entity(),
2977 git_store,
2978 snapshot,
2979 commit_message_buffer: None,
2980 askpass_delegates: Default::default(),
2981 paths_needing_status_update: Default::default(),
2982 latest_askpass_id: 0,
2983 job_sender: Repository::spawn_local_git_worker(
2984 work_directory_abs_path,
2985 dot_git_abs_path,
2986 repository_dir_abs_path,
2987 common_dir_abs_path,
2988 project_environment,
2989 fs,
2990 cx,
2991 ),
2992 job_id: 0,
2993 active_jobs: Default::default(),
2994 }
2995 }
2996
2997 fn remote(
2998 id: RepositoryId,
2999 work_directory_abs_path: Arc<Path>,
3000 project_id: ProjectId,
3001 client: AnyProtoClient,
3002 git_store: WeakEntity<GitStore>,
3003 cx: &mut Context<Self>,
3004 ) -> Self {
3005 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
3006 Self {
3007 this: cx.weak_entity(),
3008 snapshot,
3009 commit_message_buffer: None,
3010 git_store,
3011 paths_needing_status_update: Default::default(),
3012 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3013 askpass_delegates: Default::default(),
3014 latest_askpass_id: 0,
3015 active_jobs: Default::default(),
3016 job_id: 0,
3017 }
3018 }
3019
3020 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3021 self.git_store.upgrade()
3022 }
3023
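    /// Reloads the index and HEAD base texts for every open buffer that belongs to
    /// this repository, then applies any resulting changes to the buffers' diff
    /// state and forwards them to the downstream client, if one is connected.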
3024 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3025 let this = cx.weak_entity();
3026 let git_store = self.git_store.clone();
3027 let _ = self.send_keyed_job(
3028 Some(GitJobKey::ReloadBufferDiffBases),
3029 None,
3030 |state, mut cx| async move {
3031 let RepositoryState::Local { backend, .. } = state else {
3032 log::error!("tried to recompute diffs for a non-local repository");
3033 return Ok(());
3034 };
3035
3036 let Some(this) = this.upgrade() else {
3037 return Ok(());
3038 };
3039
3040 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3041 git_store.update(cx, |git_store, cx| {
3042 git_store
3043 .diffs
3044 .iter()
3045 .filter_map(|(buffer_id, diff_state)| {
3046 let buffer_store = git_store.buffer_store.read(cx);
3047 let buffer = buffer_store.get(*buffer_id)?;
3048 let file = File::from_dyn(buffer.read(cx).file())?;
3049 let abs_path =
3050 file.worktree.read(cx).absolutize(&file.path).ok()?;
3051 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3052 log::debug!(
3053 "start reload diff bases for repo path {}",
3054 repo_path.0.display()
3055 );
3056 diff_state.update(cx, |diff_state, _| {
3057 let has_unstaged_diff = diff_state
3058 .unstaged_diff
3059 .as_ref()
3060 .is_some_and(|diff| diff.is_upgradable());
3061 let has_uncommitted_diff = diff_state
3062 .uncommitted_diff
3063 .as_ref()
3064 .is_some_and(|set| set.is_upgradable());
3065
3066 Some((
3067 buffer,
3068 repo_path,
3069 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3070 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3071 ))
3072 })
3073 })
3074 .collect::<Vec<_>>()
3075 })
3076 })??;
3077
3078 let buffer_diff_base_changes = cx
3079 .background_spawn(async move {
3080 let mut changes = Vec::new();
3081 for (buffer, repo_path, current_index_text, current_head_text) in
3082 &repo_diff_state_updates
3083 {
3084 let index_text = if current_index_text.is_some() {
3085 backend.load_index_text(repo_path.clone()).await
3086 } else {
3087 None
3088 };
3089 let head_text = if current_head_text.is_some() {
3090 backend.load_committed_text(repo_path.clone()).await
3091 } else {
3092 None
3093 };
3094
3095 let change =
3096 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3097 (Some(current_index), Some(current_head)) => {
3098 let index_changed =
3099 index_text.as_ref() != current_index.as_deref();
3100 let head_changed =
3101 head_text.as_ref() != current_head.as_deref();
3102 if index_changed && head_changed {
3103 if index_text == head_text {
3104 Some(DiffBasesChange::SetBoth(head_text))
3105 } else {
3106 Some(DiffBasesChange::SetEach {
3107 index: index_text,
3108 head: head_text,
3109 })
3110 }
3111 } else if index_changed {
3112 Some(DiffBasesChange::SetIndex(index_text))
3113 } else if head_changed {
3114 Some(DiffBasesChange::SetHead(head_text))
3115 } else {
3116 None
3117 }
3118 }
3119 (Some(current_index), None) => {
3120 let index_changed =
3121 index_text.as_ref() != current_index.as_deref();
3122 index_changed
3123 .then_some(DiffBasesChange::SetIndex(index_text))
3124 }
3125 (None, Some(current_head)) => {
3126 let head_changed =
3127 head_text.as_ref() != current_head.as_deref();
3128 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3129 }
3130 (None, None) => None,
3131 };
3132
3133 changes.push((buffer.clone(), change))
3134 }
3135 changes
3136 })
3137 .await;
3138
3139 git_store.update(&mut cx, |git_store, cx| {
3140 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3141 let buffer_snapshot = buffer.read(cx).text_snapshot();
3142 let buffer_id = buffer_snapshot.remote_id();
3143 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3144 continue;
3145 };
3146
3147 let downstream_client = git_store.downstream_client();
3148 diff_state.update(cx, |diff_state, cx| {
3149 use proto::update_diff_bases::Mode;
3150
3151 if let Some((diff_bases_change, (client, project_id))) =
3152 diff_bases_change.clone().zip(downstream_client)
3153 {
3154 let (staged_text, committed_text, mode) = match diff_bases_change {
3155 DiffBasesChange::SetIndex(index) => {
3156 (index, None, Mode::IndexOnly)
3157 }
3158 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3159 DiffBasesChange::SetEach { index, head } => {
3160 (index, head, Mode::IndexAndHead)
3161 }
3162 DiffBasesChange::SetBoth(text) => {
3163 (None, text, Mode::IndexMatchesHead)
3164 }
3165 };
3166 client
3167 .send(proto::UpdateDiffBases {
3168 project_id: project_id.to_proto(),
3169 buffer_id: buffer_id.to_proto(),
3170 staged_text,
3171 committed_text,
3172 mode: mode as i32,
3173 })
3174 .log_err();
3175 }
3176
3177 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3178 });
3179 }
3180 })
3181 },
3182 );
3183 }
3184
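    /// Enqueues a job to run on this repository's background git worker,
    /// optionally recording a status message in `active_jobs` while it runs.
    /// The returned receiver yields the job's result once it completes.
    ///
    /// A minimal usage sketch (hypothetical call site):
    ///
    /// ```ignore
    /// let rx = repository.send_job(Some("git branch".into()), |state, _cx| async move {
    ///     match state {
    ///         RepositoryState::Local { backend, .. } => backend.branches().await,
    ///         RepositoryState::Remote { .. } => anyhow::bail!("not handled in this sketch"),
    ///     }
    /// });
    /// ```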
3185 pub fn send_job<F, Fut, R>(
3186 &mut self,
3187 status: Option<SharedString>,
3188 job: F,
3189 ) -> oneshot::Receiver<R>
3190 where
3191 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3192 Fut: Future<Output = R> + 'static,
3193 R: Send + 'static,
3194 {
3195 self.send_keyed_job(None, status, job)
3196 }
3197
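    /// Like `send_job`, but attaches an optional `GitJobKey` to the queued job so
    /// that the worker loop can recognize jobs of the same kind (see, for example,
    /// `GitJobKey::ReloadBufferDiffBases` above); how jobs with matching keys are
    /// treated is up to the worker.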
3198 fn send_keyed_job<F, Fut, R>(
3199 &mut self,
3200 key: Option<GitJobKey>,
3201 status: Option<SharedString>,
3202 job: F,
3203 ) -> oneshot::Receiver<R>
3204 where
3205 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3206 Fut: Future<Output = R> + 'static,
3207 R: Send + 'static,
3208 {
3209 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3210 let job_id = post_inc(&mut self.job_id);
3211 let this = self.this.clone();
3212 self.job_sender
3213 .unbounded_send(GitJob {
3214 key,
3215 job: Box::new(move |state, cx: &mut AsyncApp| {
3216 let job = job(state, cx.clone());
3217 cx.spawn(async move |cx| {
3218 if let Some(s) = status.clone() {
3219 this.update(cx, |this, cx| {
3220 this.active_jobs.insert(
3221 job_id,
3222 JobInfo {
3223 start: Instant::now(),
3224 message: s.clone(),
3225 },
3226 );
3227
3228 cx.notify();
3229 })
3230 .ok();
3231 }
3232 let result = job.await;
3233
3234 this.update(cx, |this, cx| {
3235 this.active_jobs.remove(&job_id);
3236 cx.notify();
3237 })
3238 .ok();
3239
3240 result_tx.send(result).ok();
3241 })
3242 }),
3243 })
3244 .ok();
3245 result_rx
3246 }
3247
3248 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3249 let Some(git_store) = self.git_store.upgrade() else {
3250 return;
3251 };
3252 let entity = cx.entity();
3253 git_store.update(cx, |git_store, cx| {
3254 let Some((&id, _)) = git_store
3255 .repositories
3256 .iter()
3257 .find(|(_, handle)| *handle == &entity)
3258 else {
3259 return;
3260 };
3261 git_store.active_repo_id = Some(id);
3262 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3263 });
3264 }
3265
3266 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3267 self.snapshot.status()
3268 }
3269
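    /// Converts a repository-relative path into a `ProjectPath` by joining it onto
    /// the repository's working directory and locating the worktree that contains
    /// the resulting absolute path. Returns `None` if no worktree contains it.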
3270 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3271 let git_store = self.git_store.upgrade()?;
3272 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3273 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
3274 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3275 Some(ProjectPath {
3276 worktree_id: worktree.read(cx).id(),
3277 path: relative_path.into(),
3278 })
3279 }
3280
3281 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3282 let git_store = self.git_store.upgrade()?;
3283 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3284 let abs_path = worktree_store.absolutize(path, cx)?;
3285 self.snapshot.abs_path_to_repo_path(&abs_path)
3286 }
3287
3288 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3289 other
3290 .read(cx)
3291 .snapshot
3292 .work_directory_abs_path
3293 .starts_with(&self.snapshot.work_directory_abs_path)
3294 }
3295
3296 pub fn open_commit_buffer(
3297 &mut self,
3298 languages: Option<Arc<LanguageRegistry>>,
3299 buffer_store: Entity<BufferStore>,
3300 cx: &mut Context<Self>,
3301 ) -> Task<Result<Entity<Buffer>>> {
3302 let id = self.id;
3303 if let Some(buffer) = self.commit_message_buffer.clone() {
3304 return Task::ready(Ok(buffer));
3305 }
3306 let this = cx.weak_entity();
3307
3308 let rx = self.send_job(None, move |state, mut cx| async move {
3309 let Some(this) = this.upgrade() else {
3310 bail!("git store was dropped");
3311 };
3312 match state {
3313 RepositoryState::Local { .. } => {
3314 this.update(&mut cx, |_, cx| {
3315 Self::open_local_commit_buffer(languages, buffer_store, cx)
3316 })?
3317 .await
3318 }
3319 RepositoryState::Remote { project_id, client } => {
3320 let request = client.request(proto::OpenCommitMessageBuffer {
3321 project_id: project_id.0,
3322 repository_id: id.to_proto(),
3323 });
3324 let response = request.await.context("requesting to open commit buffer")?;
3325 let buffer_id = BufferId::new(response.buffer_id)?;
3326 let buffer = buffer_store
3327 .update(&mut cx, |buffer_store, cx| {
3328 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3329 })?
3330 .await?;
3331 if let Some(language_registry) = languages {
3332 let git_commit_language =
3333 language_registry.language_for_name("Git Commit").await?;
3334 buffer.update(&mut cx, |buffer, cx| {
3335 buffer.set_language(Some(git_commit_language), cx);
3336 })?;
3337 }
3338 this.update(&mut cx, |this, _| {
3339 this.commit_message_buffer = Some(buffer.clone());
3340 })?;
3341 Ok(buffer)
3342 }
3343 }
3344 });
3345
3346 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3347 }
3348
3349 fn open_local_commit_buffer(
3350 language_registry: Option<Arc<LanguageRegistry>>,
3351 buffer_store: Entity<BufferStore>,
3352 cx: &mut Context<Self>,
3353 ) -> Task<Result<Entity<Buffer>>> {
3354 cx.spawn(async move |repository, cx| {
3355 let buffer = buffer_store
3356 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
3357 .await?;
3358
3359 if let Some(language_registry) = language_registry {
3360 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3361 buffer.update(cx, |buffer, cx| {
3362 buffer.set_language(Some(git_commit_language), cx);
3363 })?;
3364 }
3365
3366 repository.update(cx, |repository, _| {
3367 repository.commit_message_buffer = Some(buffer.clone());
3368 })?;
3369 Ok(buffer)
3370 })
3371 }
3372
3373 pub fn checkout_files(
3374 &mut self,
3375 commit: &str,
3376 paths: Vec<RepoPath>,
3377 _cx: &mut App,
3378 ) -> oneshot::Receiver<Result<()>> {
3379 let commit = commit.to_string();
3380 let id = self.id;
3381
3382 self.send_job(
3383 Some(format!("git checkout {}", commit).into()),
3384 move |git_repo, _| async move {
3385 match git_repo {
3386 RepositoryState::Local {
3387 backend,
3388 environment,
3389 ..
3390 } => {
3391 backend
3392 .checkout_files(commit, paths, environment.clone())
3393 .await
3394 }
3395 RepositoryState::Remote { project_id, client } => {
3396 client
3397 .request(proto::GitCheckoutFiles {
3398 project_id: project_id.0,
3399 repository_id: id.to_proto(),
3400 commit,
3401 paths: paths
3402 .into_iter()
3403 .map(|p| p.to_string_lossy().to_string())
3404 .collect(),
3405 })
3406 .await?;
3407
3408 Ok(())
3409 }
3410 }
3411 },
3412 )
3413 }
3414
3415 pub fn reset(
3416 &mut self,
3417 commit: String,
3418 reset_mode: ResetMode,
3419 _cx: &mut App,
3420 ) -> oneshot::Receiver<Result<()>> {
3421 let commit = commit.to_string();
3422 let id = self.id;
3423
3424 self.send_job(None, move |git_repo, _| async move {
3425 match git_repo {
3426 RepositoryState::Local {
3427 backend,
3428 environment,
3429 ..
3430 } => backend.reset(commit, reset_mode, environment).await,
3431 RepositoryState::Remote { project_id, client } => {
3432 client
3433 .request(proto::GitReset {
3434 project_id: project_id.0,
3435 repository_id: id.to_proto(),
3436 commit,
3437 mode: match reset_mode {
3438 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3439 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3440 },
3441 })
3442 .await?;
3443
3444 Ok(())
3445 }
3446 }
3447 })
3448 }
3449
3450 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3451 let id = self.id;
3452 self.send_job(None, move |git_repo, _cx| async move {
3453 match git_repo {
3454 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3455 RepositoryState::Remote { project_id, client } => {
3456 let resp = client
3457 .request(proto::GitShow {
3458 project_id: project_id.0,
3459 repository_id: id.to_proto(),
3460 commit,
3461 })
3462 .await?;
3463
3464 Ok(CommitDetails {
3465 sha: resp.sha.into(),
3466 message: resp.message.into(),
3467 commit_timestamp: resp.commit_timestamp,
3468 author_email: resp.author_email.into(),
3469 author_name: resp.author_name.into(),
3470 })
3471 }
3472 }
3473 })
3474 }
3475
3476 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3477 let id = self.id;
3478 self.send_job(None, move |git_repo, cx| async move {
3479 match git_repo {
3480 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3481 RepositoryState::Remote {
3482 client, project_id, ..
3483 } => {
3484 let response = client
3485 .request(proto::LoadCommitDiff {
3486 project_id: project_id.0,
3487 repository_id: id.to_proto(),
3488 commit,
3489 })
3490 .await?;
3491 Ok(CommitDiff {
3492 files: response
3493 .files
3494 .into_iter()
3495 .map(|file| CommitFile {
3496 path: Path::new(&file.path).into(),
3497 old_text: file.old_text,
3498 new_text: file.new_text,
3499 })
3500 .collect(),
3501 })
3502 }
3503 }
3504 })
3505 }
3506
3507 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3508 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3509 }
3510
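    /// Stages the given paths in the index. Any open buffers for these paths that
    /// still exist on disk are saved first, so that the staged content matches
    /// what is shown in the editor.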
3511 pub fn stage_entries(
3512 &self,
3513 entries: Vec<RepoPath>,
3514 cx: &mut Context<Self>,
3515 ) -> Task<anyhow::Result<()>> {
3516 if entries.is_empty() {
3517 return Task::ready(Ok(()));
3518 }
3519 let id = self.id;
3520
3521 let mut save_futures = Vec::new();
3522 if let Some(buffer_store) = self.buffer_store(cx) {
3523 buffer_store.update(cx, |buffer_store, cx| {
3524 for path in &entries {
3525 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3526 continue;
3527 };
3528 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3529 && buffer
3530 .read(cx)
3531 .file()
3532 .is_some_and(|file| file.disk_state().exists())
3533 {
3534 save_futures.push(buffer_store.save_buffer(buffer, cx));
3535 }
3536 }
3537 })
3538 }
3539
3540 cx.spawn(async move |this, cx| {
3541 for save_future in save_futures {
3542 save_future.await?;
3543 }
3544
3545 this.update(cx, |this, _| {
3546 this.send_job(None, move |git_repo, _cx| async move {
3547 match git_repo {
3548 RepositoryState::Local {
3549 backend,
3550 environment,
3551 ..
3552 } => backend.stage_paths(entries, environment.clone()).await,
3553 RepositoryState::Remote { project_id, client } => {
3554 client
3555 .request(proto::Stage {
3556 project_id: project_id.0,
3557 repository_id: id.to_proto(),
3558 paths: entries
3559 .into_iter()
3560 .map(|repo_path| repo_path.as_ref().to_proto())
3561 .collect(),
3562 })
3563 .await
3564 .context("sending stage request")?;
3565
3566 Ok(())
3567 }
3568 }
3569 })
3570 })?
3571 .await??;
3572
3573 Ok(())
3574 })
3575 }
3576
3577 pub fn unstage_entries(
3578 &self,
3579 entries: Vec<RepoPath>,
3580 cx: &mut Context<Self>,
3581 ) -> Task<anyhow::Result<()>> {
3582 if entries.is_empty() {
3583 return Task::ready(Ok(()));
3584 }
3585 let id = self.id;
3586
3587 let mut save_futures = Vec::new();
3588 if let Some(buffer_store) = self.buffer_store(cx) {
3589 buffer_store.update(cx, |buffer_store, cx| {
3590 for path in &entries {
3591 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3592 continue;
3593 };
3594 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3595 && buffer
3596 .read(cx)
3597 .file()
3598 .is_some_and(|file| file.disk_state().exists())
3599 {
3600 save_futures.push(buffer_store.save_buffer(buffer, cx));
3601 }
3602 }
3603 })
3604 }
3605
3606 cx.spawn(async move |this, cx| {
3607 for save_future in save_futures {
3608 save_future.await?;
3609 }
3610
3611 this.update(cx, |this, _| {
3612 this.send_job(None, move |git_repo, _cx| async move {
3613 match git_repo {
3614 RepositoryState::Local {
3615 backend,
3616 environment,
3617 ..
3618 } => backend.unstage_paths(entries, environment).await,
3619 RepositoryState::Remote { project_id, client } => {
3620 client
3621 .request(proto::Unstage {
3622 project_id: project_id.0,
3623 repository_id: id.to_proto(),
3624 paths: entries
3625 .into_iter()
3626 .map(|repo_path| repo_path.as_ref().to_proto())
3627 .collect(),
3628 })
3629 .await
3630 .context("sending unstage request")?;
3631
3632 Ok(())
3633 }
3634 }
3635 })
3636 })?
3637 .await??;
3638
3639 Ok(())
3640 })
3641 }
3642
3643 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3644 let to_stage = self
3645 .cached_status()
3646 .filter(|entry| !entry.status.staging().is_fully_staged())
3647 .map(|entry| entry.repo_path.clone())
3648 .collect();
3649 self.stage_entries(to_stage, cx)
3650 }
3651
3652 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3653 let to_unstage = self
3654 .cached_status()
3655 .filter(|entry| entry.status.staging().has_staged())
3656 .map(|entry| entry.repo_path.clone())
3657 .collect();
3658 self.unstage_entries(to_unstage, cx)
3659 }
3660
3661 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3662 let to_stash = self
3663 .cached_status()
3664 .map(|entry| entry.repo_path.clone())
3665 .collect();
3666
3667 self.stash_entries(to_stash, cx)
3668 }
3669
3670 pub fn stash_entries(
3671 &mut self,
3672 entries: Vec<RepoPath>,
3673 cx: &mut Context<Self>,
3674 ) -> Task<anyhow::Result<()>> {
3675 let id = self.id;
3676
3677 cx.spawn(async move |this, cx| {
3678 this.update(cx, |this, _| {
3679 this.send_job(None, move |git_repo, _cx| async move {
3680 match git_repo {
3681 RepositoryState::Local {
3682 backend,
3683 environment,
3684 ..
3685 } => backend.stash_paths(entries, environment).await,
3686 RepositoryState::Remote { project_id, client } => {
3687 client
3688 .request(proto::Stash {
3689 project_id: project_id.0,
3690 repository_id: id.to_proto(),
3691 paths: entries
3692 .into_iter()
3693 .map(|repo_path| repo_path.as_ref().to_proto())
3694 .collect(),
3695 })
3696 .await
3697 .context("sending stash request")?;
3698 Ok(())
3699 }
3700 }
3701 })
3702 })?
3703 .await??;
3704 Ok(())
3705 })
3706 }
3707
3708 pub fn stash_pop(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3709 let id = self.id;
3710 cx.spawn(async move |this, cx| {
3711 this.update(cx, |this, _| {
3712 this.send_job(None, move |git_repo, _cx| async move {
3713 match git_repo {
3714 RepositoryState::Local {
3715 backend,
3716 environment,
3717 ..
3718 } => backend.stash_pop(environment).await,
3719 RepositoryState::Remote { project_id, client } => {
3720 client
3721 .request(proto::StashPop {
3722 project_id: project_id.0,
3723 repository_id: id.to_proto(),
3724 })
3725 .await
3726 .context("sending stash pop request")?;
3727 Ok(())
3728 }
3729 }
3730 })
3731 })?
3732 .await??;
3733 Ok(())
3734 })
3735 }
3736
3737 pub fn commit(
3738 &mut self,
3739 message: SharedString,
3740 name_and_email: Option<(SharedString, SharedString)>,
3741 options: CommitOptions,
3742 _cx: &mut App,
3743 ) -> oneshot::Receiver<Result<()>> {
3744 let id = self.id;
3745
3746 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3747 match git_repo {
3748 RepositoryState::Local {
3749 backend,
3750 environment,
3751 ..
3752 } => {
3753 backend
3754 .commit(message, name_and_email, options, environment)
3755 .await
3756 }
3757 RepositoryState::Remote { project_id, client } => {
3758 let (name, email) = name_and_email.unzip();
3759 client
3760 .request(proto::Commit {
3761 project_id: project_id.0,
3762 repository_id: id.to_proto(),
3763 message: String::from(message),
3764 name: name.map(String::from),
3765 email: email.map(String::from),
3766 options: Some(proto::commit::CommitOptions {
3767 amend: options.amend,
3768 signoff: options.signoff,
3769 }),
3770 })
3771 .await
3772 .context("sending commit request")?;
3773
3774 Ok(())
3775 }
3776 }
3777 })
3778 }
3779
3780 pub fn fetch(
3781 &mut self,
3782 fetch_options: FetchOptions,
3783 askpass: AskPassDelegate,
3784 _cx: &mut App,
3785 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3786 let askpass_delegates = self.askpass_delegates.clone();
3787 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3788 let id = self.id;
3789
3790 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3791 match git_repo {
3792 RepositoryState::Local {
3793 backend,
3794 environment,
3795 ..
3796 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3797 RepositoryState::Remote { project_id, client } => {
3798 askpass_delegates.lock().insert(askpass_id, askpass);
3799 let _defer = util::defer(|| {
3800 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3801 debug_assert!(askpass_delegate.is_some());
3802 });
3803
3804 let response = client
3805 .request(proto::Fetch {
3806 project_id: project_id.0,
3807 repository_id: id.to_proto(),
3808 askpass_id,
3809 remote: fetch_options.to_proto(),
3810 })
3811 .await
3812 .context("sending fetch request")?;
3813
3814 Ok(RemoteCommandOutput {
3815 stdout: response.stdout,
3816 stderr: response.stderr,
3817 })
3818 }
3819 }
3820 })
3821 }
3822
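    /// Pushes the given branch to the given remote. On success for a local
    /// repository, the cached head branch is refreshed and the updated snapshot
    /// is forwarded to any downstream client.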
3823 pub fn push(
3824 &mut self,
3825 branch: SharedString,
3826 remote: SharedString,
3827 options: Option<PushOptions>,
3828 askpass: AskPassDelegate,
3829 cx: &mut Context<Self>,
3830 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3831 let askpass_delegates = self.askpass_delegates.clone();
3832 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3833 let id = self.id;
3834
3835 let args = options
3836 .map(|option| match option {
3837 PushOptions::SetUpstream => " --set-upstream",
3838 PushOptions::Force => " --force-with-lease",
3839 })
3840 .unwrap_or("");
3841
3842 let updates_tx = self
3843 .git_store()
3844 .and_then(|git_store| match &git_store.read(cx).state {
3845 GitStoreState::Local { downstream, .. } => downstream
3846 .as_ref()
3847 .map(|downstream| downstream.updates_tx.clone()),
3848 _ => None,
3849 });
3850
3851 let this = cx.weak_entity();
3852 self.send_job(
3853 Some(format!("git push {} {} {}", args, branch, remote).into()),
3854 move |git_repo, mut cx| async move {
3855 match git_repo {
3856 RepositoryState::Local {
3857 backend,
3858 environment,
3859 ..
3860 } => {
3861 let result = backend
3862 .push(
3863 branch.to_string(),
3864 remote.to_string(),
3865 options,
3866 askpass,
3867 environment.clone(),
3868 cx.clone(),
3869 )
3870 .await;
3871 if result.is_ok() {
3872 let branches = backend.branches().await?;
3873 let branch = branches.into_iter().find(|branch| branch.is_head);
3874 log::info!("head branch after scan is {branch:?}");
3875 let snapshot = this.update(&mut cx, |this, cx| {
3876 this.snapshot.branch = branch;
3877 let snapshot = this.snapshot.clone();
3878 cx.emit(RepositoryEvent::Updated {
3879 full_scan: false,
3880 new_instance: false,
3881 });
3882 snapshot
3883 })?;
3884 if let Some(updates_tx) = updates_tx {
3885 updates_tx
3886 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3887 .ok();
3888 }
3889 }
3890 result
3891 }
3892 RepositoryState::Remote { project_id, client } => {
3893 askpass_delegates.lock().insert(askpass_id, askpass);
3894 let _defer = util::defer(|| {
3895 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3896 debug_assert!(askpass_delegate.is_some());
3897 });
3898 let response = client
3899 .request(proto::Push {
3900 project_id: project_id.0,
3901 repository_id: id.to_proto(),
3902 askpass_id,
3903 branch_name: branch.to_string(),
3904 remote_name: remote.to_string(),
3905 options: options.map(|options| match options {
3906 PushOptions::Force => proto::push::PushOptions::Force,
3907 PushOptions::SetUpstream => {
3908 proto::push::PushOptions::SetUpstream
3909 }
3910 }
3911 as i32),
3912 })
3913 .await
3914 .context("sending push request")?;
3915
3916 Ok(RemoteCommandOutput {
3917 stdout: response.stdout,
3918 stderr: response.stderr,
3919 })
3920 }
3921 }
3922 },
3923 )
3924 }
3925
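    /// Pulls the given branch from the given remote, prompting through the
    /// askpass delegate if credentials are required.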
3926 pub fn pull(
3927 &mut self,
3928 branch: SharedString,
3929 remote: SharedString,
3930 askpass: AskPassDelegate,
3931 _cx: &mut App,
3932 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3933 let askpass_delegates = self.askpass_delegates.clone();
3934 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3935 let id = self.id;
3936
3937 self.send_job(
3938 Some(format!("git pull {} {}", remote, branch).into()),
3939 move |git_repo, cx| async move {
3940 match git_repo {
3941 RepositoryState::Local {
3942 backend,
3943 environment,
3944 ..
3945 } => {
3946 backend
3947 .pull(
3948 branch.to_string(),
3949 remote.to_string(),
3950 askpass,
3951 environment.clone(),
3952 cx,
3953 )
3954 .await
3955 }
3956 RepositoryState::Remote { project_id, client } => {
3957 askpass_delegates.lock().insert(askpass_id, askpass);
3958 let _defer = util::defer(|| {
3959 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3960 debug_assert!(askpass_delegate.is_some());
3961 });
3962 let response = client
3963 .request(proto::Pull {
3964 project_id: project_id.0,
3965 repository_id: id.to_proto(),
3966 askpass_id,
3967 branch_name: branch.to_string(),
3968 remote_name: remote.to_string(),
3969 })
3970 .await
3971 .context("sending pull request")?;
3972
3973 Ok(RemoteCommandOutput {
3974 stdout: response.stdout,
3975 stderr: response.stderr,
3976 })
3977 }
3978 }
3979 },
3980 )
3981 }
3982
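    /// Queues a write of `content` to the index entry for `path`. The job is keyed
    /// on the path, so a queued write is skipped when a newer write to the same
    /// path arrives. After the write, the buffer's diff state records the
    /// hunk-staging operation count that the written index text corresponds to.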
3983 fn spawn_set_index_text_job(
3984 &mut self,
3985 path: RepoPath,
3986 content: Option<String>,
3987 hunk_staging_operation_count: Option<usize>,
3988 cx: &mut Context<Self>,
3989 ) -> oneshot::Receiver<anyhow::Result<()>> {
3990 let id = self.id;
3991 let this = cx.weak_entity();
3992 let git_store = self.git_store.clone();
3993 self.send_keyed_job(
3994 Some(GitJobKey::WriteIndex(path.clone())),
3995 None,
3996 move |git_repo, mut cx| async move {
3997 log::debug!("start updating index text for buffer {}", path.display());
3998 match git_repo {
3999 RepositoryState::Local {
4000 backend,
4001 environment,
4002 ..
4003 } => {
4004 backend
4005 .set_index_text(path.clone(), content, environment.clone())
4006 .await?;
4007 }
4008 RepositoryState::Remote { project_id, client } => {
4009 client
4010 .request(proto::SetIndexText {
4011 project_id: project_id.0,
4012 repository_id: id.to_proto(),
4013 path: path.as_ref().to_proto(),
4014 text: content,
4015 })
4016 .await?;
4017 }
4018 }
4019 log::debug!("finish updating index text for buffer {}", path.display());
4020
4021 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4022 let project_path = this
4023 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4024 .ok()
4025 .flatten();
4026 git_store.update(&mut cx, |git_store, cx| {
4027 let buffer_id = git_store
4028 .buffer_store
4029 .read(cx)
4030 .get_by_path(&project_path?)?
4031 .read(cx)
4032 .remote_id();
4033 let diff_state = git_store.diffs.get(&buffer_id)?;
4034 diff_state.update(cx, |diff_state, _| {
4035 diff_state.hunk_staging_operation_count_as_of_write =
4036 hunk_staging_operation_count;
4037 });
4038 Some(())
4039 })?;
4040 }
4041 Ok(())
4042 },
4043 )
4044 }
4045
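    /// Lists the repository's remotes. `branch_name`, when given, is forwarded to
    /// the backend or to the remote host.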
4046 pub fn get_remotes(
4047 &mut self,
4048 branch_name: Option<String>,
4049 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4050 let id = self.id;
4051 self.send_job(None, move |repo, _cx| async move {
4052 match repo {
4053 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4054 RepositoryState::Remote { project_id, client } => {
4055 let response = client
4056 .request(proto::GetRemotes {
4057 project_id: project_id.0,
4058 repository_id: id.to_proto(),
4059 branch_name,
4060 })
4061 .await?;
4062
4063 let remotes = response
4064 .remotes
4065 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
4069 .collect();
4070
4071 Ok(remotes)
4072 }
4073 }
4074 })
4075 }
4076
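    /// Lists the repository's branches.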
4077 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4078 let id = self.id;
4079 self.send_job(None, move |repo, _| async move {
4080 match repo {
4081 RepositoryState::Local { backend, .. } => backend.branches().await,
4082 RepositoryState::Remote { project_id, client } => {
4083 let response = client
4084 .request(proto::GitGetBranches {
4085 project_id: project_id.0,
4086 repository_id: id.to_proto(),
4087 })
4088 .await?;
4089
4090 let branches = response
4091 .branches
4092 .into_iter()
4093 .map(|branch| proto_to_branch(&branch))
4094 .collect();
4095
4096 Ok(branches)
4097 }
4098 }
4099 })
4100 }
4101
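    /// Returns the repository's default branch, if one can be determined.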
4102 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4103 let id = self.id;
4104 self.send_job(None, move |repo, _| async move {
4105 match repo {
4106 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4107 RepositoryState::Remote { project_id, client } => {
4108 let response = client
4109 .request(proto::GetDefaultBranch {
4110 project_id: project_id.0,
4111 repository_id: id.to_proto(),
4112 })
4113 .await?;
4114
4115 anyhow::Ok(response.branch.map(SharedString::from))
4116 }
4117 }
4118 })
4119 }
4120
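    /// Produces a textual diff, either of HEAD against the index or of HEAD
    /// against the working tree, depending on `diff_type`.
    ///
    /// A hypothetical call site (assuming `repo: Entity<Repository>`):
    ///
    /// ```ignore
    /// let rx = repo.update(cx, |repo, cx| repo.diff(DiffType::HeadToWorktree, cx));
    /// ```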
4121 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4122 let id = self.id;
4123 self.send_job(None, move |repo, _cx| async move {
4124 match repo {
4125 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4126 RepositoryState::Remote { project_id, client } => {
4127 let response = client
4128 .request(proto::GitDiff {
4129 project_id: project_id.0,
4130 repository_id: id.to_proto(),
4131 diff_type: match diff_type {
4132 DiffType::HeadToIndex => {
4133 proto::git_diff::DiffType::HeadToIndex.into()
4134 }
4135 DiffType::HeadToWorktree => {
4136 proto::git_diff::DiffType::HeadToWorktree.into()
4137 }
4138 },
4139 })
4140 .await?;
4141
4142 Ok(response.diff)
4143 }
4144 }
4145 })
4146 }
4147
4148 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4149 let id = self.id;
4150 self.send_job(
4151 Some(format!("git switch -c {branch_name}").into()),
4152 move |repo, _cx| async move {
4153 match repo {
4154 RepositoryState::Local { backend, .. } => {
4155 backend.create_branch(branch_name).await
4156 }
4157 RepositoryState::Remote { project_id, client } => {
4158 client
4159 .request(proto::GitCreateBranch {
4160 project_id: project_id.0,
4161 repository_id: id.to_proto(),
4162 branch_name,
4163 })
4164 .await?;
4165
4166 Ok(())
4167 }
4168 }
4169 },
4170 )
4171 }
4172
4173 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4174 let id = self.id;
4175 self.send_job(
4176 Some(format!("git switch {branch_name}").into()),
4177 move |repo, _cx| async move {
4178 match repo {
4179 RepositoryState::Local { backend, .. } => {
4180 backend.change_branch(branch_name).await
4181 }
4182 RepositoryState::Remote { project_id, client } => {
4183 client
4184 .request(proto::GitChangeBranch {
4185 project_id: project_id.0,
4186 repository_id: id.to_proto(),
4187 branch_name,
4188 })
4189 .await?;
4190
4191 Ok(())
4192 }
4193 }
4194 },
4195 )
4196 }
4197
4198 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4199 let id = self.id;
4200 self.send_job(None, move |repo, _cx| async move {
4201 match repo {
4202 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4203 RepositoryState::Remote { project_id, client } => {
4204 let response = client
4205 .request(proto::CheckForPushedCommits {
4206 project_id: project_id.0,
4207 repository_id: id.to_proto(),
4208 })
4209 .await?;
4210
4211 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4212
4213 Ok(branches)
4214 }
4215 }
4216 })
4217 }
4218
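    /// Captures a checkpoint of the repository's current state. Only supported for
    /// local repositories; remote repositories return an error.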
4219 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4220 self.send_job(None, |repo, _cx| async move {
4221 match repo {
4222 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
                RepositoryState::Remote { .. } => {
                    anyhow::bail!("checkpointing is not yet supported for remote repositories")
                }
4224 }
4225 })
4226 }
4227
4228 pub fn restore_checkpoint(
4229 &mut self,
4230 checkpoint: GitRepositoryCheckpoint,
4231 ) -> oneshot::Receiver<Result<()>> {
4232 self.send_job(None, move |repo, _cx| async move {
4233 match repo {
4234 RepositoryState::Local { backend, .. } => {
4235 backend.restore_checkpoint(checkpoint).await
4236 }
                RepositoryState::Remote { .. } => {
                    anyhow::bail!("restoring checkpoints is not yet supported for remote repositories")
                }
4238 }
4239 })
4240 }
4241
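    /// Applies a repository update received from the host to the local snapshot:
    /// branch, head commit, merge state, and per-path statuses.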
4242 pub(crate) fn apply_remote_update(
4243 &mut self,
4244 update: proto::UpdateRepository,
4245 is_new: bool,
4246 cx: &mut Context<Self>,
4247 ) -> Result<()> {
4248 let conflicted_paths = TreeSet::from_ordered_entries(
4249 update
4250 .current_merge_conflicts
4251 .into_iter()
4252 .map(|path| RepoPath(Path::new(&path).into())),
4253 );
4254 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4255 self.snapshot.head_commit = update
4256 .head_commit_details
4257 .as_ref()
4258 .map(proto_to_commit_details);
4259
4260 self.snapshot.merge.conflicted_paths = conflicted_paths;
4261 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4262
4263 let edits = update
4264 .removed_statuses
4265 .into_iter()
4266 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
4267 .chain(
4268 update
4269 .updated_statuses
4270 .into_iter()
4271 .filter_map(|updated_status| {
4272 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4273 }),
4274 )
4275 .collect::<Vec<_>>();
4276 self.snapshot.statuses_by_path.edit(edits, &());
4277 if update.is_last_update {
4278 self.snapshot.scan_id = update.scan_id;
4279 }
4280 cx.emit(RepositoryEvent::Updated {
4281 full_scan: true,
4282 new_instance: is_new,
4283 });
4284 Ok(())
4285 }
4286
4287 pub fn compare_checkpoints(
4288 &mut self,
4289 left: GitRepositoryCheckpoint,
4290 right: GitRepositoryCheckpoint,
4291 ) -> oneshot::Receiver<Result<bool>> {
4292 self.send_job(None, move |repo, _cx| async move {
4293 match repo {
4294 RepositoryState::Local { backend, .. } => {
4295 backend.compare_checkpoints(left, right).await
4296 }
                RepositoryState::Remote { .. } => {
                    anyhow::bail!("comparing checkpoints is not yet supported for remote repositories")
                }
4298 }
4299 })
4300 }
4301
4302 pub fn diff_checkpoints(
4303 &mut self,
4304 base_checkpoint: GitRepositoryCheckpoint,
4305 target_checkpoint: GitRepositoryCheckpoint,
4306 ) -> oneshot::Receiver<Result<String>> {
4307 self.send_job(None, move |repo, _cx| async move {
4308 match repo {
4309 RepositoryState::Local { backend, .. } => {
4310 backend
4311 .diff_checkpoints(base_checkpoint, target_checkpoint)
4312 .await
4313 }
                RepositoryState::Remote { .. } => {
                    anyhow::bail!("diffing checkpoints is not yet supported for remote repositories")
                }
4315 }
4316 })
4317 }
4318
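    /// Schedules a full git status scan. The job is keyed, so repeated requests
    /// collapse into the most recently scheduled scan.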
4319 fn schedule_scan(
4320 &mut self,
4321 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4322 cx: &mut Context<Self>,
4323 ) {
4324 let this = cx.weak_entity();
4325 let _ = self.send_keyed_job(
4326 Some(GitJobKey::ReloadGitState),
4327 None,
4328 |state, mut cx| async move {
4329 log::debug!("run scheduled git status scan");
4330
4331 let Some(this) = this.upgrade() else {
4332 return Ok(());
4333 };
4334 let RepositoryState::Local { backend, .. } = state else {
4335 bail!("not a local repository")
4336 };
4337 let (snapshot, events) = this
4338 .update(&mut cx, |this, _| {
4339 this.paths_needing_status_update.clear();
4340 compute_snapshot(
4341 this.id,
4342 this.work_directory_abs_path.clone(),
4343 this.snapshot.clone(),
4344 backend.clone(),
4345 )
4346 })?
4347 .await?;
4348 this.update(&mut cx, |this, cx| {
4349 this.snapshot = snapshot.clone();
4350 for event in events {
4351 cx.emit(event);
4352 }
4353 })?;
4354 if let Some(updates_tx) = updates_tx {
4355 updates_tx
4356 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4357 .ok();
4358 }
4359 Ok(())
4360 },
4361 );
4362 }
4363
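    /// Spawns the background worker that owns the local git backend and processes
    /// queued jobs one at a time, in the order they were submitted.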
4364 fn spawn_local_git_worker(
4365 work_directory_abs_path: Arc<Path>,
4366 dot_git_abs_path: Arc<Path>,
4367 _repository_dir_abs_path: Arc<Path>,
4368 _common_dir_abs_path: Arc<Path>,
4369 project_environment: WeakEntity<ProjectEnvironment>,
4370 fs: Arc<dyn Fs>,
4371 cx: &mut Context<Self>,
4372 ) -> mpsc::UnboundedSender<GitJob> {
4373 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4374
4375 cx.spawn(async move |_, cx| {
4376 let environment = project_environment
4377 .upgrade()
4378 .context("missing project environment")?
4379 .update(cx, |project_environment, cx| {
4380 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4381 })?
4382 .await
4383 .unwrap_or_else(|| {
4384 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4385 HashMap::default()
4386 });
4387 let backend = cx
4388 .background_spawn(async move {
4389 fs.open_repo(&dot_git_abs_path)
4390 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4391 })
4392 .await?;
4393
4394 if let Some(git_hosting_provider_registry) =
4395 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4396 {
4397 git_hosting_providers::register_additional_providers(
4398 git_hosting_provider_registry,
4399 backend.clone(),
4400 );
4401 }
4402
4403 let state = RepositoryState::Local {
4404 backend,
4405 environment: Arc::new(environment),
4406 };
4407 let mut jobs = VecDeque::new();
4408 loop {
4409 while let Ok(Some(next_job)) = job_rx.try_next() {
4410 jobs.push_back(next_job);
4411 }
4412
4413 if let Some(job) = jobs.pop_front() {
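                    // If a newer job with the same key is already queued, skip this
                    // one so that keyed jobs (e.g. index writes for a given path)
                    // are coalesced into the most recent request.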
4414 if let Some(current_key) = &job.key
4415 && jobs
4416 .iter()
4417 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4418 {
4419 continue;
4420 }
4421 (job.job)(state.clone(), cx).await;
4422 } else if let Some(job) = job_rx.next().await {
4423 jobs.push_back(job);
4424 } else {
4425 break;
4426 }
4427 }
4428 anyhow::Ok(())
4429 })
4430 .detach_and_log_err(cx);
4431
4432 job_tx
4433 }
4434
4435 fn spawn_remote_git_worker(
4436 project_id: ProjectId,
4437 client: AnyProtoClient,
4438 cx: &mut Context<Self>,
4439 ) -> mpsc::UnboundedSender<GitJob> {
4440 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4441
4442 cx.spawn(async move |_, cx| {
4443 let state = RepositoryState::Remote { project_id, client };
4444 let mut jobs = VecDeque::new();
4445 loop {
4446 while let Ok(Some(next_job)) = job_rx.try_next() {
4447 jobs.push_back(next_job);
4448 }
4449
4450 if let Some(job) = jobs.pop_front() {
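                    // Same coalescing rule as the local worker: skip a keyed job
                    // when a newer job with the same key is still queued.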
4451 if let Some(current_key) = &job.key
4452 && jobs
4453 .iter()
4454 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4455 {
4456 continue;
4457 }
4458 (job.job)(state.clone(), cx).await;
4459 } else if let Some(job) = job_rx.next().await {
4460 jobs.push_back(job);
4461 } else {
4462 break;
4463 }
4464 }
4465 anyhow::Ok(())
4466 })
4467 .detach_and_log_err(cx);
4468
4469 job_tx
4470 }
4471
4472 fn load_staged_text(
4473 &mut self,
4474 buffer_id: BufferId,
4475 repo_path: RepoPath,
4476 cx: &App,
4477 ) -> Task<Result<Option<String>>> {
4478 let rx = self.send_job(None, move |state, _| async move {
4479 match state {
4480 RepositoryState::Local { backend, .. } => {
4481 anyhow::Ok(backend.load_index_text(repo_path).await)
4482 }
4483 RepositoryState::Remote { project_id, client } => {
4484 let response = client
4485 .request(proto::OpenUnstagedDiff {
4486 project_id: project_id.to_proto(),
4487 buffer_id: buffer_id.to_proto(),
4488 })
4489 .await?;
4490 Ok(response.staged_text)
4491 }
4492 }
4493 });
4494 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4495 }
4496
4497 fn load_committed_text(
4498 &mut self,
4499 buffer_id: BufferId,
4500 repo_path: RepoPath,
4501 cx: &App,
4502 ) -> Task<Result<DiffBasesChange>> {
4503 let rx = self.send_job(None, move |state, _| async move {
4504 match state {
4505 RepositoryState::Local { backend, .. } => {
4506 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4507 let staged_text = backend.load_index_text(repo_path).await;
4508 let diff_bases_change = if committed_text == staged_text {
4509 DiffBasesChange::SetBoth(committed_text)
4510 } else {
4511 DiffBasesChange::SetEach {
4512 index: staged_text,
4513 head: committed_text,
4514 }
4515 };
4516 anyhow::Ok(diff_bases_change)
4517 }
4518 RepositoryState::Remote { project_id, client } => {
4519 use proto::open_uncommitted_diff_response::Mode;
4520
4521 let response = client
4522 .request(proto::OpenUncommittedDiff {
4523 project_id: project_id.to_proto(),
4524 buffer_id: buffer_id.to_proto(),
4525 })
4526 .await?;
4527 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4528 let bases = match mode {
4529 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4530 Mode::IndexAndHead => DiffBasesChange::SetEach {
4531 head: response.committed_text,
4532 index: response.staged_text,
4533 },
4534 };
4535 Ok(bases)
4536 }
4537 }
4538 });
4539
4540 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4541 }
4542
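    /// Records that the given paths may have changed on disk and schedules a keyed
    /// job to refresh just their git statuses, emitting an update if any status
    /// actually changed.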
4543 fn paths_changed(
4544 &mut self,
4545 paths: Vec<RepoPath>,
4546 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4547 cx: &mut Context<Self>,
4548 ) {
4549 self.paths_needing_status_update.extend(paths);
4550
4551 let this = cx.weak_entity();
4552 let _ = self.send_keyed_job(
4553 Some(GitJobKey::RefreshStatuses),
4554 None,
4555 |state, mut cx| async move {
4556 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4557 (
4558 this.snapshot.clone(),
4559 mem::take(&mut this.paths_needing_status_update),
4560 )
4561 })?;
4562 let RepositoryState::Local { backend, .. } = state else {
4563 bail!("not a local repository")
4564 };
4565
4566 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4567 if paths.is_empty() {
4568 return Ok(());
4569 }
4570 let statuses = backend.status(&paths).await?;
4571
4572 let changed_path_statuses = cx
4573 .background_spawn(async move {
4574 let mut changed_path_statuses = Vec::new();
4575 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4576 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4577
4578 for (repo_path, status) in &*statuses.entries {
4579 changed_paths.remove(repo_path);
4580 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4581 && cursor.item().is_some_and(|entry| entry.status == *status)
4582 {
4583 continue;
4584 }
4585
4586 changed_path_statuses.push(Edit::Insert(StatusEntry {
4587 repo_path: repo_path.clone(),
4588 status: *status,
4589 }));
4590 }
4591 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4592 for path in changed_paths.into_iter() {
4593 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4594 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4595 }
4596 }
4597 changed_path_statuses
4598 })
4599 .await;
4600
4601 this.update(&mut cx, |this, cx| {
4602 if !changed_path_statuses.is_empty() {
4603 this.snapshot
4604 .statuses_by_path
4605 .edit(changed_path_statuses, &());
4606 this.snapshot.scan_id += 1;
4607 if let Some(updates_tx) = updates_tx {
4608 updates_tx
4609 .unbounded_send(DownstreamUpdate::UpdateRepository(
4610 this.snapshot.clone(),
4611 ))
4612 .ok();
4613 }
4614 }
4615 cx.emit(RepositoryEvent::Updated {
4616 full_scan: false,
4617 new_instance: false,
4618 });
4619 })
4620 },
4621 );
4622 }
4623
    /// Returns the currently running git command, if any, along with when it started.
4625 pub fn current_job(&self) -> Option<JobInfo> {
4626 self.active_jobs.values().next().cloned()
4627 }
4628
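    /// Enqueues a no-op job. Because the worker processes jobs sequentially, the
    /// returned receiver resolves only after all jobs queued ahead of it have been
    /// processed.
    ///
    /// A hypothetical use (assuming `repo: Entity<Repository>` and an async context):
    ///
    /// ```ignore
    /// // Flush all queued git jobs before reading repository state.
    /// let barrier = repo.update(cx, |repo, _| repo.barrier());
    /// barrier.await.ok();
    /// ```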
4629 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4630 self.send_job(None, |_, _| async {})
4631 }
4632}
4633
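/// Builds a permalink to a file that lives inside a published crate's unpacked
/// source, using the `.cargo_vcs_info.json` and `Cargo.toml` that cargo packages
/// alongside the code to recover the original repository and commit.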
4634fn get_permalink_in_rust_registry_src(
4635 provider_registry: Arc<GitHostingProviderRegistry>,
4636 path: PathBuf,
4637 selection: Range<u32>,
4638) -> Result<url::Url> {
4639 #[derive(Deserialize)]
4640 struct CargoVcsGit {
4641 sha1: String,
4642 }
4643
4644 #[derive(Deserialize)]
4645 struct CargoVcsInfo {
4646 git: CargoVcsGit,
4647 path_in_vcs: String,
4648 }
4649
4650 #[derive(Deserialize)]
4651 struct CargoPackage {
4652 repository: String,
4653 }
4654
4655 #[derive(Deserialize)]
4656 struct CargoToml {
4657 package: CargoPackage,
4658 }
4659
4660 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4661 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4662 Some((dir, json))
4663 }) else {
4664 bail!("No .cargo_vcs_info.json found in parent directories")
4665 };
4666 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4667 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4668 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4669 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4670 .context("parsing package.repository field of manifest")?;
4671 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4672 let permalink = provider.build_permalink(
4673 remote,
4674 BuildPermalinkParams {
4675 sha: &cargo_vcs_info.git.sha1,
4676 path: &path.to_string_lossy(),
4677 selection: Some(selection),
4678 },
4679 );
4680 Ok(permalink)
4681}
4682
4683fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4684 let Some(blame) = blame else {
4685 return proto::BlameBufferResponse {
4686 blame_response: None,
4687 };
4688 };
4689
4690 let entries = blame
4691 .entries
4692 .into_iter()
4693 .map(|entry| proto::BlameEntry {
4694 sha: entry.sha.as_bytes().into(),
4695 start_line: entry.range.start,
4696 end_line: entry.range.end,
4697 original_line_number: entry.original_line_number,
4698 author: entry.author,
4699 author_mail: entry.author_mail,
4700 author_time: entry.author_time,
4701 author_tz: entry.author_tz,
4702 committer: entry.committer_name,
4703 committer_mail: entry.committer_email,
4704 committer_time: entry.committer_time,
4705 committer_tz: entry.committer_tz,
4706 summary: entry.summary,
4707 previous: entry.previous,
4708 filename: entry.filename,
4709 })
4710 .collect::<Vec<_>>();
4711
4712 let messages = blame
4713 .messages
4714 .into_iter()
4715 .map(|(oid, message)| proto::CommitMessage {
4716 oid: oid.as_bytes().into(),
4717 message,
4718 })
4719 .collect::<Vec<_>>();
4720
4721 proto::BlameBufferResponse {
4722 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4723 entries,
4724 messages,
4725 remote_url: blame.remote_url,
4726 }),
4727 }
4728}
4729
4730fn deserialize_blame_buffer_response(
4731 response: proto::BlameBufferResponse,
4732) -> Option<git::blame::Blame> {
4733 let response = response.blame_response?;
4734 let entries = response
4735 .entries
4736 .into_iter()
4737 .filter_map(|entry| {
4738 Some(git::blame::BlameEntry {
4739 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4740 range: entry.start_line..entry.end_line,
4741 original_line_number: entry.original_line_number,
4742 committer_name: entry.committer,
4743 committer_time: entry.committer_time,
4744 committer_tz: entry.committer_tz,
4745 committer_email: entry.committer_mail,
4746 author: entry.author,
4747 author_mail: entry.author_mail,
4748 author_time: entry.author_time,
4749 author_tz: entry.author_tz,
4750 summary: entry.summary,
4751 previous: entry.previous,
4752 filename: entry.filename,
4753 })
4754 })
4755 .collect::<Vec<_>>();
4756
4757 let messages = response
4758 .messages
4759 .into_iter()
4760 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4761 .collect::<HashMap<_, _>>();
4762
4763 Some(Blame {
4764 entries,
4765 messages,
4766 remote_url: response.remote_url,
4767 })
4768}
4769
4770fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4771 proto::Branch {
4772 is_head: branch.is_head,
4773 ref_name: branch.ref_name.to_string(),
4774 unix_timestamp: branch
4775 .most_recent_commit
4776 .as_ref()
4777 .map(|commit| commit.commit_timestamp as u64),
4778 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4779 ref_name: upstream.ref_name.to_string(),
4780 tracking: upstream
4781 .tracking
4782 .status()
4783 .map(|upstream| proto::UpstreamTracking {
4784 ahead: upstream.ahead as u64,
4785 behind: upstream.behind as u64,
4786 }),
4787 }),
4788 most_recent_commit: branch
4789 .most_recent_commit
4790 .as_ref()
4791 .map(|commit| proto::CommitSummary {
4792 sha: commit.sha.to_string(),
4793 subject: commit.subject.to_string(),
4794 commit_timestamp: commit.commit_timestamp,
4795 }),
4796 }
4797}
4798
4799fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
4800 git::repository::Branch {
4801 is_head: proto.is_head,
4802 ref_name: proto.ref_name.clone().into(),
4803 upstream: proto
4804 .upstream
4805 .as_ref()
4806 .map(|upstream| git::repository::Upstream {
4807 ref_name: upstream.ref_name.to_string().into(),
4808 tracking: upstream
4809 .tracking
4810 .as_ref()
4811 .map(|tracking| {
4812 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
4813 ahead: tracking.ahead as u32,
4814 behind: tracking.behind as u32,
4815 })
4816 })
4817 .unwrap_or(git::repository::UpstreamTracking::Gone),
4818 }),
4819 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
4820 git::repository::CommitSummary {
4821 sha: commit.sha.to_string().into(),
4822 subject: commit.subject.to_string().into(),
4823 commit_timestamp: commit.commit_timestamp,
4824 has_parent: true,
4825 }
4826 }),
4827 }
4828}
4829
4830fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
4831 proto::GitCommitDetails {
4832 sha: commit.sha.to_string(),
4833 message: commit.message.to_string(),
4834 commit_timestamp: commit.commit_timestamp,
4835 author_email: commit.author_email.to_string(),
4836 author_name: commit.author_name.to_string(),
4837 }
4838}
4839
4840fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
4841 CommitDetails {
4842 sha: proto.sha.clone().into(),
4843 message: proto.message.clone().into(),
4844 commit_timestamp: proto.commit_timestamp,
4845 author_email: proto.author_email.clone().into(),
4846 author_name: proto.author_name.clone().into(),
4847 }
4848}
4849
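/// Reads branch, status, merge, and HEAD information from the backend and builds a
/// fresh `RepositorySnapshot`, returning it along with the events that describe how
/// it differs from `prev_snapshot`.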
4850async fn compute_snapshot(
4851 id: RepositoryId,
4852 work_directory_abs_path: Arc<Path>,
4853 prev_snapshot: RepositorySnapshot,
4854 backend: Arc<dyn GitRepository>,
4855) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
4856 let mut events = Vec::new();
4857 let branches = backend.branches().await?;
4858 let branch = branches.into_iter().find(|branch| branch.is_head);
4859 let statuses = backend
4860 .status(std::slice::from_ref(&WORK_DIRECTORY_REPO_PATH))
4861 .await?;
4862 let statuses_by_path = SumTree::from_iter(
4863 statuses
4864 .entries
4865 .iter()
4866 .map(|(repo_path, status)| StatusEntry {
4867 repo_path: repo_path.clone(),
4868 status: *status,
4869 }),
4870 &(),
4871 );
4872 let (merge_details, merge_heads_changed) =
4873 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
4874 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
4875
4876 if merge_heads_changed
4877 || branch != prev_snapshot.branch
4878 || statuses_by_path != prev_snapshot.statuses_by_path
4879 {
4880 events.push(RepositoryEvent::Updated {
4881 full_scan: true,
4882 new_instance: false,
4883 });
4884 }
4885
    // Cache merge conflict paths so that staging or unstaging files doesn't change
    // them; they are only refreshed when the merge heads change (e.g. at commit time).
4888 if merge_heads_changed {
4889 events.push(RepositoryEvent::MergeHeadsChanged);
4890 }
4891
    // Load the HEAD commit directly; useful when `branch` is `None` (detached HEAD state).
4893 let head_commit = match backend.head_sha().await {
4894 Some(head_sha) => backend.show(head_sha).await.log_err(),
4895 None => None,
4896 };
4897
4898 // Used by edit prediction data collection
4899 let remote_origin_url = backend.remote_url("origin");
4900 let remote_upstream_url = backend.remote_url("upstream");
4901
4902 let snapshot = RepositorySnapshot {
4903 id,
4904 statuses_by_path,
4905 work_directory_abs_path,
4906 scan_id: prev_snapshot.scan_id + 1,
4907 branch,
4908 head_commit,
4909 merge: merge_details,
4910 remote_origin_url,
4911 remote_upstream_url,
4912 };
4913
4914 Ok((snapshot, events))
4915}
4916
4917fn status_from_proto(
4918 simple_status: i32,
4919 status: Option<proto::GitFileStatus>,
4920) -> anyhow::Result<FileStatus> {
4921 use proto::git_file_status::Variant;
4922
4923 let Some(variant) = status.and_then(|status| status.variant) else {
4924 let code = proto::GitStatus::from_i32(simple_status)
4925 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
4926 let result = match code {
4927 proto::GitStatus::Added => TrackedStatus {
4928 worktree_status: StatusCode::Added,
4929 index_status: StatusCode::Unmodified,
4930 }
4931 .into(),
4932 proto::GitStatus::Modified => TrackedStatus {
4933 worktree_status: StatusCode::Modified,
4934 index_status: StatusCode::Unmodified,
4935 }
4936 .into(),
4937 proto::GitStatus::Conflict => UnmergedStatus {
4938 first_head: UnmergedStatusCode::Updated,
4939 second_head: UnmergedStatusCode::Updated,
4940 }
4941 .into(),
4942 proto::GitStatus::Deleted => TrackedStatus {
4943 worktree_status: StatusCode::Deleted,
4944 index_status: StatusCode::Unmodified,
4945 }
4946 .into(),
4947 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
4948 };
4949 return Ok(result);
4950 };
4951
4952 let result = match variant {
4953 Variant::Untracked(_) => FileStatus::Untracked,
4954 Variant::Ignored(_) => FileStatus::Ignored,
4955 Variant::Unmerged(unmerged) => {
4956 let [first_head, second_head] =
4957 [unmerged.first_head, unmerged.second_head].map(|head| {
4958 let code = proto::GitStatus::from_i32(head)
4959 .with_context(|| format!("Invalid git status code: {head}"))?;
4960 let result = match code {
4961 proto::GitStatus::Added => UnmergedStatusCode::Added,
4962 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
4963 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
4964 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
4965 };
4966 Ok(result)
4967 });
4968 let [first_head, second_head] = [first_head?, second_head?];
4969 UnmergedStatus {
4970 first_head,
4971 second_head,
4972 }
4973 .into()
4974 }
4975 Variant::Tracked(tracked) => {
4976 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
4977 .map(|status| {
4978 let code = proto::GitStatus::from_i32(status)
4979 .with_context(|| format!("Invalid git status code: {status}"))?;
4980 let result = match code {
4981 proto::GitStatus::Modified => StatusCode::Modified,
4982 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
4983 proto::GitStatus::Added => StatusCode::Added,
4984 proto::GitStatus::Deleted => StatusCode::Deleted,
4985 proto::GitStatus::Renamed => StatusCode::Renamed,
4986 proto::GitStatus::Copied => StatusCode::Copied,
4987 proto::GitStatus::Unmodified => StatusCode::Unmodified,
4988 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
4989 };
4990 Ok(result)
4991 });
4992 let [index_status, worktree_status] = [index_status?, worktree_status?];
4993 TrackedStatus {
4994 index_status,
4995 worktree_status,
4996 }
4997 .into()
4998 }
4999 };
5000 Ok(result)
5001}
5002
5003fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5004 use proto::git_file_status::{Tracked, Unmerged, Variant};
5005
5006 let variant = match status {
5007 FileStatus::Untracked => Variant::Untracked(Default::default()),
5008 FileStatus::Ignored => Variant::Ignored(Default::default()),
5009 FileStatus::Unmerged(UnmergedStatus {
5010 first_head,
5011 second_head,
5012 }) => Variant::Unmerged(Unmerged {
5013 first_head: unmerged_status_to_proto(first_head),
5014 second_head: unmerged_status_to_proto(second_head),
5015 }),
5016 FileStatus::Tracked(TrackedStatus {
5017 index_status,
5018 worktree_status,
5019 }) => Variant::Tracked(Tracked {
5020 index_status: tracked_status_to_proto(index_status),
5021 worktree_status: tracked_status_to_proto(worktree_status),
5022 }),
5023 };
5024 proto::GitFileStatus {
5025 variant: Some(variant),
5026 }
5027}
5028
5029fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5030 match code {
5031 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5032 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5033 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5034 }
5035}
5036
5037fn tracked_status_to_proto(code: StatusCode) -> i32 {
5038 match code {
5039 StatusCode::Added => proto::GitStatus::Added as _,
5040 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5041 StatusCode::Modified => proto::GitStatus::Modified as _,
5042 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5043 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5044 StatusCode::Copied => proto::GitStatus::Copied as _,
5045 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5046 }
5047}