1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::AskPassDelegate;
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 status::{
32 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
33 },
34};
35use gpui::{
36 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
37 WeakEntity,
38};
39use language::{
40 Buffer, BufferEvent, Language, LanguageRegistry,
41 proto::{deserialize_version, serialize_version},
42};
43use parking_lot::Mutex;
44use postage::stream::Stream as _;
45use rpc::{
46 AnyProtoClient, TypedEnvelope,
47 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
48};
49use serde::Deserialize;
50use std::{
51 cmp::Ordering,
52 collections::{BTreeSet, VecDeque},
53 future::Future,
54 mem,
55 ops::Range,
56 path::{Path, PathBuf},
57 sync::{
58 Arc,
59 atomic::{self, AtomicU64},
60 },
61 time::Instant,
62};
63use sum_tree::{Edit, SumTree, TreeSet};
64use text::{Bias, BufferId};
65use util::{ResultExt, debug_panic, post_inc};
66use worktree::{
67 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
68 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
69};
70
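/// Tracks the git repositories associated with a project's worktrees, along
/// with per-buffer git state such as unstaged/uncommitted diffs and merge
/// conflicts, for local, SSH, and remote (collaborative) projects.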
71pub struct GitStore {
72 state: GitStoreState,
73 buffer_store: Entity<BufferStore>,
74 worktree_store: Entity<WorktreeStore>,
75 repositories: HashMap<RepositoryId, Entity<Repository>>,
76 active_repo_id: Option<RepositoryId>,
77 #[allow(clippy::type_complexity)]
78 loading_diffs:
79 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
80 diffs: HashMap<BufferId, Entity<BufferGitState>>,
81 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
82 _subscriptions: Vec<Subscription>,
83}
84
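/// Diffs for a single buffer that have been shared with a remote peer.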
85#[derive(Default)]
86struct SharedDiffs {
87 unstaged: Option<Entity<BufferDiff>>,
88 uncommitted: Option<Entity<BufferDiff>>,
89}
90
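/// Git state tracked for a single open buffer: weak handles to its diffs and
/// conflict set, plus the bookkeeping needed to keep them up to date as the
/// buffer and repository change.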
91struct BufferGitState {
92 unstaged_diff: Option<WeakEntity<BufferDiff>>,
93 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
94 conflict_set: Option<WeakEntity<ConflictSet>>,
95 recalculate_diff_task: Option<Task<Result<()>>>,
96 reparse_conflict_markers_task: Option<Task<Result<()>>>,
97 language: Option<Arc<Language>>,
98 language_registry: Option<Arc<LanguageRegistry>>,
99 conflict_updated_futures: Vec<oneshot::Sender<()>>,
100 recalculating_tx: postage::watch::Sender<bool>,
101
    /// These operation counts are used to ensure that the head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, `hunk_staging_operation_count_as_of_write` is updated to
    /// reflect the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,
112
113 head_text: Option<Arc<String>>,
114 index_text: Option<Arc<String>>,
115 head_changed: bool,
116 index_changed: bool,
117 language_changed: bool,
118}
119
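/// Describes which of a buffer's diff base texts (index and/or HEAD) have changed.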
120#[derive(Clone, Debug)]
121enum DiffBasesChange {
122 SetIndex(Option<String>),
123 SetHead(Option<String>),
124 SetEach {
125 index: Option<String>,
126 head: Option<String>,
127 },
128 SetBoth(Option<String>),
129}
130
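/// The kind of diff maintained for a buffer: against the index (unstaged) or
/// against HEAD (uncommitted).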
131#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
132enum DiffKind {
133 Unstaged,
134 Uncommitted,
135}
136
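/// The store's relationship to the project: `Local` operates on the filesystem
/// directly, `Ssh` forwards work to an upstream client and may re-share state
/// downstream, and `Remote` only forwards work to an upstream client.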
137enum GitStoreState {
138 Local {
139 next_repository_id: Arc<AtomicU64>,
140 downstream: Option<LocalDownstreamState>,
141 project_environment: Entity<ProjectEnvironment>,
142 fs: Arc<dyn Fs>,
143 },
144 Ssh {
145 upstream_client: AnyProtoClient,
146 upstream_project_id: ProjectId,
147 downstream: Option<(AnyProtoClient, ProjectId)>,
148 },
149 Remote {
150 upstream_client: AnyProtoClient,
151 upstream_project_id: ProjectId,
152 },
153}
154
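/// A repository update to be forwarded to downstream collaborators.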
155enum DownstreamUpdate {
156 UpdateRepository(RepositorySnapshot),
157 RemoveRepository(RepositoryId),
158}
159
160struct LocalDownstreamState {
161 client: AnyProtoClient,
162 project_id: ProjectId,
163 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
164 _task: Task<Result<()>>,
165}
166
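/// A checkpoint of every repository in the store, keyed by each repository's
/// work-directory path.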
167#[derive(Clone, Debug)]
168pub struct GitStoreCheckpoint {
169 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
170}
171
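/// The git status of a single path within a repository.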
172#[derive(Clone, Debug, PartialEq, Eq)]
173pub struct StatusEntry {
174 pub repo_path: RepoPath,
175 pub status: FileStatus,
176}
177
178impl StatusEntry {
179 fn to_proto(&self) -> proto::StatusEntry {
180 let simple_status = match self.status {
181 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
182 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
183 FileStatus::Tracked(TrackedStatus {
184 index_status,
185 worktree_status,
186 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
187 worktree_status
188 } else {
189 index_status
190 }),
191 };
192
193 proto::StatusEntry {
194 repo_path: self.repo_path.as_ref().to_proto(),
195 simple_status,
196 status: Some(status_to_proto(self.status)),
197 }
198 }
199}
200
201impl TryFrom<proto::StatusEntry> for StatusEntry {
202 type Error = anyhow::Error;
203
204 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
205 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
206 let status = status_from_proto(value.simple_status, value.status)?;
207 Ok(Self { repo_path, status })
208 }
209}
210
211impl sum_tree::Item for StatusEntry {
212 type Summary = PathSummary<GitSummary>;
213
214 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
215 PathSummary {
216 max_path: self.repo_path.0.clone(),
217 item_summary: self.status.summary(),
218 }
219 }
220}
221
222impl sum_tree::KeyedItem for StatusEntry {
223 type Key = PathKey;
224
225 fn key(&self) -> Self::Key {
226 PathKey(self.repo_path.0.clone())
227 }
228}
229
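/// A unique identifier for a repository within a `GitStore`.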
230#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
231pub struct RepositoryId(pub u64);
232
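/// Details about an in-progress merge: the conflicted paths, the prepared merge
/// message, and the merge heads, if known.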
233#[derive(Clone, Debug, Default, PartialEq, Eq)]
234pub struct MergeDetails {
235 pub conflicted_paths: TreeSet<RepoPath>,
236 pub message: Option<SharedString>,
237 pub heads: Vec<Option<SharedString>>,
238}
239
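/// An immutable snapshot of a repository's state: per-path statuses, the
/// current branch and head commit, merge details, and remote URLs.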
240#[derive(Clone, Debug, PartialEq, Eq)]
241pub struct RepositorySnapshot {
242 pub id: RepositoryId,
243 pub statuses_by_path: SumTree<StatusEntry>,
244 pub work_directory_abs_path: Arc<Path>,
245 pub branch: Option<Branch>,
246 pub head_commit: Option<CommitDetails>,
247 pub scan_id: u64,
248 pub merge: MergeDetails,
249 pub remote_origin_url: Option<String>,
250 pub remote_upstream_url: Option<String>,
251}
252
253type JobId = u64;
254
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct JobInfo {
257 pub start: Instant,
258 pub message: SharedString,
259}
260
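/// A git repository tracked by the `GitStore`. Dereferences to its latest
/// `RepositorySnapshot` and runs git operations through a job queue.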
261pub struct Repository {
262 this: WeakEntity<Self>,
263 snapshot: RepositorySnapshot,
264 commit_message_buffer: Option<Entity<Buffer>>,
265 git_store: WeakEntity<GitStore>,
266 // For a local repository, holds paths that have had worktree events since the last status scan completed,
267 // and that should be examined during the next status scan.
268 paths_needing_status_update: BTreeSet<RepoPath>,
269 job_sender: mpsc::UnboundedSender<GitJob>,
270 active_jobs: HashMap<JobId, JobInfo>,
271 job_id: JobId,
272 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
273 latest_askpass_id: u64,
274}
275
276impl std::ops::Deref for Repository {
277 type Target = RepositorySnapshot;
278
279 fn deref(&self) -> &Self::Target {
280 &self.snapshot
281 }
282}
283
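/// How git commands are executed for a repository: directly against a local
/// backend, or by sending requests to a remote project over RPC.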
284#[derive(Clone)]
285pub enum RepositoryState {
286 Local {
287 backend: Arc<dyn GitRepository>,
288 environment: Arc<HashMap<String, String>>,
289 },
290 Remote {
291 project_id: ProjectId,
292 client: AnyProtoClient,
293 },
294}
295
296#[derive(Clone, Debug)]
297pub enum RepositoryEvent {
298 Updated { full_scan: bool, new_instance: bool },
299 MergeHeadsChanged,
300}
301
302#[derive(Clone, Debug)]
303pub struct JobsUpdated;
304
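/// Events emitted by the `GitStore` as repositories are added, removed, or
/// updated, and as buffer-level git state changes.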
305#[derive(Debug)]
306pub enum GitStoreEvent {
307 ActiveRepositoryChanged(Option<RepositoryId>),
308 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
309 RepositoryAdded(RepositoryId),
310 RepositoryRemoved(RepositoryId),
311 IndexWriteError(anyhow::Error),
312 JobsUpdated,
313 ConflictsUpdated,
314}
315
316impl EventEmitter<RepositoryEvent> for Repository {}
317impl EventEmitter<JobsUpdated> for Repository {}
318impl EventEmitter<GitStoreEvent> for GitStore {}
319
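/// A unit of git work to run against a repository, optionally tagged with a
/// key so that related queued jobs can be identified.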
320pub struct GitJob {
321 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
322 key: Option<GitJobKey>,
323}
324
325#[derive(PartialEq, Eq)]
326enum GitJobKey {
327 WriteIndex(RepoPath),
328 ReloadBufferDiffBases,
329 RefreshStatuses,
330 ReloadGitState,
331}
332
333impl GitStore {
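    /// Creates a `GitStore` for a local project, which operates on repositories
    /// directly through the filesystem.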
334 pub fn local(
335 worktree_store: &Entity<WorktreeStore>,
336 buffer_store: Entity<BufferStore>,
337 environment: Entity<ProjectEnvironment>,
338 fs: Arc<dyn Fs>,
339 cx: &mut Context<Self>,
340 ) -> Self {
341 Self::new(
342 worktree_store.clone(),
343 buffer_store,
344 GitStoreState::Local {
345 next_repository_id: Arc::new(AtomicU64::new(1)),
346 downstream: None,
347 project_environment: environment,
348 fs,
349 },
350 cx,
351 )
352 }
353
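    /// Creates a `GitStore` for a collaborative (remote) project, forwarding all
    /// git operations upstream.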
354 pub fn remote(
355 worktree_store: &Entity<WorktreeStore>,
356 buffer_store: Entity<BufferStore>,
357 upstream_client: AnyProtoClient,
358 project_id: ProjectId,
359 cx: &mut Context<Self>,
360 ) -> Self {
361 Self::new(
362 worktree_store.clone(),
363 buffer_store,
364 GitStoreState::Remote {
365 upstream_client,
366 upstream_project_id: project_id,
367 },
368 cx,
369 )
370 }
371
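    /// Creates a `GitStore` for a project opened over SSH, forwarding git
    /// operations to the remote counterpart of the project.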
372 pub fn ssh(
373 worktree_store: &Entity<WorktreeStore>,
374 buffer_store: Entity<BufferStore>,
375 upstream_client: AnyProtoClient,
376 cx: &mut Context<Self>,
377 ) -> Self {
378 Self::new(
379 worktree_store.clone(),
380 buffer_store,
381 GitStoreState::Ssh {
382 upstream_client,
383 upstream_project_id: ProjectId(SSH_PROJECT_ID),
384 downstream: None,
385 },
386 cx,
387 )
388 }
389
390 fn new(
391 worktree_store: Entity<WorktreeStore>,
392 buffer_store: Entity<BufferStore>,
393 state: GitStoreState,
394 cx: &mut Context<Self>,
395 ) -> Self {
396 let _subscriptions = vec![
397 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
398 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
399 ];
400
401 GitStore {
402 state,
403 buffer_store,
404 worktree_store,
405 repositories: HashMap::default(),
406 active_repo_id: None,
407 _subscriptions,
408 loading_diffs: HashMap::default(),
409 shared_diffs: HashMap::default(),
410 diffs: HashMap::default(),
411 }
412 }
413
414 pub fn init(client: &AnyProtoClient) {
415 client.add_entity_request_handler(Self::handle_get_remotes);
416 client.add_entity_request_handler(Self::handle_get_branches);
417 client.add_entity_request_handler(Self::handle_change_branch);
418 client.add_entity_request_handler(Self::handle_create_branch);
419 client.add_entity_request_handler(Self::handle_git_init);
420 client.add_entity_request_handler(Self::handle_push);
421 client.add_entity_request_handler(Self::handle_pull);
422 client.add_entity_request_handler(Self::handle_fetch);
423 client.add_entity_request_handler(Self::handle_stage);
424 client.add_entity_request_handler(Self::handle_unstage);
425 client.add_entity_request_handler(Self::handle_stash);
426 client.add_entity_request_handler(Self::handle_stash_pop);
427 client.add_entity_request_handler(Self::handle_commit);
428 client.add_entity_request_handler(Self::handle_reset);
429 client.add_entity_request_handler(Self::handle_show);
430 client.add_entity_request_handler(Self::handle_load_commit_diff);
431 client.add_entity_request_handler(Self::handle_checkout_files);
432 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
433 client.add_entity_request_handler(Self::handle_set_index_text);
434 client.add_entity_request_handler(Self::handle_askpass);
435 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
436 client.add_entity_request_handler(Self::handle_git_diff);
437 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
438 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
439 client.add_entity_message_handler(Self::handle_update_diff_bases);
440 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
441 client.add_entity_request_handler(Self::handle_blame_buffer);
442 client.add_entity_message_handler(Self::handle_update_repository);
443 client.add_entity_message_handler(Self::handle_remove_repository);
444 client.add_entity_request_handler(Self::handle_git_clone);
445 }
446
447 pub fn is_local(&self) -> bool {
448 matches!(self.state, GitStoreState::Local { .. })
449 }
450
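    /// Starts sharing repository state with a downstream client, beginning with
    /// each repository's current state.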
451 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
452 match &mut self.state {
453 GitStoreState::Ssh {
454 downstream: downstream_client,
455 ..
456 } => {
457 for repo in self.repositories.values() {
458 let update = repo.read(cx).snapshot.initial_update(project_id);
459 for update in split_repository_update(update) {
460 client.send(update).log_err();
461 }
462 }
463 *downstream_client = Some((client, ProjectId(project_id)));
464 }
465 GitStoreState::Local {
466 downstream: downstream_client,
467 ..
468 } => {
469 let mut snapshots = HashMap::default();
470 let (updates_tx, mut updates_rx) = mpsc::unbounded();
471 for repo in self.repositories.values() {
472 updates_tx
473 .unbounded_send(DownstreamUpdate::UpdateRepository(
474 repo.read(cx).snapshot.clone(),
475 ))
476 .ok();
477 }
478 *downstream_client = Some(LocalDownstreamState {
479 client: client.clone(),
480 project_id: ProjectId(project_id),
481 updates_tx,
482 _task: cx.spawn(async move |this, cx| {
483 cx.background_spawn(async move {
484 while let Some(update) = updates_rx.next().await {
485 match update {
486 DownstreamUpdate::UpdateRepository(snapshot) => {
487 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
488 {
489 let update =
490 snapshot.build_update(old_snapshot, project_id);
491 *old_snapshot = snapshot;
492 for update in split_repository_update(update) {
493 client.send(update)?;
494 }
495 } else {
496 let update = snapshot.initial_update(project_id);
497 for update in split_repository_update(update) {
498 client.send(update)?;
499 }
500 snapshots.insert(snapshot.id, snapshot);
501 }
502 }
503 DownstreamUpdate::RemoveRepository(id) => {
504 client.send(proto::RemoveRepository {
505 project_id,
506 id: id.to_proto(),
507 })?;
508 }
509 }
510 }
511 anyhow::Ok(())
512 })
513 .await
514 .ok();
515 this.update(cx, |this, _| {
516 if let GitStoreState::Local {
517 downstream: downstream_client,
518 ..
519 } = &mut this.state
520 {
521 downstream_client.take();
522 } else {
523 unreachable!("unshared called on remote store");
524 }
525 })
526 }),
527 });
528 }
529 GitStoreState::Remote { .. } => {
530 debug_panic!("shared called on remote store");
531 }
532 }
533 }
534
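    /// Stops sharing repository state with downstream clients and drops any
    /// diffs that were shared with them.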
535 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
536 match &mut self.state {
537 GitStoreState::Local {
538 downstream: downstream_client,
539 ..
540 } => {
541 downstream_client.take();
542 }
543 GitStoreState::Ssh {
544 downstream: downstream_client,
545 ..
546 } => {
547 downstream_client.take();
548 }
549 GitStoreState::Remote { .. } => {
550 debug_panic!("unshared called on remote store");
551 }
552 }
553 self.shared_diffs.clear();
554 }
555
556 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
557 self.shared_diffs.remove(peer_id);
558 }
559
560 pub fn active_repository(&self) -> Option<Entity<Repository>> {
561 self.active_repo_id
562 .as_ref()
            .map(|id| self.repositories[id].clone())
564 }
565
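    /// Returns the diff of the buffer's contents against the index, creating it
    /// (and loading the staged text) if it does not already exist.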
566 pub fn open_unstaged_diff(
567 &mut self,
568 buffer: Entity<Buffer>,
569 cx: &mut Context<Self>,
570 ) -> Task<Result<Entity<BufferDiff>>> {
571 let buffer_id = buffer.read(cx).remote_id();
572 if let Some(diff_state) = self.diffs.get(&buffer_id) {
573 if let Some(unstaged_diff) = diff_state
574 .read(cx)
575 .unstaged_diff
576 .as_ref()
577 .and_then(|weak| weak.upgrade())
578 {
579 if let Some(task) =
580 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
581 {
582 return cx.background_executor().spawn(async move {
583 task.await;
584 Ok(unstaged_diff)
585 });
586 }
587 return Task::ready(Ok(unstaged_diff));
588 }
589 }
590
591 let Some((repo, repo_path)) =
592 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
593 else {
594 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
595 };
596
597 let task = self
598 .loading_diffs
599 .entry((buffer_id, DiffKind::Unstaged))
600 .or_insert_with(|| {
601 let staged_text = repo.update(cx, |repo, cx| {
602 repo.load_staged_text(buffer_id, repo_path, cx)
603 });
604 cx.spawn(async move |this, cx| {
605 Self::open_diff_internal(
606 this,
607 DiffKind::Unstaged,
608 staged_text.await.map(DiffBasesChange::SetIndex),
609 buffer,
610 cx,
611 )
612 .await
613 .map_err(Arc::new)
614 })
615 .shared()
616 })
617 .clone();
618
619 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
620 }
621
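    /// Returns the diff of the buffer's contents against HEAD, creating it (and
    /// loading the committed text) if it does not already exist.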
622 pub fn open_uncommitted_diff(
623 &mut self,
624 buffer: Entity<Buffer>,
625 cx: &mut Context<Self>,
626 ) -> Task<Result<Entity<BufferDiff>>> {
627 let buffer_id = buffer.read(cx).remote_id();
628
629 if let Some(diff_state) = self.diffs.get(&buffer_id) {
630 if let Some(uncommitted_diff) = diff_state
631 .read(cx)
632 .uncommitted_diff
633 .as_ref()
634 .and_then(|weak| weak.upgrade())
635 {
636 if let Some(task) =
637 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
638 {
639 return cx.background_executor().spawn(async move {
640 task.await;
641 Ok(uncommitted_diff)
642 });
643 }
644 return Task::ready(Ok(uncommitted_diff));
645 }
646 }
647
648 let Some((repo, repo_path)) =
649 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
650 else {
651 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
652 };
653
654 let task = self
655 .loading_diffs
656 .entry((buffer_id, DiffKind::Uncommitted))
657 .or_insert_with(|| {
658 let changes = repo.update(cx, |repo, cx| {
659 repo.load_committed_text(buffer_id, repo_path, cx)
660 });
661
662 cx.spawn(async move |this, cx| {
663 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
664 .await
665 .map_err(Arc::new)
666 })
667 .shared()
668 })
669 .clone();
670
671 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
672 }
673
674 async fn open_diff_internal(
675 this: WeakEntity<Self>,
676 kind: DiffKind,
677 texts: Result<DiffBasesChange>,
678 buffer_entity: Entity<Buffer>,
679 cx: &mut AsyncApp,
680 ) -> Result<Entity<BufferDiff>> {
681 let diff_bases_change = match texts {
682 Err(e) => {
683 this.update(cx, |this, cx| {
684 let buffer = buffer_entity.read(cx);
685 let buffer_id = buffer.remote_id();
686 this.loading_diffs.remove(&(buffer_id, kind));
687 })?;
688 return Err(e);
689 }
690 Ok(change) => change,
691 };
692
693 this.update(cx, |this, cx| {
694 let buffer = buffer_entity.read(cx);
695 let buffer_id = buffer.remote_id();
696 let language = buffer.language().cloned();
697 let language_registry = buffer.language_registry();
698 let text_snapshot = buffer.text_snapshot();
699 this.loading_diffs.remove(&(buffer_id, kind));
700
701 let git_store = cx.weak_entity();
702 let diff_state = this
703 .diffs
704 .entry(buffer_id)
705 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
706
707 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
708
709 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
710 diff_state.update(cx, |diff_state, cx| {
711 diff_state.language = language;
712 diff_state.language_registry = language_registry;
713
714 match kind {
715 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
716 DiffKind::Uncommitted => {
717 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
718 diff
719 } else {
720 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
721 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
722 unstaged_diff
723 };
724
725 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
726 diff_state.uncommitted_diff = Some(diff.downgrade())
727 }
728 }
729
730 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
731 let rx = diff_state.wait_for_recalculation();
732
733 anyhow::Ok(async move {
734 if let Some(rx) = rx {
735 rx.await;
736 }
737 Ok(diff)
738 })
739 })
740 })??
741 .await
742 }
743
744 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
745 let diff_state = self.diffs.get(&buffer_id)?;
746 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
747 }
748
749 pub fn get_uncommitted_diff(
750 &self,
751 buffer_id: BufferId,
752 cx: &App,
753 ) -> Option<Entity<BufferDiff>> {
754 let diff_state = self.diffs.get(&buffer_id)?;
755 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
756 }
757
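    /// Returns the set of merge conflict regions for the buffer, creating it and
    /// parsing the buffer's conflict markers if necessary.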
758 pub fn open_conflict_set(
759 &mut self,
760 buffer: Entity<Buffer>,
761 cx: &mut Context<Self>,
762 ) -> Entity<ConflictSet> {
763 log::debug!("open conflict set");
764 let buffer_id = buffer.read(cx).remote_id();
765
766 if let Some(git_state) = self.diffs.get(&buffer_id) {
767 if let Some(conflict_set) = git_state
768 .read(cx)
769 .conflict_set
770 .as_ref()
771 .and_then(|weak| weak.upgrade())
772 {
773 let conflict_set = conflict_set.clone();
774 let buffer_snapshot = buffer.read(cx).text_snapshot();
775
776 git_state.update(cx, |state, cx| {
777 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
778 });
779
780 return conflict_set;
781 }
782 }
783
784 let is_unmerged = self
785 .repository_and_path_for_buffer_id(buffer_id, cx)
786 .map_or(false, |(repo, path)| {
787 repo.read(cx).snapshot.has_conflict(&path)
788 });
789 let git_store = cx.weak_entity();
790 let buffer_git_state = self
791 .diffs
792 .entry(buffer_id)
793 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
794 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
795
796 self._subscriptions
797 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
798 cx.emit(GitStoreEvent::ConflictsUpdated);
799 }));
800
801 buffer_git_state.update(cx, |state, cx| {
802 state.conflict_set = Some(conflict_set.downgrade());
803 let buffer_snapshot = buffer.read(cx).text_snapshot();
804 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
805 });
806
807 conflict_set
808 }
809
810 pub fn project_path_git_status(
811 &self,
812 project_path: &ProjectPath,
813 cx: &App,
814 ) -> Option<FileStatus> {
815 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
816 Some(repo.read(cx).status_for_path(&repo_path)?.status)
817 }
818
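    /// Takes a checkpoint of every repository in the store.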
819 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
820 let mut work_directory_abs_paths = Vec::new();
821 let mut checkpoints = Vec::new();
822 for repository in self.repositories.values() {
823 repository.update(cx, |repository, _| {
824 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
825 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
826 });
827 }
828
829 cx.background_executor().spawn(async move {
830 let checkpoints = future::try_join_all(checkpoints).await?;
831 Ok(GitStoreCheckpoint {
832 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
833 .into_iter()
834 .zip(checkpoints)
835 .collect(),
836 })
837 })
838 }
839
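    /// Restores each repository to the state recorded in the given checkpoint.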
840 pub fn restore_checkpoint(
841 &self,
842 checkpoint: GitStoreCheckpoint,
843 cx: &mut App,
844 ) -> Task<Result<()>> {
845 let repositories_by_work_dir_abs_path = self
846 .repositories
847 .values()
848 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
849 .collect::<HashMap<_, _>>();
850
851 let mut tasks = Vec::new();
852 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
853 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
854 let restore = repository.update(cx, |repository, _| {
855 repository.restore_checkpoint(checkpoint)
856 });
857 tasks.push(async move { restore.await? });
858 }
859 }
860 cx.background_spawn(async move {
861 future::try_join_all(tasks).await?;
862 Ok(())
863 })
864 }
865
866 /// Compares two checkpoints, returning true if they are equal.
867 pub fn compare_checkpoints(
868 &self,
869 left: GitStoreCheckpoint,
870 mut right: GitStoreCheckpoint,
871 cx: &mut App,
872 ) -> Task<Result<bool>> {
873 let repositories_by_work_dir_abs_path = self
874 .repositories
875 .values()
876 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
877 .collect::<HashMap<_, _>>();
878
879 let mut tasks = Vec::new();
880 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
881 if let Some(right_checkpoint) = right
882 .checkpoints_by_work_dir_abs_path
883 .remove(&work_dir_abs_path)
884 {
885 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
886 {
887 let compare = repository.update(cx, |repository, _| {
888 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
889 });
890
891 tasks.push(async move { compare.await? });
892 }
893 } else {
894 return Task::ready(Ok(false));
895 }
896 }
897 cx.background_spawn(async move {
898 Ok(future::try_join_all(tasks)
899 .await?
900 .into_iter()
901 .all(|result| result))
902 })
903 }
904
905 /// Blames a buffer.
906 pub fn blame_buffer(
907 &self,
908 buffer: &Entity<Buffer>,
909 version: Option<clock::Global>,
910 cx: &mut App,
911 ) -> Task<Result<Option<Blame>>> {
912 let buffer = buffer.read(cx);
913 let Some((repo, repo_path)) =
914 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
915 else {
916 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
917 };
918 let content = match &version {
919 Some(version) => buffer.rope_for_version(version).clone(),
920 None => buffer.as_rope().clone(),
921 };
922 let version = version.unwrap_or(buffer.version());
923 let buffer_id = buffer.remote_id();
924
925 let rx = repo.update(cx, |repo, _| {
926 repo.send_job(None, move |state, _| async move {
927 match state {
928 RepositoryState::Local { backend, .. } => backend
929 .blame(repo_path.clone(), content)
930 .await
931 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
932 .map(Some),
933 RepositoryState::Remote { project_id, client } => {
934 let response = client
935 .request(proto::BlameBuffer {
936 project_id: project_id.to_proto(),
937 buffer_id: buffer_id.into(),
938 version: serialize_version(&version),
939 })
940 .await?;
941 Ok(deserialize_blame_buffer_response(response))
942 }
943 }
944 })
945 });
946
947 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
948 }
949
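    /// Builds a permalink to the given selection in the buffer's file on its git
    /// hosting provider.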
950 pub fn get_permalink_to_line(
951 &self,
952 buffer: &Entity<Buffer>,
953 selection: Range<u32>,
954 cx: &mut App,
955 ) -> Task<Result<url::Url>> {
956 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
957 return Task::ready(Err(anyhow!("buffer has no file")));
958 };
959
960 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
961 &(file.worktree.read(cx).id(), file.path.clone()).into(),
962 cx,
963 ) else {
964 // If we're not in a Git repo, check whether this is a Rust source
965 // file in the Cargo registry (presumably opened with go-to-definition
966 // from a normal Rust file). If so, we can put together a permalink
967 // using crate metadata.
968 if buffer
969 .read(cx)
970 .language()
971 .is_none_or(|lang| lang.name() != "Rust".into())
972 {
973 return Task::ready(Err(anyhow!("no permalink available")));
974 }
975 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
976 return Task::ready(Err(anyhow!("no permalink available")));
977 };
978 return cx.spawn(async move |cx| {
979 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
980 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
981 .context("no permalink available")
982 });
983
984 // TODO remote case
985 };
986
987 let buffer_id = buffer.read(cx).remote_id();
988 let branch = repo.read(cx).branch.clone();
989 let remote = branch
990 .as_ref()
991 .and_then(|b| b.upstream.as_ref())
992 .and_then(|b| b.remote_name())
993 .unwrap_or("origin")
994 .to_string();
995
996 let rx = repo.update(cx, |repo, _| {
997 repo.send_job(None, move |state, cx| async move {
998 match state {
999 RepositoryState::Local { backend, .. } => {
1000 let origin_url = backend
1001 .remote_url(&remote)
1002 .with_context(|| format!("remote \"{remote}\" not found"))?;
1003
1004 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1005
1006 let provider_registry =
1007 cx.update(GitHostingProviderRegistry::default_global)?;
1008
1009 let (provider, remote) =
1010 parse_git_remote_url(provider_registry, &origin_url)
1011 .context("parsing Git remote URL")?;
1012
1013 let path = repo_path.to_str().with_context(|| {
1014 format!("converting repo path {repo_path:?} to string")
1015 })?;
1016
1017 Ok(provider.build_permalink(
1018 remote,
1019 BuildPermalinkParams {
1020 sha: &sha,
1021 path,
1022 selection: Some(selection),
1023 },
1024 ))
1025 }
1026 RepositoryState::Remote { project_id, client } => {
1027 let response = client
1028 .request(proto::GetPermalinkToLine {
1029 project_id: project_id.to_proto(),
1030 buffer_id: buffer_id.into(),
1031 selection: Some(proto::Range {
1032 start: selection.start as u64,
1033 end: selection.end as u64,
1034 }),
1035 })
1036 .await?;
1037
1038 url::Url::parse(&response.permalink).context("failed to parse permalink")
1039 }
1040 }
1041 })
1042 });
1043 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1044 }
1045
1046 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1047 match &self.state {
1048 GitStoreState::Local {
1049 downstream: downstream_client,
1050 ..
1051 } => downstream_client
1052 .as_ref()
1053 .map(|state| (state.client.clone(), state.project_id)),
1054 GitStoreState::Ssh {
1055 downstream: downstream_client,
1056 ..
1057 } => downstream_client.clone(),
1058 GitStoreState::Remote { .. } => None,
1059 }
1060 }
1061
1062 fn upstream_client(&self) -> Option<AnyProtoClient> {
1063 match &self.state {
1064 GitStoreState::Local { .. } => None,
1065 GitStoreState::Ssh {
1066 upstream_client, ..
1067 }
1068 | GitStoreState::Remote {
1069 upstream_client, ..
1070 } => Some(upstream_client.clone()),
1071 }
1072 }
1073
1074 fn on_worktree_store_event(
1075 &mut self,
1076 worktree_store: Entity<WorktreeStore>,
1077 event: &WorktreeStoreEvent,
1078 cx: &mut Context<Self>,
1079 ) {
1080 let GitStoreState::Local {
1081 project_environment,
1082 downstream,
1083 next_repository_id,
1084 fs,
1085 } = &self.state
1086 else {
1087 return;
1088 };
1089
1090 match event {
1091 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1092 if let Some(worktree) = self
1093 .worktree_store
1094 .read(cx)
1095 .worktree_for_id(*worktree_id, cx)
1096 {
1097 let paths_by_git_repo =
1098 self.process_updated_entries(&worktree, updated_entries, cx);
1099 let downstream = downstream
1100 .as_ref()
1101 .map(|downstream| downstream.updates_tx.clone());
1102 cx.spawn(async move |_, cx| {
1103 let paths_by_git_repo = paths_by_git_repo.await;
1104 for (repo, paths) in paths_by_git_repo {
1105 repo.update(cx, |repo, cx| {
1106 repo.paths_changed(paths, downstream.clone(), cx);
1107 })
1108 .ok();
1109 }
1110 })
1111 .detach();
1112 }
1113 }
1114 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1115 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1116 else {
1117 return;
1118 };
1119 if !worktree.read(cx).is_visible() {
1120 log::debug!(
1121 "not adding repositories for local worktree {:?} because it's not visible",
1122 worktree.read(cx).abs_path()
1123 );
1124 return;
1125 }
1126 self.update_repositories_from_worktree(
1127 project_environment.clone(),
1128 next_repository_id.clone(),
1129 downstream
1130 .as_ref()
1131 .map(|downstream| downstream.updates_tx.clone()),
1132 changed_repos.clone(),
1133 fs.clone(),
1134 cx,
1135 );
1136 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1137 }
1138 _ => {}
1139 }
1140 }
1141
1142 fn on_repository_event(
1143 &mut self,
1144 repo: Entity<Repository>,
1145 event: &RepositoryEvent,
1146 cx: &mut Context<Self>,
1147 ) {
1148 let id = repo.read(cx).id;
1149 let repo_snapshot = repo.read(cx).snapshot.clone();
1150 for (buffer_id, diff) in self.diffs.iter() {
1151 if let Some((buffer_repo, repo_path)) =
1152 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1153 {
1154 if buffer_repo == repo {
1155 diff.update(cx, |diff, cx| {
1156 if let Some(conflict_set) = &diff.conflict_set {
1157 let conflict_status_changed =
1158 conflict_set.update(cx, |conflict_set, cx| {
1159 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1160 conflict_set.set_has_conflict(has_conflict, cx)
1161 })?;
1162 if conflict_status_changed {
1163 let buffer_store = self.buffer_store.read(cx);
1164 if let Some(buffer) = buffer_store.get(*buffer_id) {
1165 let _ = diff.reparse_conflict_markers(
1166 buffer.read(cx).text_snapshot(),
1167 cx,
1168 );
1169 }
1170 }
1171 }
1172 anyhow::Ok(())
1173 })
1174 .ok();
1175 }
1176 }
1177 }
1178 cx.emit(GitStoreEvent::RepositoryUpdated(
1179 id,
1180 event.clone(),
1181 self.active_repo_id == Some(id),
1182 ))
1183 }
1184
1185 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1186 cx.emit(GitStoreEvent::JobsUpdated)
1187 }
1188
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1190 fn update_repositories_from_worktree(
1191 &mut self,
1192 project_environment: Entity<ProjectEnvironment>,
1193 next_repository_id: Arc<AtomicU64>,
1194 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1195 updated_git_repositories: UpdatedGitRepositoriesSet,
1196 fs: Arc<dyn Fs>,
1197 cx: &mut Context<Self>,
1198 ) {
1199 let mut removed_ids = Vec::new();
1200 for update in updated_git_repositories.iter() {
1201 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1202 let existing_work_directory_abs_path =
1203 repo.read(cx).work_directory_abs_path.clone();
1204 Some(&existing_work_directory_abs_path)
1205 == update.old_work_directory_abs_path.as_ref()
1206 || Some(&existing_work_directory_abs_path)
1207 == update.new_work_directory_abs_path.as_ref()
1208 }) {
1209 if let Some(new_work_directory_abs_path) =
1210 update.new_work_directory_abs_path.clone()
1211 {
1212 existing.update(cx, |existing, cx| {
1213 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1214 existing.schedule_scan(updates_tx.clone(), cx);
1215 });
1216 } else {
1217 removed_ids.push(*id);
1218 }
1219 } else if let UpdatedGitRepository {
1220 new_work_directory_abs_path: Some(work_directory_abs_path),
1221 dot_git_abs_path: Some(dot_git_abs_path),
1222 repository_dir_abs_path: Some(repository_dir_abs_path),
1223 common_dir_abs_path: Some(common_dir_abs_path),
1224 ..
1225 } = update
1226 {
1227 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1228 let git_store = cx.weak_entity();
1229 let repo = cx.new(|cx| {
1230 let mut repo = Repository::local(
1231 id,
1232 work_directory_abs_path.clone(),
1233 dot_git_abs_path.clone(),
1234 repository_dir_abs_path.clone(),
1235 common_dir_abs_path.clone(),
1236 project_environment.downgrade(),
1237 fs.clone(),
1238 git_store,
1239 cx,
1240 );
1241 repo.schedule_scan(updates_tx.clone(), cx);
1242 repo
1243 });
1244 self._subscriptions
1245 .push(cx.subscribe(&repo, Self::on_repository_event));
1246 self._subscriptions
1247 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1248 self.repositories.insert(id, repo);
1249 cx.emit(GitStoreEvent::RepositoryAdded(id));
1250 self.active_repo_id.get_or_insert_with(|| {
1251 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1252 id
1253 });
1254 }
1255 }
1256
1257 for id in removed_ids {
1258 if self.active_repo_id == Some(id) {
1259 self.active_repo_id = None;
1260 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1261 }
1262 self.repositories.remove(&id);
1263 if let Some(updates_tx) = updates_tx.as_ref() {
1264 updates_tx
1265 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1266 .ok();
1267 }
1268 }
1269 }
1270
1271 fn on_buffer_store_event(
1272 &mut self,
1273 _: Entity<BufferStore>,
1274 event: &BufferStoreEvent,
1275 cx: &mut Context<Self>,
1276 ) {
1277 match event {
1278 BufferStoreEvent::BufferAdded(buffer) => {
1279 cx.subscribe(&buffer, |this, buffer, event, cx| {
1280 if let BufferEvent::LanguageChanged = event {
1281 let buffer_id = buffer.read(cx).remote_id();
1282 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1283 diff_state.update(cx, |diff_state, cx| {
1284 diff_state.buffer_language_changed(buffer, cx);
1285 });
1286 }
1287 }
1288 })
1289 .detach();
1290 }
1291 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1292 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1293 diffs.remove(buffer_id);
1294 }
1295 }
1296 BufferStoreEvent::BufferDropped(buffer_id) => {
                self.diffs.remove(buffer_id);
1298 for diffs in self.shared_diffs.values_mut() {
1299 diffs.remove(buffer_id);
1300 }
1301 }
1302
1303 _ => {}
1304 }
1305 }
1306
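    /// Recalculates diffs and reparses conflict markers for the given buffers,
    /// returning a future that resolves once all recalculations have completed.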
1307 pub fn recalculate_buffer_diffs(
1308 &mut self,
1309 buffers: Vec<Entity<Buffer>>,
1310 cx: &mut Context<Self>,
1311 ) -> impl Future<Output = ()> + use<> {
1312 let mut futures = Vec::new();
1313 for buffer in buffers {
1314 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1315 let buffer = buffer.read(cx).text_snapshot();
1316 diff_state.update(cx, |diff_state, cx| {
1317 diff_state.recalculate_diffs(buffer.clone(), cx);
1318 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1319 });
1320 futures.push(diff_state.update(cx, |diff_state, cx| {
1321 diff_state
1322 .reparse_conflict_markers(buffer, cx)
1323 .map(|_| {})
1324 .boxed()
1325 }));
1326 }
1327 }
1328 async move {
1329 futures::future::join_all(futures).await;
1330 }
1331 }
1332
1333 fn on_buffer_diff_event(
1334 &mut self,
1335 diff: Entity<buffer_diff::BufferDiff>,
1336 event: &BufferDiffEvent,
1337 cx: &mut Context<Self>,
1338 ) {
1339 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1340 let buffer_id = diff.read(cx).buffer_id;
1341 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1342 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1343 diff_state.hunk_staging_operation_count += 1;
1344 diff_state.hunk_staging_operation_count
1345 });
1346 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1347 let recv = repo.update(cx, |repo, cx| {
1348 log::debug!("hunks changed for {}", path.display());
1349 repo.spawn_set_index_text_job(
1350 path,
1351 new_index_text.as_ref().map(|rope| rope.to_string()),
1352 Some(hunk_staging_operation_count),
1353 cx,
1354 )
1355 });
1356 let diff = diff.downgrade();
1357 cx.spawn(async move |this, cx| {
1358 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1359 diff.update(cx, |diff, cx| {
1360 diff.clear_pending_hunks(cx);
1361 })
1362 .ok();
1363 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1364 .ok();
1365 }
1366 })
1367 .detach();
1368 }
1369 }
1370 }
1371 }
1372
1373 fn local_worktree_git_repos_changed(
1374 &mut self,
1375 worktree: Entity<Worktree>,
1376 changed_repos: &UpdatedGitRepositoriesSet,
1377 cx: &mut Context<Self>,
1378 ) {
1379 log::debug!("local worktree repos changed");
1380 debug_assert!(worktree.read(cx).is_local());
1381
1382 for repository in self.repositories.values() {
1383 repository.update(cx, |repository, cx| {
1384 let repo_abs_path = &repository.work_directory_abs_path;
1385 if changed_repos.iter().any(|update| {
1386 update.old_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
1387 || update.new_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
1388 }) {
1389 repository.reload_buffer_diff_bases(cx);
1390 }
1391 });
1392 }
1393 }
1394
1395 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1396 &self.repositories
1397 }
1398
1399 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1400 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1401 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1402 Some(status.status)
1403 }
1404
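    /// Returns the repository containing the given buffer's file, along with the
    /// file's path relative to the repository's work directory.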
1405 pub fn repository_and_path_for_buffer_id(
1406 &self,
1407 buffer_id: BufferId,
1408 cx: &App,
1409 ) -> Option<(Entity<Repository>, RepoPath)> {
1410 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1411 let project_path = buffer.read(cx).project_path(cx)?;
1412 self.repository_and_path_for_project_path(&project_path, cx)
1413 }
1414
1415 pub fn repository_and_path_for_project_path(
1416 &self,
1417 path: &ProjectPath,
1418 cx: &App,
1419 ) -> Option<(Entity<Repository>, RepoPath)> {
1420 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1421 self.repositories
1422 .values()
1423 .filter_map(|repo| {
1424 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1425 Some((repo.clone(), repo_path))
1426 })
1427 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1428 }
1429
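    /// Initializes a new git repository at the given path, either locally or by
    /// forwarding the request upstream.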
1430 pub fn git_init(
1431 &self,
1432 path: Arc<Path>,
1433 fallback_branch_name: String,
1434 cx: &App,
1435 ) -> Task<Result<()>> {
1436 match &self.state {
1437 GitStoreState::Local { fs, .. } => {
1438 let fs = fs.clone();
1439 cx.background_executor()
1440 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1441 }
1442 GitStoreState::Ssh {
1443 upstream_client,
1444 upstream_project_id: project_id,
1445 ..
1446 }
1447 | GitStoreState::Remote {
1448 upstream_client,
1449 upstream_project_id: project_id,
1450 ..
1451 } => {
1452 let client = upstream_client.clone();
1453 let project_id = *project_id;
1454 cx.background_executor().spawn(async move {
1455 client
1456 .request(proto::GitInit {
1457 project_id: project_id.0,
1458 abs_path: path.to_string_lossy().to_string(),
1459 fallback_branch_name,
1460 })
1461 .await?;
1462 Ok(())
1463 })
1464 }
1465 }
1466 }
1467
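    /// Clones the given remote repository into `path`, either locally or via the
    /// SSH host; not supported in remote (collaborative) projects.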
1468 pub fn git_clone(
1469 &self,
1470 repo: String,
1471 path: impl Into<Arc<std::path::Path>>,
1472 cx: &App,
1473 ) -> Task<Result<()>> {
1474 let path = path.into();
1475 match &self.state {
1476 GitStoreState::Local { fs, .. } => {
1477 let fs = fs.clone();
1478 cx.background_executor()
1479 .spawn(async move { fs.git_clone(&repo, &path).await })
1480 }
1481 GitStoreState::Ssh {
1482 upstream_client,
1483 upstream_project_id,
1484 ..
1485 } => {
1486 let request = upstream_client.request(proto::GitClone {
1487 project_id: upstream_project_id.0,
1488 abs_path: path.to_string_lossy().to_string(),
1489 remote_repo: repo,
1490 });
1491
1492 cx.background_spawn(async move {
1493 let result = request.await?;
1494
1495 match result.success {
1496 true => Ok(()),
1497 false => Err(anyhow!("Git Clone failed")),
1498 }
1499 })
1500 }
1501 GitStoreState::Remote { .. } => {
1502 Task::ready(Err(anyhow!("Git Clone isn't supported for remote users")))
1503 }
1504 }
1505 }
1506
1507 async fn handle_update_repository(
1508 this: Entity<Self>,
1509 envelope: TypedEnvelope<proto::UpdateRepository>,
1510 mut cx: AsyncApp,
1511 ) -> Result<()> {
1512 this.update(&mut cx, |this, cx| {
1513 let mut update = envelope.payload;
1514
1515 let id = RepositoryId::from_proto(update.id);
1516 let client = this
1517 .upstream_client()
1518 .context("no upstream client")?
1519 .clone();
1520
1521 let mut is_new = false;
1522 let repo = this.repositories.entry(id).or_insert_with(|| {
1523 is_new = true;
1524 let git_store = cx.weak_entity();
1525 cx.new(|cx| {
1526 Repository::remote(
1527 id,
1528 Path::new(&update.abs_path).into(),
1529 ProjectId(update.project_id),
1530 client,
1531 git_store,
1532 cx,
1533 )
1534 })
1535 });
1536 if is_new {
1537 this._subscriptions
1538 .push(cx.subscribe(&repo, Self::on_repository_event))
1539 }
1540
1541 repo.update(cx, {
1542 let update = update.clone();
1543 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1544 })?;
1545
1546 this.active_repo_id.get_or_insert_with(|| {
1547 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1548 id
1549 });
1550
1551 if let Some((client, project_id)) = this.downstream_client() {
1552 update.project_id = project_id.to_proto();
1553 client.send(update).log_err();
1554 }
1555 Ok(())
1556 })?
1557 }
1558
1559 async fn handle_remove_repository(
1560 this: Entity<Self>,
1561 envelope: TypedEnvelope<proto::RemoveRepository>,
1562 mut cx: AsyncApp,
1563 ) -> Result<()> {
1564 this.update(&mut cx, |this, cx| {
1565 let mut update = envelope.payload;
1566 let id = RepositoryId::from_proto(update.id);
1567 this.repositories.remove(&id);
1568 if let Some((client, project_id)) = this.downstream_client() {
1569 update.project_id = project_id.to_proto();
1570 client.send(update).log_err();
1571 }
1572 if this.active_repo_id == Some(id) {
1573 this.active_repo_id = None;
1574 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1575 }
1576 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1577 })
1578 }
1579
1580 async fn handle_git_init(
1581 this: Entity<Self>,
1582 envelope: TypedEnvelope<proto::GitInit>,
1583 cx: AsyncApp,
1584 ) -> Result<proto::Ack> {
1585 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1586 let name = envelope.payload.fallback_branch_name;
1587 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1588 .await?;
1589
1590 Ok(proto::Ack {})
1591 }
1592
1593 async fn handle_git_clone(
1594 this: Entity<Self>,
1595 envelope: TypedEnvelope<proto::GitClone>,
1596 cx: AsyncApp,
1597 ) -> Result<proto::GitCloneResponse> {
1598 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1599 let repo_name = envelope.payload.remote_repo;
1600 let result = cx
1601 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1602 .await;
1603
1604 Ok(proto::GitCloneResponse {
1605 success: result.is_ok(),
1606 })
1607 }
1608
1609 async fn handle_fetch(
1610 this: Entity<Self>,
1611 envelope: TypedEnvelope<proto::Fetch>,
1612 mut cx: AsyncApp,
1613 ) -> Result<proto::RemoteMessageResponse> {
1614 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1615 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1616 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1617 let askpass_id = envelope.payload.askpass_id;
1618
1619 let askpass = make_remote_delegate(
1620 this,
1621 envelope.payload.project_id,
1622 repository_id,
1623 askpass_id,
1624 &mut cx,
1625 );
1626
1627 let remote_output = repository_handle
1628 .update(&mut cx, |repository_handle, cx| {
1629 repository_handle.fetch(fetch_options, askpass, cx)
1630 })?
1631 .await??;
1632
1633 Ok(proto::RemoteMessageResponse {
1634 stdout: remote_output.stdout,
1635 stderr: remote_output.stderr,
1636 })
1637 }
1638
1639 async fn handle_push(
1640 this: Entity<Self>,
1641 envelope: TypedEnvelope<proto::Push>,
1642 mut cx: AsyncApp,
1643 ) -> Result<proto::RemoteMessageResponse> {
1644 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1645 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1646
1647 let askpass_id = envelope.payload.askpass_id;
1648 let askpass = make_remote_delegate(
1649 this,
1650 envelope.payload.project_id,
1651 repository_id,
1652 askpass_id,
1653 &mut cx,
1654 );
1655
1656 let options = envelope
1657 .payload
1658 .options
1659 .as_ref()
1660 .map(|_| match envelope.payload.options() {
1661 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1662 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1663 });
1664
1665 let branch_name = envelope.payload.branch_name.into();
1666 let remote_name = envelope.payload.remote_name.into();
1667
1668 let remote_output = repository_handle
1669 .update(&mut cx, |repository_handle, cx| {
1670 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1671 })?
1672 .await??;
1673 Ok(proto::RemoteMessageResponse {
1674 stdout: remote_output.stdout,
1675 stderr: remote_output.stderr,
1676 })
1677 }
1678
1679 async fn handle_pull(
1680 this: Entity<Self>,
1681 envelope: TypedEnvelope<proto::Pull>,
1682 mut cx: AsyncApp,
1683 ) -> Result<proto::RemoteMessageResponse> {
1684 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1685 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1686 let askpass_id = envelope.payload.askpass_id;
1687 let askpass = make_remote_delegate(
1688 this,
1689 envelope.payload.project_id,
1690 repository_id,
1691 askpass_id,
1692 &mut cx,
1693 );
1694
1695 let branch_name = envelope.payload.branch_name.into();
1696 let remote_name = envelope.payload.remote_name.into();
1697
1698 let remote_message = repository_handle
1699 .update(&mut cx, |repository_handle, cx| {
1700 repository_handle.pull(branch_name, remote_name, askpass, cx)
1701 })?
1702 .await??;
1703
1704 Ok(proto::RemoteMessageResponse {
1705 stdout: remote_message.stdout,
1706 stderr: remote_message.stderr,
1707 })
1708 }
1709
1710 async fn handle_stage(
1711 this: Entity<Self>,
1712 envelope: TypedEnvelope<proto::Stage>,
1713 mut cx: AsyncApp,
1714 ) -> Result<proto::Ack> {
1715 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1716 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1717
1718 let entries = envelope
1719 .payload
1720 .paths
1721 .into_iter()
1722 .map(PathBuf::from)
1723 .map(RepoPath::new)
1724 .collect();
1725
1726 repository_handle
1727 .update(&mut cx, |repository_handle, cx| {
1728 repository_handle.stage_entries(entries, cx)
1729 })?
1730 .await?;
1731 Ok(proto::Ack {})
1732 }
1733
1734 async fn handle_unstage(
1735 this: Entity<Self>,
1736 envelope: TypedEnvelope<proto::Unstage>,
1737 mut cx: AsyncApp,
1738 ) -> Result<proto::Ack> {
1739 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1740 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1741
1742 let entries = envelope
1743 .payload
1744 .paths
1745 .into_iter()
1746 .map(PathBuf::from)
1747 .map(RepoPath::new)
1748 .collect();
1749
1750 repository_handle
1751 .update(&mut cx, |repository_handle, cx| {
1752 repository_handle.unstage_entries(entries, cx)
1753 })?
1754 .await?;
1755
1756 Ok(proto::Ack {})
1757 }
1758
1759 async fn handle_stash(
1760 this: Entity<Self>,
1761 envelope: TypedEnvelope<proto::Stash>,
1762 mut cx: AsyncApp,
1763 ) -> Result<proto::Ack> {
1764 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1765 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1766
1767 let entries = envelope
1768 .payload
1769 .paths
1770 .into_iter()
1771 .map(PathBuf::from)
1772 .map(RepoPath::new)
1773 .collect();
1774
1775 repository_handle
1776 .update(&mut cx, |repository_handle, cx| {
1777 repository_handle.stash_entries(entries, cx)
1778 })?
1779 .await?;
1780
1781 Ok(proto::Ack {})
1782 }
1783
1784 async fn handle_stash_pop(
1785 this: Entity<Self>,
1786 envelope: TypedEnvelope<proto::StashPop>,
1787 mut cx: AsyncApp,
1788 ) -> Result<proto::Ack> {
1789 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1790 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1791
1792 repository_handle
1793 .update(&mut cx, |repository_handle, cx| {
1794 repository_handle.stash_pop(cx)
1795 })?
1796 .await?;
1797
1798 Ok(proto::Ack {})
1799 }
1800
1801 async fn handle_set_index_text(
1802 this: Entity<Self>,
1803 envelope: TypedEnvelope<proto::SetIndexText>,
1804 mut cx: AsyncApp,
1805 ) -> Result<proto::Ack> {
1806 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1807 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1808 let repo_path = RepoPath::from_str(&envelope.payload.path);
1809
1810 repository_handle
1811 .update(&mut cx, |repository_handle, cx| {
1812 repository_handle.spawn_set_index_text_job(
1813 repo_path,
1814 envelope.payload.text,
1815 None,
1816 cx,
1817 )
1818 })?
1819 .await??;
1820 Ok(proto::Ack {})
1821 }
1822
1823 async fn handle_commit(
1824 this: Entity<Self>,
1825 envelope: TypedEnvelope<proto::Commit>,
1826 mut cx: AsyncApp,
1827 ) -> Result<proto::Ack> {
1828 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1829 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1830
1831 let message = SharedString::from(envelope.payload.message);
1832 let name = envelope.payload.name.map(SharedString::from);
1833 let email = envelope.payload.email.map(SharedString::from);
1834 let options = envelope.payload.options.unwrap_or_default();
1835
1836 repository_handle
1837 .update(&mut cx, |repository_handle, cx| {
1838 repository_handle.commit(
1839 message,
1840 name.zip(email),
1841 CommitOptions {
1842 amend: options.amend,
1843 signoff: options.signoff,
1844 },
1845 cx,
1846 )
1847 })?
1848 .await??;
1849 Ok(proto::Ack {})
1850 }
1851
1852 async fn handle_get_remotes(
1853 this: Entity<Self>,
1854 envelope: TypedEnvelope<proto::GetRemotes>,
1855 mut cx: AsyncApp,
1856 ) -> Result<proto::GetRemotesResponse> {
1857 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1858 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1859
1860 let branch_name = envelope.payload.branch_name;
1861
1862 let remotes = repository_handle
1863 .update(&mut cx, |repository_handle, _| {
1864 repository_handle.get_remotes(branch_name)
1865 })?
1866 .await??;
1867
1868 Ok(proto::GetRemotesResponse {
1869 remotes: remotes
1870 .into_iter()
1871 .map(|remotes| proto::get_remotes_response::Remote {
1872 name: remotes.name.to_string(),
1873 })
1874 .collect::<Vec<_>>(),
1875 })
1876 }
1877
1878 async fn handle_get_branches(
1879 this: Entity<Self>,
1880 envelope: TypedEnvelope<proto::GitGetBranches>,
1881 mut cx: AsyncApp,
1882 ) -> Result<proto::GitBranchesResponse> {
1883 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1884 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1885
1886 let branches = repository_handle
1887 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1888 .await??;
1889
1890 Ok(proto::GitBranchesResponse {
1891 branches: branches
1892 .into_iter()
1893 .map(|branch| branch_to_proto(&branch))
1894 .collect::<Vec<_>>(),
1895 })
1896 }
1897 async fn handle_create_branch(
1898 this: Entity<Self>,
1899 envelope: TypedEnvelope<proto::GitCreateBranch>,
1900 mut cx: AsyncApp,
1901 ) -> Result<proto::Ack> {
1902 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1903 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1904 let branch_name = envelope.payload.branch_name;
1905
1906 repository_handle
1907 .update(&mut cx, |repository_handle, _| {
1908 repository_handle.create_branch(branch_name)
1909 })?
1910 .await??;
1911
1912 Ok(proto::Ack {})
1913 }
1914
1915 async fn handle_change_branch(
1916 this: Entity<Self>,
1917 envelope: TypedEnvelope<proto::GitChangeBranch>,
1918 mut cx: AsyncApp,
1919 ) -> Result<proto::Ack> {
1920 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1921 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1922 let branch_name = envelope.payload.branch_name;
1923
1924 repository_handle
1925 .update(&mut cx, |repository_handle, _| {
1926 repository_handle.change_branch(branch_name)
1927 })?
1928 .await??;
1929
1930 Ok(proto::Ack {})
1931 }
1932
1933 async fn handle_show(
1934 this: Entity<Self>,
1935 envelope: TypedEnvelope<proto::GitShow>,
1936 mut cx: AsyncApp,
1937 ) -> Result<proto::GitCommitDetails> {
1938 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1939 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1940
1941 let commit = repository_handle
1942 .update(&mut cx, |repository_handle, _| {
1943 repository_handle.show(envelope.payload.commit)
1944 })?
1945 .await??;
1946 Ok(proto::GitCommitDetails {
1947 sha: commit.sha.into(),
1948 message: commit.message.into(),
1949 commit_timestamp: commit.commit_timestamp,
1950 author_email: commit.author_email.into(),
1951 author_name: commit.author_name.into(),
1952 })
1953 }
1954
1955 async fn handle_load_commit_diff(
1956 this: Entity<Self>,
1957 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1958 mut cx: AsyncApp,
1959 ) -> Result<proto::LoadCommitDiffResponse> {
1960 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1961 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1962
1963 let commit_diff = repository_handle
1964 .update(&mut cx, |repository_handle, _| {
1965 repository_handle.load_commit_diff(envelope.payload.commit)
1966 })?
1967 .await??;
1968 Ok(proto::LoadCommitDiffResponse {
1969 files: commit_diff
1970 .files
1971 .into_iter()
1972 .map(|file| proto::CommitFile {
1973 path: file.path.to_string(),
1974 old_text: file.old_text,
1975 new_text: file.new_text,
1976 })
1977 .collect(),
1978 })
1979 }
1980
1981 async fn handle_reset(
1982 this: Entity<Self>,
1983 envelope: TypedEnvelope<proto::GitReset>,
1984 mut cx: AsyncApp,
1985 ) -> Result<proto::Ack> {
1986 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1987 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1988
1989 let mode = match envelope.payload.mode() {
1990 git_reset::ResetMode::Soft => ResetMode::Soft,
1991 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1992 };
1993
1994 repository_handle
1995 .update(&mut cx, |repository_handle, cx| {
1996 repository_handle.reset(envelope.payload.commit, mode, cx)
1997 })?
1998 .await??;
1999 Ok(proto::Ack {})
2000 }
2001
2002 async fn handle_checkout_files(
2003 this: Entity<Self>,
2004 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2005 mut cx: AsyncApp,
2006 ) -> Result<proto::Ack> {
2007 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2008 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2009 let paths = envelope
2010 .payload
2011 .paths
2012 .iter()
2013 .map(|s| RepoPath::from_str(s))
2014 .collect();
2015
2016 repository_handle
2017 .update(&mut cx, |repository_handle, cx| {
2018 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2019 })?
2020 .await??;
2021 Ok(proto::Ack {})
2022 }
2023
2024 async fn handle_open_commit_message_buffer(
2025 this: Entity<Self>,
2026 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2027 mut cx: AsyncApp,
2028 ) -> Result<proto::OpenBufferResponse> {
2029 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2030 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2031 let buffer = repository
2032 .update(&mut cx, |repository, cx| {
2033 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2034 })?
2035 .await?;
2036
2037 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2038 this.update(&mut cx, |this, cx| {
2039 this.buffer_store.update(cx, |buffer_store, cx| {
2040 buffer_store
2041 .create_buffer_for_peer(
2042 &buffer,
2043 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2044 cx,
2045 )
2046 .detach_and_log_err(cx);
2047 })
2048 })?;
2049
2050 Ok(proto::OpenBufferResponse {
2051 buffer_id: buffer_id.to_proto(),
2052 })
2053 }
2054
2055 async fn handle_askpass(
2056 this: Entity<Self>,
2057 envelope: TypedEnvelope<proto::AskPassRequest>,
2058 mut cx: AsyncApp,
2059 ) -> Result<proto::AskPassResponse> {
2060 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2061 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2062
2063 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2064 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2065 debug_panic!("no askpass found");
2066 anyhow::bail!("no askpass found");
2067 };
2068
2069 let response = askpass.ask_password(envelope.payload.prompt).await?;
2070
2071 delegates
2072 .lock()
2073 .insert(envelope.payload.askpass_id, askpass);
2074
2075 Ok(proto::AskPassResponse { response })
2076 }
2077
2078 async fn handle_check_for_pushed_commits(
2079 this: Entity<Self>,
2080 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2081 mut cx: AsyncApp,
2082 ) -> Result<proto::CheckForPushedCommitsResponse> {
2083 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2084 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2085
2086 let branches = repository_handle
2087 .update(&mut cx, |repository_handle, _| {
2088 repository_handle.check_for_pushed_commits()
2089 })?
2090 .await??;
2091 Ok(proto::CheckForPushedCommitsResponse {
2092 pushed_to: branches
2093 .into_iter()
2094 .map(|commit| commit.to_string())
2095 .collect(),
2096 })
2097 }
2098
2099 async fn handle_git_diff(
2100 this: Entity<Self>,
2101 envelope: TypedEnvelope<proto::GitDiff>,
2102 mut cx: AsyncApp,
2103 ) -> Result<proto::GitDiffResponse> {
2104 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2105 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2106 let diff_type = match envelope.payload.diff_type() {
2107 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2108 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2109 };
2110
2111 let mut diff = repository_handle
2112 .update(&mut cx, |repository_handle, cx| {
2113 repository_handle.diff(diff_type, cx)
2114 })?
2115 .await??;
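        // Cap the diff payload sent over RPC at roughly one megabyte; truncating by characters
        // rather than bytes avoids splitting a UTF-8 code point.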
2116 const ONE_MB: usize = 1_000_000;
2117 if diff.len() > ONE_MB {
2118 diff = diff.chars().take(ONE_MB).collect()
2119 }
2120
2121 Ok(proto::GitDiffResponse { diff })
2122 }
2123
2124 async fn handle_open_unstaged_diff(
2125 this: Entity<Self>,
2126 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2127 mut cx: AsyncApp,
2128 ) -> Result<proto::OpenUnstagedDiffResponse> {
2129 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2130 let diff = this
2131 .update(&mut cx, |this, cx| {
2132 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2133 Some(this.open_unstaged_diff(buffer, cx))
2134 })?
2135 .context("missing buffer")?
2136 .await?;
2137 this.update(&mut cx, |this, _| {
2138 let shared_diffs = this
2139 .shared_diffs
2140 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2141 .or_default();
2142 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2143 })?;
2144 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2145 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2146 }
2147
2148 async fn handle_open_uncommitted_diff(
2149 this: Entity<Self>,
2150 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2151 mut cx: AsyncApp,
2152 ) -> Result<proto::OpenUncommittedDiffResponse> {
2153 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2154 let diff = this
2155 .update(&mut cx, |this, cx| {
2156 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2157 Some(this.open_uncommitted_diff(buffer, cx))
2158 })?
2159 .context("missing buffer")?
2160 .await?;
2161 this.update(&mut cx, |this, _| {
2162 let shared_diffs = this
2163 .shared_diffs
2164 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2165 .or_default();
2166 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2167 })?;
2168 diff.read_with(&cx, |diff, cx| {
2169 use proto::open_uncommitted_diff_response::Mode;
2170
2171 let unstaged_diff = diff.secondary_diff();
2172 let index_snapshot = unstaged_diff.and_then(|diff| {
2173 let diff = diff.read(cx);
2174 diff.base_text_exists().then(|| diff.base_text())
2175 });
2176
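            // Decide how much base text to send: include the committed text only when HEAD
            // exists, and omit the staged text when the index snapshot is identical to HEAD so
            // the peer can reuse the committed text (Mode::IndexMatchesHead).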
2177 let mode;
2178 let staged_text;
2179 let committed_text;
2180 if diff.base_text_exists() {
2181 let committed_snapshot = diff.base_text();
2182 committed_text = Some(committed_snapshot.text());
2183 if let Some(index_text) = index_snapshot {
2184 if index_text.remote_id() == committed_snapshot.remote_id() {
2185 mode = Mode::IndexMatchesHead;
2186 staged_text = None;
2187 } else {
2188 mode = Mode::IndexAndHead;
2189 staged_text = Some(index_text.text());
2190 }
2191 } else {
2192 mode = Mode::IndexAndHead;
2193 staged_text = None;
2194 }
2195 } else {
2196 mode = Mode::IndexAndHead;
2197 committed_text = None;
2198 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2199 }
2200
2201 proto::OpenUncommittedDiffResponse {
2202 committed_text,
2203 staged_text,
2204 mode: mode.into(),
2205 }
2206 })
2207 }
2208
2209 async fn handle_update_diff_bases(
2210 this: Entity<Self>,
2211 request: TypedEnvelope<proto::UpdateDiffBases>,
2212 mut cx: AsyncApp,
2213 ) -> Result<()> {
2214 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2215 this.update(&mut cx, |this, cx| {
2216 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2217 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2218 let buffer = buffer.read(cx).text_snapshot();
2219 diff_state.update(cx, |diff_state, cx| {
2220 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2221 })
2222 }
2223 }
2224 })
2225 }
2226
2227 async fn handle_blame_buffer(
2228 this: Entity<Self>,
2229 envelope: TypedEnvelope<proto::BlameBuffer>,
2230 mut cx: AsyncApp,
2231 ) -> Result<proto::BlameBufferResponse> {
2232 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2233 let version = deserialize_version(&envelope.payload.version);
2234 let buffer = this.read_with(&cx, |this, cx| {
2235 this.buffer_store.read(cx).get_existing(buffer_id)
2236 })??;
2237 buffer
2238 .update(&mut cx, |buffer, _| {
2239 buffer.wait_for_version(version.clone())
2240 })?
2241 .await?;
2242 let blame = this
2243 .update(&mut cx, |this, cx| {
2244 this.blame_buffer(&buffer, Some(version), cx)
2245 })?
2246 .await?;
2247 Ok(serialize_blame_buffer_response(blame))
2248 }
2249
2250 async fn handle_get_permalink_to_line(
2251 this: Entity<Self>,
2252 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2253 mut cx: AsyncApp,
2254 ) -> Result<proto::GetPermalinkToLineResponse> {
2255 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2257 let selection = {
2258 let proto_selection = envelope
2259 .payload
2260 .selection
                .context("no selection provided to get permalink for")?;
2262 proto_selection.start as u32..proto_selection.end as u32
2263 };
2264 let buffer = this.read_with(&cx, |this, cx| {
2265 this.buffer_store.read(cx).get_existing(buffer_id)
2266 })??;
2267 let permalink = this
2268 .update(&mut cx, |this, cx| {
2269 this.get_permalink_to_line(&buffer, selection, cx)
2270 })?
2271 .await?;
2272 Ok(proto::GetPermalinkToLineResponse {
2273 permalink: permalink.to_string(),
2274 })
2275 }
2276
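    /// Resolves the repository entity referenced by an incoming request, failing if that
    /// repository is no longer registered with this `GitStore`.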
2277 fn repository_for_request(
2278 this: &Entity<Self>,
2279 id: RepositoryId,
2280 cx: &mut AsyncApp,
2281 ) -> Result<Entity<Repository>> {
2282 this.read_with(cx, |this, _| {
2283 this.repositories
2284 .get(&id)
2285 .context("missing repository handle")
2286 .cloned()
2287 })?
2288 }
2289
2290 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2291 self.repositories
2292 .iter()
2293 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2294 .collect()
2295 }
2296
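    /// Groups the updated worktree entries by the repository containing them, assigning each
    /// path to its innermost repository. The partitioning work runs on the background executor.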
2297 fn process_updated_entries(
2298 &self,
2299 worktree: &Entity<Worktree>,
2300 updated_entries: &[(Arc<Path>, ProjectEntryId, PathChange)],
2301 cx: &mut App,
2302 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2303 let mut repo_paths = self
2304 .repositories
2305 .values()
2306 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2307 .collect::<Vec<_>>();
2308 let mut entries: Vec<_> = updated_entries
2309 .iter()
2310 .map(|(path, _, _)| path.clone())
2311 .collect();
2312 entries.sort();
2313 let worktree = worktree.read(cx);
2314
2315 let entries = entries
2316 .into_iter()
2317 .filter_map(|path| worktree.absolutize(&path).ok())
2318 .collect::<Arc<[_]>>();
2319
2320 let executor = cx.background_executor().clone();
2321 cx.background_executor().spawn(async move {
2322 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2323 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2324 let mut tasks = FuturesOrdered::new();
2325 for (repo_path, repo) in repo_paths.into_iter().rev() {
2326 let entries = entries.clone();
2327 let task = executor.spawn(async move {
2328 // Find all repository paths that belong to this repo
2329 let mut ix = entries.partition_point(|path| path < &*repo_path);
2330 if ix == entries.len() {
2331 return None;
2332 };
2333
2334 let mut paths = vec![];
2335 // All paths prefixed by a given repo will constitute a continuous range.
2336 while let Some(path) = entries.get(ix)
2337 && let Some(repo_path) =
2338 RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, &path)
2339 {
2340 paths.push((repo_path, ix));
2341 ix += 1;
2342 }
2343 Some((repo, paths))
2344 });
2345 tasks.push_back(task);
2346 }
2347
2348 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2349 let mut path_was_used = vec![false; entries.len()];
2350 let tasks = tasks.collect::<Vec<_>>().await;
            // Process the tasks in order: repositories were visited deepest-first, so more-specific paths are seen first.
            // We always want to assign a path to its innermost repository.
2353 for t in tasks {
2354 let Some((repo, paths)) = t else {
2355 continue;
2356 };
2357 let entry = paths_by_git_repo.entry(repo).or_default();
2358 for (repo_path, ix) in paths {
2359 if path_was_used[ix] {
2360 continue;
2361 }
2362 path_was_used[ix] = true;
2363 entry.push(repo_path);
2364 }
2365 }
2366
2367 paths_by_git_repo
2368 })
2369 }
2370}
2371
2372impl BufferGitState {
2373 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2374 Self {
2375 unstaged_diff: Default::default(),
2376 uncommitted_diff: Default::default(),
2377 recalculate_diff_task: Default::default(),
2378 language: Default::default(),
2379 language_registry: Default::default(),
2380 recalculating_tx: postage::watch::channel_with(false).0,
2381 hunk_staging_operation_count: 0,
2382 hunk_staging_operation_count_as_of_write: 0,
2383 head_text: Default::default(),
2384 index_text: Default::default(),
2385 head_changed: Default::default(),
2386 index_changed: Default::default(),
2387 language_changed: Default::default(),
2388 conflict_updated_futures: Default::default(),
2389 conflict_set: Default::default(),
2390 reparse_conflict_markers_task: Default::default(),
2391 }
2392 }
2393
2394 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2395 self.language = buffer.read(cx).language().cloned();
2396 self.language_changed = true;
2397 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2398 }
2399
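    /// Re-parses conflict markers in the given buffer snapshot. The returned receiver fires once
    /// the tracked conflict set has been updated; if there is no conflict set with active
    /// conflicts, the sender is dropped and the receiver resolves with a cancellation error.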
2400 fn reparse_conflict_markers(
2401 &mut self,
2402 buffer: text::BufferSnapshot,
2403 cx: &mut Context<Self>,
2404 ) -> oneshot::Receiver<()> {
2405 let (tx, rx) = oneshot::channel();
2406
2407 let Some(conflict_set) = self
2408 .conflict_set
2409 .as_ref()
2410 .and_then(|conflict_set| conflict_set.upgrade())
2411 else {
2412 return rx;
2413 };
2414
2415 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2416 if conflict_set.has_conflict {
2417 Some(conflict_set.snapshot())
2418 } else {
2419 None
2420 }
2421 });
2422
2423 if let Some(old_snapshot) = old_snapshot {
2424 self.conflict_updated_futures.push(tx);
2425 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2426 let (snapshot, changed_range) = cx
2427 .background_spawn(async move {
2428 let new_snapshot = ConflictSet::parse(&buffer);
2429 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2430 (new_snapshot, changed_range)
2431 })
2432 .await;
2433 this.update(cx, |this, cx| {
2434 if let Some(conflict_set) = &this.conflict_set {
2435 conflict_set
2436 .update(cx, |conflict_set, cx| {
2437 conflict_set.set_snapshot(snapshot, changed_range, cx);
2438 })
2439 .ok();
2440 }
2441 let futures = std::mem::take(&mut this.conflict_updated_futures);
2442 for tx in futures {
2443 tx.send(()).ok();
2444 }
2445 })
2446 }))
2447 }
2448
2449 rx
2450 }
2451
2452 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2453 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2454 }
2455
2456 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2457 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2458 }
2459
2460 fn handle_base_texts_updated(
2461 &mut self,
2462 buffer: text::BufferSnapshot,
2463 message: proto::UpdateDiffBases,
2464 cx: &mut Context<Self>,
2465 ) {
2466 use proto::update_diff_bases::Mode;
2467
2468 let Some(mode) = Mode::from_i32(message.mode) else {
2469 return;
2470 };
2471
2472 let diff_bases_change = match mode {
2473 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2474 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2475 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2476 Mode::IndexAndHead => DiffBasesChange::SetEach {
2477 index: message.staged_text,
2478 head: message.committed_text,
2479 },
2480 };
2481
2482 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2483 }
2484
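    /// Returns a future that resolves once the in-flight diff recalculation completes, or `None`
    /// if no recalculation is currently running.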
2485 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2486 if *self.recalculating_tx.borrow() {
2487 let mut rx = self.recalculating_tx.subscribe();
2488 return Some(async move {
2489 loop {
2490 let is_recalculating = rx.recv().await;
2491 if is_recalculating != Some(true) {
2492 break;
2493 }
2494 }
2495 });
2496 } else {
2497 None
2498 }
2499 }
2500
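    /// Applies a change to the cached HEAD and/or index base texts, normalizing their line
    /// endings, and then kicks off a diff recalculation against the given buffer snapshot.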
2501 fn diff_bases_changed(
2502 &mut self,
2503 buffer: text::BufferSnapshot,
2504 diff_bases_change: Option<DiffBasesChange>,
2505 cx: &mut Context<Self>,
2506 ) {
2507 match diff_bases_change {
2508 Some(DiffBasesChange::SetIndex(index)) => {
2509 self.index_text = index.map(|mut index| {
2510 text::LineEnding::normalize(&mut index);
2511 Arc::new(index)
2512 });
2513 self.index_changed = true;
2514 }
2515 Some(DiffBasesChange::SetHead(head)) => {
2516 self.head_text = head.map(|mut head| {
2517 text::LineEnding::normalize(&mut head);
2518 Arc::new(head)
2519 });
2520 self.head_changed = true;
2521 }
2522 Some(DiffBasesChange::SetBoth(text)) => {
2523 let text = text.map(|mut text| {
2524 text::LineEnding::normalize(&mut text);
2525 Arc::new(text)
2526 });
2527 self.head_text = text.clone();
2528 self.index_text = text;
2529 self.head_changed = true;
2530 self.index_changed = true;
2531 }
2532 Some(DiffBasesChange::SetEach { index, head }) => {
2533 self.index_text = index.map(|mut index| {
2534 text::LineEnding::normalize(&mut index);
2535 Arc::new(index)
2536 });
2537 self.index_changed = true;
2538 self.head_text = head.map(|mut head| {
2539 text::LineEnding::normalize(&mut head);
2540 Arc::new(head)
2541 });
2542 self.head_changed = true;
2543 }
2544 None => {}
2545 }
2546
2547 self.recalculate_diffs(buffer, cx)
2548 }
2549
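    /// Recomputes the unstaged and uncommitted diffs for the buffer on a background task,
    /// reusing the unstaged diff snapshot for the uncommitted diff when the index matches HEAD,
    /// and bailing out if new hunk staging operations arrive while the recalculation is running.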
2550 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2551 *self.recalculating_tx.borrow_mut() = true;
2552
2553 let language = self.language.clone();
2554 let language_registry = self.language_registry.clone();
2555 let unstaged_diff = self.unstaged_diff();
2556 let uncommitted_diff = self.uncommitted_diff();
2557 let head = self.head_text.clone();
2558 let index = self.index_text.clone();
2559 let index_changed = self.index_changed;
2560 let head_changed = self.head_changed;
2561 let language_changed = self.language_changed;
2562 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2563 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2564 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2565 (None, None) => true,
2566 _ => false,
2567 };
2568 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2569 log::debug!(
2570 "start recalculating diffs for buffer {}",
2571 buffer.remote_id()
2572 );
2573
2574 let mut new_unstaged_diff = None;
2575 if let Some(unstaged_diff) = &unstaged_diff {
2576 new_unstaged_diff = Some(
2577 BufferDiff::update_diff(
2578 unstaged_diff.clone(),
2579 buffer.clone(),
2580 index,
2581 index_changed,
2582 language_changed,
2583 language.clone(),
2584 language_registry.clone(),
2585 cx,
2586 )
2587 .await?,
2588 );
2589 }
2590
2591 let mut new_uncommitted_diff = None;
2592 if let Some(uncommitted_diff) = &uncommitted_diff {
2593 new_uncommitted_diff = if index_matches_head {
2594 new_unstaged_diff.clone()
2595 } else {
2596 Some(
2597 BufferDiff::update_diff(
2598 uncommitted_diff.clone(),
2599 buffer.clone(),
2600 head,
2601 head_changed,
2602 language_changed,
2603 language.clone(),
2604 language_registry.clone(),
2605 cx,
2606 )
2607 .await?,
2608 )
2609 }
2610 }
2611
2612 let cancel = this.update(cx, |this, _| {
2613 // This checks whether all pending stage/unstage operations
2614 // have quiesced (i.e. both the corresponding write and the
2615 // read of that write have completed). If not, then we cancel
2616 // this recalculation attempt to avoid invalidating pending
2617 // state too quickly; another recalculation will come along
2618 // later and clear the pending state once the state of the index has settled.
2619 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2620 *this.recalculating_tx.borrow_mut() = false;
2621 true
2622 } else {
2623 false
2624 }
2625 })?;
2626 if cancel {
                log::debug!(
                    "aborting diff recalculation for buffer {} due to subsequent hunk operations",
                    buffer.remote_id()
                );
2634 return Ok(());
2635 }
2636
2637 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2638 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2639 {
2640 unstaged_diff.update(cx, |diff, cx| {
2641 if language_changed {
2642 diff.language_changed(cx);
2643 }
2644 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2645 })?
2646 } else {
2647 None
2648 };
2649
2650 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2651 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2652 {
2653 uncommitted_diff.update(cx, |diff, cx| {
2654 if language_changed {
2655 diff.language_changed(cx);
2656 }
2657 diff.set_snapshot_with_secondary(
2658 new_uncommitted_diff,
2659 &buffer,
2660 unstaged_changed_range,
2661 true,
2662 cx,
2663 );
2664 })?;
2665 }
2666
2667 log::debug!(
2668 "finished recalculating diffs for buffer {}",
2669 buffer.remote_id()
2670 );
2671
2672 if let Some(this) = this.upgrade() {
2673 this.update(cx, |this, _| {
2674 this.index_changed = false;
2675 this.head_changed = false;
2676 this.language_changed = false;
2677 *this.recalculating_tx.borrow_mut() = false;
2678 })?;
2679 }
2680
2681 Ok(())
2682 }));
2683 }
2684}
2685
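/// Builds an askpass delegate that forwards authentication prompts to the downstream client
/// over RPC and sends the client's response back to the waiting git operation.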
2686fn make_remote_delegate(
2687 this: Entity<GitStore>,
2688 project_id: u64,
2689 repository_id: RepositoryId,
2690 askpass_id: u64,
2691 cx: &mut AsyncApp,
2692) -> AskPassDelegate {
2693 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2694 this.update(cx, |this, cx| {
2695 let Some((client, _)) = this.downstream_client() else {
2696 return;
2697 };
2698 let response = client.request(proto::AskPassRequest {
2699 project_id,
2700 repository_id: repository_id.to_proto(),
2701 askpass_id,
2702 prompt,
2703 });
2704 cx.spawn(async move |_, _| {
2705 tx.send(response.await?.response).ok();
2706 anyhow::Ok(())
2707 })
2708 .detach_and_log_err(cx);
2709 })
2710 .log_err();
2711 })
2712}
2713
2714impl RepositoryId {
2715 pub fn to_proto(self) -> u64 {
2716 self.0
2717 }
2718
2719 pub fn from_proto(id: u64) -> Self {
2720 RepositoryId(id)
2721 }
2722}
2723
2724impl RepositorySnapshot {
2725 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2726 Self {
2727 id,
2728 statuses_by_path: Default::default(),
2729 work_directory_abs_path,
2730 branch: None,
2731 head_commit: None,
2732 scan_id: 0,
2733 merge: Default::default(),
2734 remote_origin_url: None,
2735 remote_upstream_url: None,
2736 }
2737 }
2738
2739 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2740 proto::UpdateRepository {
2741 branch_summary: self.branch.as_ref().map(branch_to_proto),
2742 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2743 updated_statuses: self
2744 .statuses_by_path
2745 .iter()
2746 .map(|entry| entry.to_proto())
2747 .collect(),
2748 removed_statuses: Default::default(),
2749 current_merge_conflicts: self
2750 .merge
2751 .conflicted_paths
2752 .iter()
2753 .map(|repo_path| repo_path.to_proto())
2754 .collect(),
2755 project_id,
2756 id: self.id.to_proto(),
2757 abs_path: self.work_directory_abs_path.to_proto(),
2758 entry_ids: vec![self.id.to_proto()],
2759 scan_id: self.scan_id,
2760 is_last_update: true,
2761 }
2762 }
2763
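    /// Builds a `proto::UpdateRepository` delta against `old` by merging the two sorted status
    /// lists, collecting entries that were added or changed and paths whose status was removed.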
2764 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2765 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2766 let mut removed_statuses: Vec<String> = Vec::new();
2767
2768 let mut new_statuses = self.statuses_by_path.iter().peekable();
2769 let mut old_statuses = old.statuses_by_path.iter().peekable();
2770
2771 let mut current_new_entry = new_statuses.next();
2772 let mut current_old_entry = old_statuses.next();
2773 loop {
2774 match (current_new_entry, current_old_entry) {
2775 (Some(new_entry), Some(old_entry)) => {
2776 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2777 Ordering::Less => {
2778 updated_statuses.push(new_entry.to_proto());
2779 current_new_entry = new_statuses.next();
2780 }
2781 Ordering::Equal => {
2782 if new_entry.status != old_entry.status {
2783 updated_statuses.push(new_entry.to_proto());
2784 }
2785 current_old_entry = old_statuses.next();
2786 current_new_entry = new_statuses.next();
2787 }
2788 Ordering::Greater => {
2789 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2790 current_old_entry = old_statuses.next();
2791 }
2792 }
2793 }
2794 (None, Some(old_entry)) => {
2795 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2796 current_old_entry = old_statuses.next();
2797 }
2798 (Some(new_entry), None) => {
2799 updated_statuses.push(new_entry.to_proto());
2800 current_new_entry = new_statuses.next();
2801 }
2802 (None, None) => break,
2803 }
2804 }
2805
2806 proto::UpdateRepository {
2807 branch_summary: self.branch.as_ref().map(branch_to_proto),
2808 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2809 updated_statuses,
2810 removed_statuses,
2811 current_merge_conflicts: self
2812 .merge
2813 .conflicted_paths
2814 .iter()
2815 .map(|path| path.as_ref().to_proto())
2816 .collect(),
2817 project_id,
2818 id: self.id.to_proto(),
2819 abs_path: self.work_directory_abs_path.to_proto(),
2820 entry_ids: vec![],
2821 scan_id: self.scan_id,
2822 is_last_update: true,
2823 }
2824 }
2825
2826 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2827 self.statuses_by_path.iter().cloned()
2828 }
2829
2830 pub fn status_summary(&self) -> GitSummary {
2831 self.statuses_by_path.summary().item_summary
2832 }
2833
2834 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2835 self.statuses_by_path
2836 .get(&PathKey(path.0.clone()), &())
2837 .cloned()
2838 }
2839
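    /// Converts an absolute path to a path relative to this repository's work directory,
    /// returning `None` if the path lies outside of it.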
2840 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2841 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path)
2842 }
2843
2844 #[inline]
2845 fn abs_path_to_repo_path_inner(
2846 work_directory_abs_path: &Path,
2847 abs_path: &Path,
2848 ) -> Option<RepoPath> {
2849 abs_path
2850 .strip_prefix(&work_directory_abs_path)
2851 .map(RepoPath::from)
2852 .ok()
2853 }
2854
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge.conflicted_paths.contains(repo_path)
    }

    pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
        let had_conflict_on_last_merge_head_change =
            self.merge.conflicted_paths.contains(repo_path);
        let has_conflict_currently = self
            .status_for_path(repo_path)
            .is_some_and(|entry| entry.status.is_conflicted());
        had_conflict_on_last_merge_head_change || has_conflict_currently
    }
2867
2868 /// This is the name that will be displayed in the repository selector for this repository.
2869 pub fn display_name(&self) -> SharedString {
2870 self.work_directory_abs_path
2871 .file_name()
2872 .unwrap_or_default()
2873 .to_string_lossy()
2874 .to_string()
2875 .into()
2876 }
2877}
2878
2879impl MergeDetails {
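    /// Loads the current merge state: the merge message, the heads of any in-progress
    /// operation (merge, cherry-pick, rebase, revert, apply), and the set of conflicted paths.
    /// Also returns whether the merge heads changed relative to the previous snapshot.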
2880 async fn load(
2881 backend: &Arc<dyn GitRepository>,
2882 status: &SumTree<StatusEntry>,
2883 prev_snapshot: &RepositorySnapshot,
2884 ) -> Result<(MergeDetails, bool)> {
2885 log::debug!("load merge details");
2886 let message = backend.merge_message().await;
2887 let heads = backend
2888 .revparse_batch(vec![
2889 "MERGE_HEAD".into(),
2890 "CHERRY_PICK_HEAD".into(),
2891 "REBASE_HEAD".into(),
2892 "REVERT_HEAD".into(),
2893 "APPLY_HEAD".into(),
2894 ])
2895 .await
2896 .log_err()
2897 .unwrap_or_default()
2898 .into_iter()
2899 .map(|opt| opt.map(SharedString::from))
2900 .collect::<Vec<_>>();
2901 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2902 let conflicted_paths = if merge_heads_changed {
2903 let current_conflicted_paths = TreeSet::from_ordered_entries(
2904 status
2905 .iter()
2906 .filter(|entry| entry.status.is_conflicted())
2907 .map(|entry| entry.repo_path.clone()),
2908 );
2909
2910 // It can happen that we run a scan while a lengthy merge is in progress
2911 // that will eventually result in conflicts, but before those conflicts
2912 // are reported by `git status`. Since for the moment we only care about
2913 // the merge heads state for the purposes of tracking conflicts, don't update
2914 // this state until we see some conflicts.
2915 if heads.iter().any(Option::is_some)
2916 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
2917 && current_conflicted_paths.is_empty()
2918 {
2919 log::debug!("not updating merge heads because no conflicts found");
2920 return Ok((
2921 MergeDetails {
2922 message: message.map(SharedString::from),
2923 ..prev_snapshot.merge.clone()
2924 },
2925 false,
2926 ));
2927 }
2928
2929 current_conflicted_paths
2930 } else {
2931 prev_snapshot.merge.conflicted_paths.clone()
2932 };
2933 let details = MergeDetails {
2934 conflicted_paths,
2935 message: message.map(SharedString::from),
2936 heads,
2937 };
2938 Ok((details, merge_heads_changed))
2939 }
2940}
2941
2942impl Repository {
2943 pub fn snapshot(&self) -> RepositorySnapshot {
2944 self.snapshot.clone()
2945 }
2946
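    /// Creates a repository backed by a local git working copy, spawning a dedicated worker
    /// task that executes git operations for it.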
2947 fn local(
2948 id: RepositoryId,
2949 work_directory_abs_path: Arc<Path>,
2950 dot_git_abs_path: Arc<Path>,
2951 repository_dir_abs_path: Arc<Path>,
2952 common_dir_abs_path: Arc<Path>,
2953 project_environment: WeakEntity<ProjectEnvironment>,
2954 fs: Arc<dyn Fs>,
2955 git_store: WeakEntity<GitStore>,
2956 cx: &mut Context<Self>,
2957 ) -> Self {
2958 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2959 Repository {
2960 this: cx.weak_entity(),
2961 git_store,
2962 snapshot,
2963 commit_message_buffer: None,
2964 askpass_delegates: Default::default(),
2965 paths_needing_status_update: Default::default(),
2966 latest_askpass_id: 0,
2967 job_sender: Repository::spawn_local_git_worker(
2968 work_directory_abs_path,
2969 dot_git_abs_path,
2970 repository_dir_abs_path,
2971 common_dir_abs_path,
2972 project_environment,
2973 fs,
2974 cx,
2975 ),
2976 job_id: 0,
2977 active_jobs: Default::default(),
2978 }
2979 }
2980
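    /// Creates a repository that forwards all git operations to a remote peer over RPC.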
2981 fn remote(
2982 id: RepositoryId,
2983 work_directory_abs_path: Arc<Path>,
2984 project_id: ProjectId,
2985 client: AnyProtoClient,
2986 git_store: WeakEntity<GitStore>,
2987 cx: &mut Context<Self>,
2988 ) -> Self {
2989 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2990 Self {
2991 this: cx.weak_entity(),
2992 snapshot,
2993 commit_message_buffer: None,
2994 git_store,
2995 paths_needing_status_update: Default::default(),
2996 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2997 askpass_delegates: Default::default(),
2998 latest_askpass_id: 0,
2999 active_jobs: Default::default(),
3000 job_id: 0,
3001 }
3002 }
3003
3004 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3005 self.git_store.upgrade()
3006 }
3007
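    /// Reloads the index and HEAD base texts for every open buffer that belongs to this
    /// repository, then applies any changes to the buffers' diff state and forwards them to
    /// downstream clients.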
3008 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3009 let this = cx.weak_entity();
3010 let git_store = self.git_store.clone();
3011 let _ = self.send_keyed_job(
3012 Some(GitJobKey::ReloadBufferDiffBases),
3013 None,
3014 |state, mut cx| async move {
3015 let RepositoryState::Local { backend, .. } = state else {
3016 log::error!("tried to recompute diffs for a non-local repository");
3017 return Ok(());
3018 };
3019
3020 let Some(this) = this.upgrade() else {
3021 return Ok(());
3022 };
3023
3024 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3025 git_store.update(cx, |git_store, cx| {
3026 git_store
3027 .diffs
3028 .iter()
3029 .filter_map(|(buffer_id, diff_state)| {
3030 let buffer_store = git_store.buffer_store.read(cx);
3031 let buffer = buffer_store.get(*buffer_id)?;
3032 let file = File::from_dyn(buffer.read(cx).file())?;
3033 let abs_path =
3034 file.worktree.read(cx).absolutize(&file.path).ok()?;
3035 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3036 log::debug!(
3037 "start reload diff bases for repo path {}",
3038 repo_path.0.display()
3039 );
3040 diff_state.update(cx, |diff_state, _| {
3041 let has_unstaged_diff = diff_state
3042 .unstaged_diff
3043 .as_ref()
3044 .is_some_and(|diff| diff.is_upgradable());
3045 let has_uncommitted_diff = diff_state
3046 .uncommitted_diff
3047 .as_ref()
3048 .is_some_and(|set| set.is_upgradable());
3049
3050 Some((
3051 buffer,
3052 repo_path,
3053 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3054 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3055 ))
3056 })
3057 })
3058 .collect::<Vec<_>>()
3059 })
3060 })??;
3061
3062 let buffer_diff_base_changes = cx
3063 .background_spawn(async move {
3064 let mut changes = Vec::new();
3065 for (buffer, repo_path, current_index_text, current_head_text) in
3066 &repo_diff_state_updates
3067 {
3068 let index_text = if current_index_text.is_some() {
3069 backend.load_index_text(repo_path.clone()).await
3070 } else {
3071 None
3072 };
3073 let head_text = if current_head_text.is_some() {
3074 backend.load_committed_text(repo_path.clone()).await
3075 } else {
3076 None
3077 };
3078
3079 let change =
3080 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3081 (Some(current_index), Some(current_head)) => {
3082 let index_changed =
3083 index_text.as_ref() != current_index.as_deref();
3084 let head_changed =
3085 head_text.as_ref() != current_head.as_deref();
3086 if index_changed && head_changed {
3087 if index_text == head_text {
3088 Some(DiffBasesChange::SetBoth(head_text))
3089 } else {
3090 Some(DiffBasesChange::SetEach {
3091 index: index_text,
3092 head: head_text,
3093 })
3094 }
3095 } else if index_changed {
3096 Some(DiffBasesChange::SetIndex(index_text))
3097 } else if head_changed {
3098 Some(DiffBasesChange::SetHead(head_text))
3099 } else {
3100 None
3101 }
3102 }
3103 (Some(current_index), None) => {
3104 let index_changed =
3105 index_text.as_ref() != current_index.as_deref();
3106 index_changed
3107 .then_some(DiffBasesChange::SetIndex(index_text))
3108 }
3109 (None, Some(current_head)) => {
3110 let head_changed =
3111 head_text.as_ref() != current_head.as_deref();
3112 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3113 }
3114 (None, None) => None,
3115 };
3116
3117 changes.push((buffer.clone(), change))
3118 }
3119 changes
3120 })
3121 .await;
3122
3123 git_store.update(&mut cx, |git_store, cx| {
3124 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3125 let buffer_snapshot = buffer.read(cx).text_snapshot();
3126 let buffer_id = buffer_snapshot.remote_id();
3127 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3128 continue;
3129 };
3130
3131 let downstream_client = git_store.downstream_client();
3132 diff_state.update(cx, |diff_state, cx| {
3133 use proto::update_diff_bases::Mode;
3134
3135 if let Some((diff_bases_change, (client, project_id))) =
3136 diff_bases_change.clone().zip(downstream_client)
3137 {
3138 let (staged_text, committed_text, mode) = match diff_bases_change {
3139 DiffBasesChange::SetIndex(index) => {
3140 (index, None, Mode::IndexOnly)
3141 }
3142 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3143 DiffBasesChange::SetEach { index, head } => {
3144 (index, head, Mode::IndexAndHead)
3145 }
3146 DiffBasesChange::SetBoth(text) => {
3147 (None, text, Mode::IndexMatchesHead)
3148 }
3149 };
3150 client
3151 .send(proto::UpdateDiffBases {
3152 project_id: project_id.to_proto(),
3153 buffer_id: buffer_id.to_proto(),
3154 staged_text,
3155 committed_text,
3156 mode: mode as i32,
3157 })
3158 .log_err();
3159 }
3160
3161 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3162 });
3163 }
3164 })
3165 },
3166 );
3167 }
3168
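    /// Enqueues a job on this repository's git worker. If `status` is provided, the job is
    /// shown as an active operation while it runs.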
3169 pub fn send_job<F, Fut, R>(
3170 &mut self,
3171 status: Option<SharedString>,
3172 job: F,
3173 ) -> oneshot::Receiver<R>
3174 where
3175 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3176 Fut: Future<Output = R> + 'static,
3177 R: Send + 'static,
3178 {
3179 self.send_keyed_job(None, status, job)
3180 }
3181
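    /// Like [`Self::send_job`], but tags the job with an optional key so the git worker can
    /// recognize related jobs, such as successive index writes for the same path.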
3182 fn send_keyed_job<F, Fut, R>(
3183 &mut self,
3184 key: Option<GitJobKey>,
3185 status: Option<SharedString>,
3186 job: F,
3187 ) -> oneshot::Receiver<R>
3188 where
3189 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3190 Fut: Future<Output = R> + 'static,
3191 R: Send + 'static,
3192 {
3193 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3194 let job_id = post_inc(&mut self.job_id);
3195 let this = self.this.clone();
3196 self.job_sender
3197 .unbounded_send(GitJob {
3198 key,
3199 job: Box::new(move |state, cx: &mut AsyncApp| {
3200 let job = job(state, cx.clone());
3201 cx.spawn(async move |cx| {
3202 if let Some(s) = status.clone() {
3203 this.update(cx, |this, cx| {
3204 this.active_jobs.insert(
3205 job_id,
3206 JobInfo {
3207 start: Instant::now(),
3208 message: s.clone(),
3209 },
3210 );
3211
3212 cx.notify();
3213 })
3214 .ok();
3215 }
3216 let result = job.await;
3217
3218 this.update(cx, |this, cx| {
3219 this.active_jobs.remove(&job_id);
3220 cx.notify();
3221 })
3222 .ok();
3223
3224 result_tx.send(result).ok();
3225 })
3226 }),
3227 })
3228 .ok();
3229 result_rx
3230 }
3231
3232 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3233 let Some(git_store) = self.git_store.upgrade() else {
3234 return;
3235 };
3236 let entity = cx.entity();
3237 git_store.update(cx, |git_store, cx| {
3238 let Some((&id, _)) = git_store
3239 .repositories
3240 .iter()
3241 .find(|(_, handle)| *handle == &entity)
3242 else {
3243 return;
3244 };
3245 git_store.active_repo_id = Some(id);
3246 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3247 });
3248 }
3249
3250 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3251 self.snapshot.status()
3252 }
3253
3254 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3255 let git_store = self.git_store.upgrade()?;
3256 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3257 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
3258 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3259 Some(ProjectPath {
3260 worktree_id: worktree.read(cx).id(),
3261 path: relative_path.into(),
3262 })
3263 }
3264
3265 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3266 let git_store = self.git_store.upgrade()?;
3267 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3268 let abs_path = worktree_store.absolutize(path, cx)?;
3269 self.snapshot.abs_path_to_repo_path(&abs_path)
3270 }
3271
3272 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3273 other
3274 .read(cx)
3275 .snapshot
3276 .work_directory_abs_path
3277 .starts_with(&self.snapshot.work_directory_abs_path)
3278 }
3279
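    /// Returns the buffer used for editing this repository's commit message, creating it on
    /// first use (locally, or via the remote peer) and caching it for subsequent calls.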
3280 pub fn open_commit_buffer(
3281 &mut self,
3282 languages: Option<Arc<LanguageRegistry>>,
3283 buffer_store: Entity<BufferStore>,
3284 cx: &mut Context<Self>,
3285 ) -> Task<Result<Entity<Buffer>>> {
3286 let id = self.id;
3287 if let Some(buffer) = self.commit_message_buffer.clone() {
3288 return Task::ready(Ok(buffer));
3289 }
3290 let this = cx.weak_entity();
3291
3292 let rx = self.send_job(None, move |state, mut cx| async move {
3293 let Some(this) = this.upgrade() else {
3294 bail!("git store was dropped");
3295 };
3296 match state {
3297 RepositoryState::Local { .. } => {
3298 this.update(&mut cx, |_, cx| {
3299 Self::open_local_commit_buffer(languages, buffer_store, cx)
3300 })?
3301 .await
3302 }
3303 RepositoryState::Remote { project_id, client } => {
3304 let request = client.request(proto::OpenCommitMessageBuffer {
3305 project_id: project_id.0,
3306 repository_id: id.to_proto(),
3307 });
3308 let response = request.await.context("requesting to open commit buffer")?;
3309 let buffer_id = BufferId::new(response.buffer_id)?;
3310 let buffer = buffer_store
3311 .update(&mut cx, |buffer_store, cx| {
3312 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3313 })?
3314 .await?;
3315 if let Some(language_registry) = languages {
3316 let git_commit_language =
3317 language_registry.language_for_name("Git Commit").await?;
3318 buffer.update(&mut cx, |buffer, cx| {
3319 buffer.set_language(Some(git_commit_language), cx);
3320 })?;
3321 }
3322 this.update(&mut cx, |this, _| {
3323 this.commit_message_buffer = Some(buffer.clone());
3324 })?;
3325 Ok(buffer)
3326 }
3327 }
3328 });
3329
3330 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3331 }
3332
3333 fn open_local_commit_buffer(
3334 language_registry: Option<Arc<LanguageRegistry>>,
3335 buffer_store: Entity<BufferStore>,
3336 cx: &mut Context<Self>,
3337 ) -> Task<Result<Entity<Buffer>>> {
3338 cx.spawn(async move |repository, cx| {
3339 let buffer = buffer_store
3340 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
3341 .await?;
3342
3343 if let Some(language_registry) = language_registry {
3344 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3345 buffer.update(cx, |buffer, cx| {
3346 buffer.set_language(Some(git_commit_language), cx);
3347 })?;
3348 }
3349
3350 repository.update(cx, |repository, _| {
3351 repository.commit_message_buffer = Some(buffer.clone());
3352 })?;
3353 Ok(buffer)
3354 })
3355 }
3356
3357 pub fn checkout_files(
3358 &mut self,
3359 commit: &str,
3360 paths: Vec<RepoPath>,
3361 _cx: &mut App,
3362 ) -> oneshot::Receiver<Result<()>> {
3363 let commit = commit.to_string();
3364 let id = self.id;
3365
3366 self.send_job(
3367 Some(format!("git checkout {}", commit).into()),
3368 move |git_repo, _| async move {
3369 match git_repo {
3370 RepositoryState::Local {
3371 backend,
3372 environment,
3373 ..
3374 } => {
3375 backend
3376 .checkout_files(commit, paths, environment.clone())
3377 .await
3378 }
3379 RepositoryState::Remote { project_id, client } => {
3380 client
3381 .request(proto::GitCheckoutFiles {
3382 project_id: project_id.0,
3383 repository_id: id.to_proto(),
3384 commit,
3385 paths: paths
3386 .into_iter()
3387 .map(|p| p.to_string_lossy().to_string())
3388 .collect(),
3389 })
3390 .await?;
3391
3392 Ok(())
3393 }
3394 }
3395 },
3396 )
3397 }
3398
3399 pub fn reset(
3400 &mut self,
3401 commit: String,
3402 reset_mode: ResetMode,
3403 _cx: &mut App,
3404 ) -> oneshot::Receiver<Result<()>> {
3406 let id = self.id;
3407
3408 self.send_job(None, move |git_repo, _| async move {
3409 match git_repo {
3410 RepositoryState::Local {
3411 backend,
3412 environment,
3413 ..
3414 } => backend.reset(commit, reset_mode, environment).await,
3415 RepositoryState::Remote { project_id, client } => {
3416 client
3417 .request(proto::GitReset {
3418 project_id: project_id.0,
3419 repository_id: id.to_proto(),
3420 commit,
3421 mode: match reset_mode {
3422 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3423 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3424 },
3425 })
3426 .await?;
3427
3428 Ok(())
3429 }
3430 }
3431 })
3432 }
3433
3434 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3435 let id = self.id;
3436 self.send_job(None, move |git_repo, _cx| async move {
3437 match git_repo {
3438 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3439 RepositoryState::Remote { project_id, client } => {
3440 let resp = client
3441 .request(proto::GitShow {
3442 project_id: project_id.0,
3443 repository_id: id.to_proto(),
3444 commit,
3445 })
3446 .await?;
3447
3448 Ok(CommitDetails {
3449 sha: resp.sha.into(),
3450 message: resp.message.into(),
3451 commit_timestamp: resp.commit_timestamp,
3452 author_email: resp.author_email.into(),
3453 author_name: resp.author_name.into(),
3454 })
3455 }
3456 }
3457 })
3458 }
3459
3460 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3461 let id = self.id;
3462 self.send_job(None, move |git_repo, cx| async move {
3463 match git_repo {
3464 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3465 RepositoryState::Remote {
3466 client, project_id, ..
3467 } => {
3468 let response = client
3469 .request(proto::LoadCommitDiff {
3470 project_id: project_id.0,
3471 repository_id: id.to_proto(),
3472 commit,
3473 })
3474 .await?;
3475 Ok(CommitDiff {
3476 files: response
3477 .files
3478 .into_iter()
3479 .map(|file| CommitFile {
3480 path: Path::new(&file.path).into(),
3481 old_text: file.old_text,
3482 new_text: file.new_text,
3483 })
3484 .collect(),
3485 })
3486 }
3487 }
3488 })
3489 }
3490
3491 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3492 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3493 }
3494
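    /// Stages the given paths, first saving any corresponding open buffers so that the contents
    /// on disk match what gets staged.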
3495 pub fn stage_entries(
3496 &self,
3497 entries: Vec<RepoPath>,
3498 cx: &mut Context<Self>,
3499 ) -> Task<anyhow::Result<()>> {
3500 if entries.is_empty() {
3501 return Task::ready(Ok(()));
3502 }
3503 let id = self.id;
3504
3505 let mut save_futures = Vec::new();
3506 if let Some(buffer_store) = self.buffer_store(cx) {
3507 buffer_store.update(cx, |buffer_store, cx| {
3508 for path in &entries {
3509 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3510 continue;
3511 };
3512 if let Some(buffer) = buffer_store.get_by_path(&project_path) {
3513 if buffer
3514 .read(cx)
3515 .file()
                            .is_some_and(|file| file.disk_state().exists())
3517 {
3518 save_futures.push(buffer_store.save_buffer(buffer, cx));
3519 }
3520 }
3521 }
3522 })
3523 }
3524
3525 cx.spawn(async move |this, cx| {
3526 for save_future in save_futures {
3527 save_future.await?;
3528 }
3529
3530 this.update(cx, |this, _| {
3531 this.send_job(None, move |git_repo, _cx| async move {
3532 match git_repo {
3533 RepositoryState::Local {
3534 backend,
3535 environment,
3536 ..
3537 } => backend.stage_paths(entries, environment.clone()).await,
3538 RepositoryState::Remote { project_id, client } => {
3539 client
3540 .request(proto::Stage {
3541 project_id: project_id.0,
3542 repository_id: id.to_proto(),
3543 paths: entries
3544 .into_iter()
3545 .map(|repo_path| repo_path.as_ref().to_proto())
3546 .collect(),
3547 })
3548 .await
3549 .context("sending stage request")?;
3550
3551 Ok(())
3552 }
3553 }
3554 })
3555 })?
3556 .await??;
3557
3558 Ok(())
3559 })
3560 }
3561
3562 pub fn unstage_entries(
3563 &self,
3564 entries: Vec<RepoPath>,
3565 cx: &mut Context<Self>,
3566 ) -> Task<anyhow::Result<()>> {
3567 if entries.is_empty() {
3568 return Task::ready(Ok(()));
3569 }
3570 let id = self.id;
3571
3572 let mut save_futures = Vec::new();
3573 if let Some(buffer_store) = self.buffer_store(cx) {
3574 buffer_store.update(cx, |buffer_store, cx| {
3575 for path in &entries {
3576 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3577 continue;
3578 };
3579 if let Some(buffer) = buffer_store.get_by_path(&project_path) {
3580 if buffer
3581 .read(cx)
3582 .file()
                            .is_some_and(|file| file.disk_state().exists())
3584 {
3585 save_futures.push(buffer_store.save_buffer(buffer, cx));
3586 }
3587 }
3588 }
3589 })
3590 }
3591
3592 cx.spawn(async move |this, cx| {
3593 for save_future in save_futures {
3594 save_future.await?;
3595 }
3596
3597 this.update(cx, |this, _| {
3598 this.send_job(None, move |git_repo, _cx| async move {
3599 match git_repo {
3600 RepositoryState::Local {
3601 backend,
3602 environment,
3603 ..
3604 } => backend.unstage_paths(entries, environment).await,
3605 RepositoryState::Remote { project_id, client } => {
3606 client
3607 .request(proto::Unstage {
3608 project_id: project_id.0,
3609 repository_id: id.to_proto(),
3610 paths: entries
3611 .into_iter()
3612 .map(|repo_path| repo_path.as_ref().to_proto())
3613 .collect(),
3614 })
3615 .await
3616 .context("sending unstage request")?;
3617
3618 Ok(())
3619 }
3620 }
3621 })
3622 })?
3623 .await??;
3624
3625 Ok(())
3626 })
3627 }
3628
3629 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3630 let to_stage = self
3631 .cached_status()
3632 .filter(|entry| !entry.status.staging().is_fully_staged())
3633 .map(|entry| entry.repo_path.clone())
3634 .collect();
3635 self.stage_entries(to_stage, cx)
3636 }
3637
3638 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3639 let to_unstage = self
3640 .cached_status()
3641 .filter(|entry| entry.status.staging().has_staged())
3642 .map(|entry| entry.repo_path.clone())
3643 .collect();
3644 self.unstage_entries(to_unstage, cx)
3645 }
3646
3647 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3648 let to_stash = self
3649 .cached_status()
3650 .map(|entry| entry.repo_path.clone())
3651 .collect();
3652
3653 self.stash_entries(to_stash, cx)
3654 }
3655
3656 pub fn stash_entries(
3657 &mut self,
3658 entries: Vec<RepoPath>,
3659 cx: &mut Context<Self>,
3660 ) -> Task<anyhow::Result<()>> {
3661 let id = self.id;
3662
3663 cx.spawn(async move |this, cx| {
3664 this.update(cx, |this, _| {
3665 this.send_job(None, move |git_repo, _cx| async move {
3666 match git_repo {
3667 RepositoryState::Local {
3668 backend,
3669 environment,
3670 ..
3671 } => backend.stash_paths(entries, environment).await,
3672 RepositoryState::Remote { project_id, client } => {
3673 client
3674 .request(proto::Stash {
3675 project_id: project_id.0,
3676 repository_id: id.to_proto(),
3677 paths: entries
3678 .into_iter()
3679 .map(|repo_path| repo_path.as_ref().to_proto())
3680 .collect(),
3681 })
3682 .await
3683 .context("sending stash request")?;
3684 Ok(())
3685 }
3686 }
3687 })
3688 })?
3689 .await??;
3690 Ok(())
3691 })
3692 }
3693
3694 pub fn stash_pop(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3695 let id = self.id;
3696 cx.spawn(async move |this, cx| {
3697 this.update(cx, |this, _| {
3698 this.send_job(None, move |git_repo, _cx| async move {
3699 match git_repo {
3700 RepositoryState::Local {
3701 backend,
3702 environment,
3703 ..
3704 } => backend.stash_pop(environment).await,
3705 RepositoryState::Remote { project_id, client } => {
3706 client
3707 .request(proto::StashPop {
3708 project_id: project_id.0,
3709 repository_id: id.to_proto(),
3710 })
3711 .await
3712 .context("sending stash pop request")?;
3713 Ok(())
3714 }
3715 }
3716 })
3717 })?
3718 .await??;
3719 Ok(())
3720 })
3721 }
3722
3723 pub fn commit(
3724 &mut self,
3725 message: SharedString,
3726 name_and_email: Option<(SharedString, SharedString)>,
3727 options: CommitOptions,
3728 _cx: &mut App,
3729 ) -> oneshot::Receiver<Result<()>> {
3730 let id = self.id;
3731
3732 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3733 match git_repo {
3734 RepositoryState::Local {
3735 backend,
3736 environment,
3737 ..
3738 } => {
3739 backend
3740 .commit(message, name_and_email, options, environment)
3741 .await
3742 }
3743 RepositoryState::Remote { project_id, client } => {
3744 let (name, email) = name_and_email.unzip();
3745 client
3746 .request(proto::Commit {
3747 project_id: project_id.0,
3748 repository_id: id.to_proto(),
3749 message: String::from(message),
3750 name: name.map(String::from),
3751 email: email.map(String::from),
3752 options: Some(proto::commit::CommitOptions {
3753 amend: options.amend,
3754 signoff: options.signoff,
3755 }),
3756 })
3757 .await
3758 .context("sending commit request")?;
3759
3760 Ok(())
3761 }
3762 }
3763 })
3764 }
3765
3766 pub fn fetch(
3767 &mut self,
3768 fetch_options: FetchOptions,
3769 askpass: AskPassDelegate,
3770 _cx: &mut App,
3771 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3772 let askpass_delegates = self.askpass_delegates.clone();
3773 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3774 let id = self.id;
3775
3776 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3777 match git_repo {
3778 RepositoryState::Local {
3779 backend,
3780 environment,
3781 ..
3782 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3783 RepositoryState::Remote { project_id, client } => {
3784 askpass_delegates.lock().insert(askpass_id, askpass);
3785 let _defer = util::defer(|| {
3786 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3787 debug_assert!(askpass_delegate.is_some());
3788 });
3789
3790 let response = client
3791 .request(proto::Fetch {
3792 project_id: project_id.0,
3793 repository_id: id.to_proto(),
3794 askpass_id,
3795 remote: fetch_options.to_proto(),
3796 })
3797 .await
3798 .context("sending fetch request")?;
3799
3800 Ok(RemoteCommandOutput {
3801 stdout: response.stdout,
3802 stderr: response.stderr,
3803 })
3804 }
3805 }
3806 })
3807 }
3808
3809 pub fn push(
3810 &mut self,
3811 branch: SharedString,
3812 remote: SharedString,
3813 options: Option<PushOptions>,
3814 askpass: AskPassDelegate,
3815 cx: &mut Context<Self>,
3816 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3817 let askpass_delegates = self.askpass_delegates.clone();
3818 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3819 let id = self.id;
3820
3821 let args = options
3822 .map(|option| match option {
3823 PushOptions::SetUpstream => " --set-upstream",
3824 PushOptions::Force => " --force-with-lease",
3825 })
3826 .unwrap_or("");
3827
3828 let updates_tx = self
3829 .git_store()
3830 .and_then(|git_store| match &git_store.read(cx).state {
3831 GitStoreState::Local { downstream, .. } => downstream
3832 .as_ref()
3833 .map(|downstream| downstream.updates_tx.clone()),
3834 _ => None,
3835 });
3836
3837 let this = cx.weak_entity();
3838 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
3840 move |git_repo, mut cx| async move {
3841 match git_repo {
3842 RepositoryState::Local {
3843 backend,
3844 environment,
3845 ..
3846 } => {
3847 let result = backend
3848 .push(
3849 branch.to_string(),
3850 remote.to_string(),
3851 options,
3852 askpass,
3853 environment.clone(),
3854 cx.clone(),
3855 )
3856 .await;
3857 if result.is_ok() {
3858 let branches = backend.branches().await?;
3859 let branch = branches.into_iter().find(|branch| branch.is_head);
3860 log::info!("head branch after scan is {branch:?}");
3861 let snapshot = this.update(&mut cx, |this, cx| {
3862 this.snapshot.branch = branch;
3863 let snapshot = this.snapshot.clone();
3864 cx.emit(RepositoryEvent::Updated {
3865 full_scan: false,
3866 new_instance: false,
3867 });
3868 snapshot
3869 })?;
3870 if let Some(updates_tx) = updates_tx {
3871 updates_tx
3872 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3873 .ok();
3874 }
3875 }
3876 result
3877 }
3878 RepositoryState::Remote { project_id, client } => {
3879 askpass_delegates.lock().insert(askpass_id, askpass);
3880 let _defer = util::defer(|| {
3881 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3882 debug_assert!(askpass_delegate.is_some());
3883 });
3884 let response = client
3885 .request(proto::Push {
3886 project_id: project_id.0,
3887 repository_id: id.to_proto(),
3888 askpass_id,
3889 branch_name: branch.to_string(),
3890 remote_name: remote.to_string(),
3891 options: options.map(|options| match options {
3892 PushOptions::Force => proto::push::PushOptions::Force,
3893 PushOptions::SetUpstream => {
3894 proto::push::PushOptions::SetUpstream
3895 }
3896 }
3897 as i32),
3898 })
3899 .await
3900 .context("sending push request")?;
3901
3902 Ok(RemoteCommandOutput {
3903 stdout: response.stdout,
3904 stderr: response.stderr,
3905 })
3906 }
3907 }
3908 },
3909 )
3910 }
3911
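    /// Pulls `branch` from `remote`, either via the local git backend or by
    /// forwarding a `proto::Pull` request to the host. Credential prompts go
    /// through the provided askpass delegate.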
3912 pub fn pull(
3913 &mut self,
3914 branch: SharedString,
3915 remote: SharedString,
3916 askpass: AskPassDelegate,
3917 _cx: &mut App,
3918 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3919 let askpass_delegates = self.askpass_delegates.clone();
3920 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3921 let id = self.id;
3922
3923 self.send_job(
3924 Some(format!("git pull {} {}", remote, branch).into()),
3925 move |git_repo, cx| async move {
3926 match git_repo {
3927 RepositoryState::Local {
3928 backend,
3929 environment,
3930 ..
3931 } => {
3932 backend
3933 .pull(
3934 branch.to_string(),
3935 remote.to_string(),
3936 askpass,
3937 environment.clone(),
3938 cx,
3939 )
3940 .await
3941 }
3942 RepositoryState::Remote { project_id, client } => {
3943 askpass_delegates.lock().insert(askpass_id, askpass);
3944 let _defer = util::defer(|| {
3945 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3946 debug_assert!(askpass_delegate.is_some());
3947 });
3948 let response = client
3949 .request(proto::Pull {
3950 project_id: project_id.0,
3951 repository_id: id.to_proto(),
3952 askpass_id,
3953 branch_name: branch.to_string(),
3954 remote_name: remote.to_string(),
3955 })
3956 .await
3957 .context("sending pull request")?;
3958
3959 Ok(RemoteCommandOutput {
3960 stdout: response.stdout,
3961 stderr: response.stderr,
3962 })
3963 }
3964 }
3965 },
3966 )
3967 }
3968
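    /// Queues a write of `content` as the index (staged) text for `path`.
    /// Jobs are keyed by `GitJobKey::WriteIndex(path)`, so if several writes
    /// for the same path pile up while the worker is busy, only the most
    /// recent one actually runs. When `hunk_staging_operation_count` is
    /// provided, it is recorded on the buffer's diff state after the write so
    /// that later index reads can be reconciled with in-flight hunk staging
    /// operations.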
3969 fn spawn_set_index_text_job(
3970 &mut self,
3971 path: RepoPath,
3972 content: Option<String>,
3973 hunk_staging_operation_count: Option<usize>,
3974 cx: &mut Context<Self>,
3975 ) -> oneshot::Receiver<anyhow::Result<()>> {
3976 let id = self.id;
3977 let this = cx.weak_entity();
3978 let git_store = self.git_store.clone();
3979 self.send_keyed_job(
3980 Some(GitJobKey::WriteIndex(path.clone())),
3981 None,
3982 move |git_repo, mut cx| async move {
3983 log::debug!("start updating index text for buffer {}", path.display());
3984 match git_repo {
3985 RepositoryState::Local {
3986 backend,
3987 environment,
3988 ..
3989 } => {
3990 backend
3991 .set_index_text(path.clone(), content, environment.clone())
3992 .await?;
3993 }
3994 RepositoryState::Remote { project_id, client } => {
3995 client
3996 .request(proto::SetIndexText {
3997 project_id: project_id.0,
3998 repository_id: id.to_proto(),
3999 path: path.as_ref().to_proto(),
4000 text: content,
4001 })
4002 .await?;
4003 }
4004 }
4005 log::debug!("finish updating index text for buffer {}", path.display());
4006
4007 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4008 let project_path = this
4009 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4010 .ok()
4011 .flatten();
4012 git_store.update(&mut cx, |git_store, cx| {
4013 let buffer_id = git_store
4014 .buffer_store
4015 .read(cx)
4016 .get_by_path(&project_path?)?
4017 .read(cx)
4018 .remote_id();
4019 let diff_state = git_store.diffs.get(&buffer_id)?;
4020 diff_state.update(cx, |diff_state, _| {
4021 diff_state.hunk_staging_operation_count_as_of_write =
4022 hunk_staging_operation_count;
4023 });
4024 Some(())
4025 })?;
4026 }
4027 Ok(())
4028 },
4029 )
4030 }
4031
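    /// Lists the repository's remotes, optionally filtered by `branch_name`.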
4032 pub fn get_remotes(
4033 &mut self,
4034 branch_name: Option<String>,
4035 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4036 let id = self.id;
4037 self.send_job(None, move |repo, _cx| async move {
4038 match repo {
4039 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4040 RepositoryState::Remote { project_id, client } => {
4041 let response = client
4042 .request(proto::GetRemotes {
4043 project_id: project_id.0,
4044 repository_id: id.to_proto(),
4045 branch_name,
4046 })
4047 .await?;
4048
4049 let remotes = response
4050 .remotes
4051 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
4055 .collect();
4056
4057 Ok(remotes)
4058 }
4059 }
4060 })
4061 }
4062
4063 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4064 let id = self.id;
4065 self.send_job(None, move |repo, _| async move {
4066 match repo {
4067 RepositoryState::Local { backend, .. } => backend.branches().await,
4068 RepositoryState::Remote { project_id, client } => {
4069 let response = client
4070 .request(proto::GitGetBranches {
4071 project_id: project_id.0,
4072 repository_id: id.to_proto(),
4073 })
4074 .await?;
4075
4076 let branches = response
4077 .branches
4078 .into_iter()
4079 .map(|branch| proto_to_branch(&branch))
4080 .collect();
4081
4082 Ok(branches)
4083 }
4084 }
4085 })
4086 }
4087
4088 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4089 let id = self.id;
4090 self.send_job(None, move |repo, _| async move {
4091 match repo {
4092 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4093 RepositoryState::Remote { project_id, client } => {
4094 let response = client
4095 .request(proto::GetDefaultBranch {
4096 project_id: project_id.0,
4097 repository_id: id.to_proto(),
4098 })
4099 .await?;
4100
4101 anyhow::Ok(response.branch.map(SharedString::from))
4102 }
4103 }
4104 })
4105 }
4106
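    /// Produces a unified diff as a string: HEAD vs. the index
    /// (`DiffType::HeadToIndex`) or HEAD vs. the working tree
    /// (`DiffType::HeadToWorktree`).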
4107 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4108 let id = self.id;
4109 self.send_job(None, move |repo, _cx| async move {
4110 match repo {
4111 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4112 RepositoryState::Remote { project_id, client } => {
4113 let response = client
4114 .request(proto::GitDiff {
4115 project_id: project_id.0,
4116 repository_id: id.to_proto(),
4117 diff_type: match diff_type {
4118 DiffType::HeadToIndex => {
4119 proto::git_diff::DiffType::HeadToIndex.into()
4120 }
4121 DiffType::HeadToWorktree => {
4122 proto::git_diff::DiffType::HeadToWorktree.into()
4123 }
4124 },
4125 })
4126 .await?;
4127
4128 Ok(response.diff)
4129 }
4130 }
4131 })
4132 }
4133
4134 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4135 let id = self.id;
4136 self.send_job(
4137 Some(format!("git switch -c {branch_name}").into()),
4138 move |repo, _cx| async move {
4139 match repo {
4140 RepositoryState::Local { backend, .. } => {
4141 backend.create_branch(branch_name).await
4142 }
4143 RepositoryState::Remote { project_id, client } => {
4144 client
4145 .request(proto::GitCreateBranch {
4146 project_id: project_id.0,
4147 repository_id: id.to_proto(),
4148 branch_name,
4149 })
4150 .await?;
4151
4152 Ok(())
4153 }
4154 }
4155 },
4156 )
4157 }
4158
4159 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4160 let id = self.id;
4161 self.send_job(
4162 Some(format!("git switch {branch_name}").into()),
4163 move |repo, _cx| async move {
4164 match repo {
4165 RepositoryState::Local { backend, .. } => {
4166 backend.change_branch(branch_name).await
4167 }
4168 RepositoryState::Remote { project_id, client } => {
4169 client
4170 .request(proto::GitChangeBranch {
4171 project_id: project_id.0,
4172 repository_id: id.to_proto(),
4173 branch_name,
4174 })
4175 .await?;
4176
4177 Ok(())
4178 }
4179 }
4180 },
4181 )
4182 }
4183
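    /// Returns the names of the remote refs that the current head commit has
    /// already been pushed to; remote projects ask the host via
    /// `proto::CheckForPushedCommits`.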
4184 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4185 let id = self.id;
4186 self.send_job(None, move |repo, _cx| async move {
4187 match repo {
4188 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4189 RepositoryState::Remote { project_id, client } => {
4190 let response = client
4191 .request(proto::CheckForPushedCommits {
4192 project_id: project_id.0,
4193 repository_id: id.to_proto(),
4194 })
4195 .await?;
4196
4197 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4198
4199 Ok(branches)
4200 }
4201 }
4202 })
4203 }
4204
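    /// Captures a checkpoint of the repository's current state. Checkpoints
    /// (and the restore/compare/diff operations on them) are only implemented
    /// for local repositories; remote repositories return an error.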
4205 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4206 self.send_job(None, |repo, _cx| async move {
4207 match repo {
4208 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4209 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4210 }
4211 })
4212 }
4213
4214 pub fn restore_checkpoint(
4215 &mut self,
4216 checkpoint: GitRepositoryCheckpoint,
4217 ) -> oneshot::Receiver<Result<()>> {
4218 self.send_job(None, move |repo, _cx| async move {
4219 match repo {
4220 RepositoryState::Local { backend, .. } => {
4221 backend.restore_checkpoint(checkpoint).await
4222 }
4223 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4224 }
4225 })
4226 }
4227
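    /// Applies an `UpdateRepository` message received from the host to this
    /// repository's snapshot: branch, head commit, merge conflict paths, and
    /// per-path status entries, then emits `RepositoryEvent::Updated` so
    /// observers refresh.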
4228 pub(crate) fn apply_remote_update(
4229 &mut self,
4230 update: proto::UpdateRepository,
4231 is_new: bool,
4232 cx: &mut Context<Self>,
4233 ) -> Result<()> {
4234 let conflicted_paths = TreeSet::from_ordered_entries(
4235 update
4236 .current_merge_conflicts
4237 .into_iter()
4238 .map(|path| RepoPath(Path::new(&path).into())),
4239 );
4240 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4241 self.snapshot.head_commit = update
4242 .head_commit_details
4243 .as_ref()
4244 .map(proto_to_commit_details);
4245
4246 self.snapshot.merge.conflicted_paths = conflicted_paths;
4247
4248 let edits = update
4249 .removed_statuses
4250 .into_iter()
4251 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
4252 .chain(
4253 update
4254 .updated_statuses
4255 .into_iter()
4256 .filter_map(|updated_status| {
4257 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4258 }),
4259 )
4260 .collect::<Vec<_>>();
4261 self.snapshot.statuses_by_path.edit(edits, &());
4262 if update.is_last_update {
4263 self.snapshot.scan_id = update.scan_id;
4264 }
4265 cx.emit(RepositoryEvent::Updated {
4266 full_scan: true,
4267 new_instance: is_new,
4268 });
4269 Ok(())
4270 }
4271
4272 pub fn compare_checkpoints(
4273 &mut self,
4274 left: GitRepositoryCheckpoint,
4275 right: GitRepositoryCheckpoint,
4276 ) -> oneshot::Receiver<Result<bool>> {
4277 self.send_job(None, move |repo, _cx| async move {
4278 match repo {
4279 RepositoryState::Local { backend, .. } => {
4280 backend.compare_checkpoints(left, right).await
4281 }
4282 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4283 }
4284 })
4285 }
4286
4287 pub fn diff_checkpoints(
4288 &mut self,
4289 base_checkpoint: GitRepositoryCheckpoint,
4290 target_checkpoint: GitRepositoryCheckpoint,
4291 ) -> oneshot::Receiver<Result<String>> {
4292 self.send_job(None, move |repo, _cx| async move {
4293 match repo {
4294 RepositoryState::Local { backend, .. } => {
4295 backend
4296 .diff_checkpoints(base_checkpoint, target_checkpoint)
4297 .await
4298 }
4299 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4300 }
4301 })
4302 }
4303
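    /// Queues a full git status scan, keyed by `GitJobKey::ReloadGitState` so
    /// that redundant scans are coalesced by the worker loop. The resulting
    /// snapshot replaces the current one and, when sharing, is forwarded
    /// downstream.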
4304 fn schedule_scan(
4305 &mut self,
4306 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4307 cx: &mut Context<Self>,
4308 ) {
4309 let this = cx.weak_entity();
4310 let _ = self.send_keyed_job(
4311 Some(GitJobKey::ReloadGitState),
4312 None,
4313 |state, mut cx| async move {
4314 log::debug!("run scheduled git status scan");
4315
4316 let Some(this) = this.upgrade() else {
4317 return Ok(());
4318 };
4319 let RepositoryState::Local { backend, .. } = state else {
4320 bail!("not a local repository")
4321 };
4322 let (snapshot, events) = this
4323 .read_with(&mut cx, |this, _| {
4324 compute_snapshot(
4325 this.id,
4326 this.work_directory_abs_path.clone(),
4327 this.snapshot.clone(),
4328 backend.clone(),
4329 )
4330 })?
4331 .await?;
4332 this.update(&mut cx, |this, cx| {
4333 this.snapshot = snapshot.clone();
4334 for event in events {
4335 cx.emit(event);
4336 }
4337 })?;
4338 if let Some(updates_tx) = updates_tx {
4339 updates_tx
4340 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4341 .ok();
4342 }
4343 Ok(())
4344 },
4345 );
4346 }
4347
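    /// Spawns the background task that services git jobs for a local
    /// repository: it resolves the work directory's environment, opens the
    /// repository through `fs.open_repo`, registers additional git hosting
    /// providers when a global registry is available, and then drains the job
    /// queue, skipping a keyed job whenever a newer job with the same key is
    /// already waiting.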
4348 fn spawn_local_git_worker(
4349 work_directory_abs_path: Arc<Path>,
4350 dot_git_abs_path: Arc<Path>,
4351 _repository_dir_abs_path: Arc<Path>,
4352 _common_dir_abs_path: Arc<Path>,
4353 project_environment: WeakEntity<ProjectEnvironment>,
4354 fs: Arc<dyn Fs>,
4355 cx: &mut Context<Self>,
4356 ) -> mpsc::UnboundedSender<GitJob> {
4357 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4358
4359 cx.spawn(async move |_, cx| {
4360 let environment = project_environment
4361 .upgrade()
4362 .context("missing project environment")?
4363 .update(cx, |project_environment, cx| {
4364 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4365 })?
4366 .await
4367 .unwrap_or_else(|| {
4368 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4369 HashMap::default()
4370 });
4371 let backend = cx
4372 .background_spawn(async move {
4373 fs.open_repo(&dot_git_abs_path)
4374 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4375 })
4376 .await?;
4377
4378 if let Some(git_hosting_provider_registry) =
4379 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4380 {
4381 git_hosting_providers::register_additional_providers(
4382 git_hosting_provider_registry,
4383 backend.clone(),
4384 );
4385 }
4386
4387 let state = RepositoryState::Local {
4388 backend,
4389 environment: Arc::new(environment),
4390 };
4391 let mut jobs = VecDeque::new();
4392 loop {
4393 while let Ok(Some(next_job)) = job_rx.try_next() {
4394 jobs.push_back(next_job);
4395 }
4396
4397 if let Some(job) = jobs.pop_front() {
4398 if let Some(current_key) = &job.key {
4399 if jobs
4400 .iter()
4401 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4402 {
4403 continue;
4404 }
4405 }
4406 (job.job)(state.clone(), cx).await;
4407 } else if let Some(job) = job_rx.next().await {
4408 jobs.push_back(job);
4409 } else {
4410 break;
4411 }
4412 }
4413 anyhow::Ok(())
4414 })
4415 .detach_and_log_err(cx);
4416
4417 job_tx
4418 }
4419
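    /// Same job loop as `spawn_local_git_worker`, but every job runs against a
    /// `RepositoryState::Remote` that proxies git operations to the host over
    /// RPC instead of touching a local repository.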
4420 fn spawn_remote_git_worker(
4421 project_id: ProjectId,
4422 client: AnyProtoClient,
4423 cx: &mut Context<Self>,
4424 ) -> mpsc::UnboundedSender<GitJob> {
4425 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4426
4427 cx.spawn(async move |_, cx| {
4428 let state = RepositoryState::Remote { project_id, client };
4429 let mut jobs = VecDeque::new();
4430 loop {
4431 while let Ok(Some(next_job)) = job_rx.try_next() {
4432 jobs.push_back(next_job);
4433 }
4434
4435 if let Some(job) = jobs.pop_front() {
4436 if let Some(current_key) = &job.key {
4437 if jobs
4438 .iter()
4439 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4440 {
4441 continue;
4442 }
4443 }
4444 (job.job)(state.clone(), cx).await;
4445 } else if let Some(job) = job_rx.next().await {
4446 jobs.push_back(job);
4447 } else {
4448 break;
4449 }
4450 }
4451 anyhow::Ok(())
4452 })
4453 .detach_and_log_err(cx);
4454
4455 job_tx
4456 }
4457
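    /// Loads the index (staged) text for a buffer, either from the local git
    /// backend or from the host via `proto::OpenUnstagedDiff`.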
4458 fn load_staged_text(
4459 &mut self,
4460 buffer_id: BufferId,
4461 repo_path: RepoPath,
4462 cx: &App,
4463 ) -> Task<Result<Option<String>>> {
4464 let rx = self.send_job(None, move |state, _| async move {
4465 match state {
4466 RepositoryState::Local { backend, .. } => {
4467 anyhow::Ok(backend.load_index_text(repo_path).await)
4468 }
4469 RepositoryState::Remote { project_id, client } => {
4470 let response = client
4471 .request(proto::OpenUnstagedDiff {
4472 project_id: project_id.to_proto(),
4473 buffer_id: buffer_id.to_proto(),
4474 })
4475 .await?;
4476 Ok(response.staged_text)
4477 }
4478 }
4479 });
4480 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4481 }
4482
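    /// Loads the HEAD and index texts for a buffer in a single job. When the
    /// two are identical, a single `SetBoth` base is produced so the unstaged
    /// and uncommitted diffs can share one base text; otherwise each base is
    /// set separately. Remote projects learn which case applies from the
    /// response's `mode`.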
4483 fn load_committed_text(
4484 &mut self,
4485 buffer_id: BufferId,
4486 repo_path: RepoPath,
4487 cx: &App,
4488 ) -> Task<Result<DiffBasesChange>> {
4489 let rx = self.send_job(None, move |state, _| async move {
4490 match state {
4491 RepositoryState::Local { backend, .. } => {
4492 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4493 let staged_text = backend.load_index_text(repo_path).await;
4494 let diff_bases_change = if committed_text == staged_text {
4495 DiffBasesChange::SetBoth(committed_text)
4496 } else {
4497 DiffBasesChange::SetEach {
4498 index: staged_text,
4499 head: committed_text,
4500 }
4501 };
4502 anyhow::Ok(diff_bases_change)
4503 }
4504 RepositoryState::Remote { project_id, client } => {
4505 use proto::open_uncommitted_diff_response::Mode;
4506
4507 let response = client
4508 .request(proto::OpenUncommittedDiff {
4509 project_id: project_id.to_proto(),
4510 buffer_id: buffer_id.to_proto(),
4511 })
4512 .await?;
4513 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4514 let bases = match mode {
4515 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4516 Mode::IndexAndHead => DiffBasesChange::SetEach {
4517 head: response.committed_text,
4518 index: response.staged_text,
4519 },
4520 };
4521 Ok(bases)
4522 }
4523 }
4524 });
4525
4526 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4527 }
4528
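    /// Records paths whose git status may have changed and queues a
    /// `RefreshStatuses` job that re-queries just those paths, applies only
    /// the entries that actually differ from the previous snapshot (removing
    /// entries for paths that no longer have a status), and, if anything
    /// changed, bumps the scan id and forwards the snapshot downstream before
    /// emitting an update event.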
4529 fn paths_changed(
4530 &mut self,
4531 paths: Vec<RepoPath>,
4532 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4533 cx: &mut Context<Self>,
4534 ) {
4535 self.paths_needing_status_update.extend(paths);
4536
4537 let this = cx.weak_entity();
4538 let _ = self.send_keyed_job(
4539 Some(GitJobKey::RefreshStatuses),
4540 None,
4541 |state, mut cx| async move {
4542 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4543 (
4544 this.snapshot.clone(),
4545 mem::take(&mut this.paths_needing_status_update),
4546 )
4547 })?;
4548 let RepositoryState::Local { backend, .. } = state else {
4549 bail!("not a local repository")
4550 };
4551
4552 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4553 let statuses = backend.status(&paths).await?;
4554
4555 let changed_path_statuses = cx
4556 .background_spawn(async move {
4557 let mut changed_path_statuses = Vec::new();
4558 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4559 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4560
4561 for (repo_path, status) in &*statuses.entries {
4562 changed_paths.remove(repo_path);
                        if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                            && cursor.item().is_some_and(|entry| entry.status == *status)
                        {
                            continue;
                        }
4568
4569 changed_path_statuses.push(Edit::Insert(StatusEntry {
4570 repo_path: repo_path.clone(),
4571 status: *status,
4572 }));
4573 }
4574 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4575 for path in changed_paths.into_iter() {
4576 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4577 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4578 }
4579 }
4580 changed_path_statuses
4581 })
4582 .await;
4583
4584 this.update(&mut cx, |this, cx| {
4585 if !changed_path_statuses.is_empty() {
4586 this.snapshot
4587 .statuses_by_path
4588 .edit(changed_path_statuses, &());
4589 this.snapshot.scan_id += 1;
4590 if let Some(updates_tx) = updates_tx {
4591 updates_tx
4592 .unbounded_send(DownstreamUpdate::UpdateRepository(
4593 this.snapshot.clone(),
4594 ))
4595 .ok();
4596 }
4597 }
4598 cx.emit(RepositoryEvent::Updated {
4599 full_scan: false,
4600 new_instance: false,
4601 });
4602 })
4603 },
4604 );
4605 }
4606
    /// Returns the currently running git command and when it started, if any.
4608 pub fn current_job(&self) -> Option<JobInfo> {
4609 self.active_jobs.values().next().cloned()
4610 }
4611
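    /// Enqueues a no-op job; the returned receiver resolves once every job
    /// queued before it has been processed.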
4612 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4613 self.send_job(None, |_, _| async {})
4614 }
4615}
4616
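/// Builds a permalink to the upstream source of a file vendored in the Cargo
/// registry cache, using the `.cargo_vcs_info.json` and `Cargo.toml` that
/// Cargo publishes alongside each crate to recover the repository URL, the
/// commit SHA, and the file's path within that repository.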
4617fn get_permalink_in_rust_registry_src(
4618 provider_registry: Arc<GitHostingProviderRegistry>,
4619 path: PathBuf,
4620 selection: Range<u32>,
4621) -> Result<url::Url> {
4622 #[derive(Deserialize)]
4623 struct CargoVcsGit {
4624 sha1: String,
4625 }
4626
4627 #[derive(Deserialize)]
4628 struct CargoVcsInfo {
4629 git: CargoVcsGit,
4630 path_in_vcs: String,
4631 }
4632
4633 #[derive(Deserialize)]
4634 struct CargoPackage {
4635 repository: String,
4636 }
4637
4638 #[derive(Deserialize)]
4639 struct CargoToml {
4640 package: CargoPackage,
4641 }
4642
4643 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4644 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4645 Some((dir, json))
4646 }) else {
4647 bail!("No .cargo_vcs_info.json found in parent directories")
4648 };
4649 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4650 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4651 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4652 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4653 .context("parsing package.repository field of manifest")?;
4654 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4655 let permalink = provider.build_permalink(
4656 remote,
4657 BuildPermalinkParams {
4658 sha: &cargo_vcs_info.git.sha1,
4659 path: &path.to_string_lossy(),
4660 selection: Some(selection),
4661 },
4662 );
4663 Ok(permalink)
4664}
4665
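/// Flattens a `git::blame::Blame` into its wire representation (`None` maps to
/// an empty response). Note the field renames: `committer_name` and
/// `committer_email` travel as `committer` and `committer_mail`, mirroring
/// `deserialize_blame_buffer_response` below.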
4666fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4667 let Some(blame) = blame else {
4668 return proto::BlameBufferResponse {
4669 blame_response: None,
4670 };
4671 };
4672
4673 let entries = blame
4674 .entries
4675 .into_iter()
4676 .map(|entry| proto::BlameEntry {
4677 sha: entry.sha.as_bytes().into(),
4678 start_line: entry.range.start,
4679 end_line: entry.range.end,
4680 original_line_number: entry.original_line_number,
4681 author: entry.author,
4682 author_mail: entry.author_mail,
4683 author_time: entry.author_time,
4684 author_tz: entry.author_tz,
4685 committer: entry.committer_name,
4686 committer_mail: entry.committer_email,
4687 committer_time: entry.committer_time,
4688 committer_tz: entry.committer_tz,
4689 summary: entry.summary,
4690 previous: entry.previous,
4691 filename: entry.filename,
4692 })
4693 .collect::<Vec<_>>();
4694
4695 let messages = blame
4696 .messages
4697 .into_iter()
4698 .map(|(oid, message)| proto::CommitMessage {
4699 oid: oid.as_bytes().into(),
4700 message,
4701 })
4702 .collect::<Vec<_>>();
4703
4704 proto::BlameBufferResponse {
4705 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4706 entries,
4707 messages,
4708 remote_url: blame.remote_url,
4709 }),
4710 }
4711}
4712
4713fn deserialize_blame_buffer_response(
4714 response: proto::BlameBufferResponse,
4715) -> Option<git::blame::Blame> {
4716 let response = response.blame_response?;
4717 let entries = response
4718 .entries
4719 .into_iter()
4720 .filter_map(|entry| {
4721 Some(git::blame::BlameEntry {
4722 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4723 range: entry.start_line..entry.end_line,
4724 original_line_number: entry.original_line_number,
4725 committer_name: entry.committer,
4726 committer_time: entry.committer_time,
4727 committer_tz: entry.committer_tz,
4728 committer_email: entry.committer_mail,
4729 author: entry.author,
4730 author_mail: entry.author_mail,
4731 author_time: entry.author_time,
4732 author_tz: entry.author_tz,
4733 summary: entry.summary,
4734 previous: entry.previous,
4735 filename: entry.filename,
4736 })
4737 })
4738 .collect::<Vec<_>>();
4739
4740 let messages = response
4741 .messages
4742 .into_iter()
4743 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4744 .collect::<HashMap<_, _>>();
4745
4746 Some(Blame {
4747 entries,
4748 messages,
4749 remote_url: response.remote_url,
4750 })
4751}
4752
4753fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4754 proto::Branch {
4755 is_head: branch.is_head,
4756 ref_name: branch.ref_name.to_string(),
4757 unix_timestamp: branch
4758 .most_recent_commit
4759 .as_ref()
4760 .map(|commit| commit.commit_timestamp as u64),
4761 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4762 ref_name: upstream.ref_name.to_string(),
4763 tracking: upstream
4764 .tracking
4765 .status()
4766 .map(|upstream| proto::UpstreamTracking {
4767 ahead: upstream.ahead as u64,
4768 behind: upstream.behind as u64,
4769 }),
4770 }),
4771 most_recent_commit: branch
4772 .most_recent_commit
4773 .as_ref()
4774 .map(|commit| proto::CommitSummary {
4775 sha: commit.sha.to_string(),
4776 subject: commit.subject.to_string(),
4777 commit_timestamp: commit.commit_timestamp,
4778 }),
4779 }
4780}
4781
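/// Inverse of `branch_to_proto`. `has_parent` is not carried over the wire and
/// is assumed to be `true` on the receiving side.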
4782fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
4783 git::repository::Branch {
4784 is_head: proto.is_head,
4785 ref_name: proto.ref_name.clone().into(),
4786 upstream: proto
4787 .upstream
4788 .as_ref()
4789 .map(|upstream| git::repository::Upstream {
4790 ref_name: upstream.ref_name.to_string().into(),
4791 tracking: upstream
4792 .tracking
4793 .as_ref()
4794 .map(|tracking| {
4795 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
4796 ahead: tracking.ahead as u32,
4797 behind: tracking.behind as u32,
4798 })
4799 })
4800 .unwrap_or(git::repository::UpstreamTracking::Gone),
4801 }),
4802 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
4803 git::repository::CommitSummary {
4804 sha: commit.sha.to_string().into(),
4805 subject: commit.subject.to_string().into(),
4806 commit_timestamp: commit.commit_timestamp,
4807 has_parent: true,
4808 }
4809 }),
4810 }
4811}
4812
4813fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
4814 proto::GitCommitDetails {
4815 sha: commit.sha.to_string(),
4816 message: commit.message.to_string(),
4817 commit_timestamp: commit.commit_timestamp,
4818 author_email: commit.author_email.to_string(),
4819 author_name: commit.author_name.to_string(),
4820 }
4821}
4822
4823fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
4824 CommitDetails {
4825 sha: proto.sha.clone().into(),
4826 message: proto.message.clone().into(),
4827 commit_timestamp: proto.commit_timestamp,
4828 author_email: proto.author_email.clone().into(),
4829 author_name: proto.author_name.clone().into(),
4830 }
4831}
4832
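/// Recomputes a repository snapshot from scratch: current branch, per-path
/// statuses, merge state, head commit, and the origin/upstream remote URLs.
/// Also returns the repository events that should be emitted based on how the
/// result differs from `prev_snapshot`.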
4833async fn compute_snapshot(
4834 id: RepositoryId,
4835 work_directory_abs_path: Arc<Path>,
4836 prev_snapshot: RepositorySnapshot,
4837 backend: Arc<dyn GitRepository>,
4838) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
4839 let mut events = Vec::new();
4840 let branches = backend.branches().await?;
4841 let branch = branches.into_iter().find(|branch| branch.is_head);
4842 let statuses = backend
4843 .status(std::slice::from_ref(&WORK_DIRECTORY_REPO_PATH))
4844 .await?;
4845 let statuses_by_path = SumTree::from_iter(
4846 statuses
4847 .entries
4848 .iter()
4849 .map(|(repo_path, status)| StatusEntry {
4850 repo_path: repo_path.clone(),
4851 status: *status,
4852 }),
4853 &(),
4854 );
4855 let (merge_details, merge_heads_changed) =
4856 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
4857 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
4858
4859 if merge_heads_changed
4860 || branch != prev_snapshot.branch
4861 || statuses_by_path != prev_snapshot.statuses_by_path
4862 {
4863 events.push(RepositoryEvent::Updated {
4864 full_scan: true,
4865 new_instance: false,
4866 });
4867 }
4868
    // Merge conflict paths are cached so that staging or unstaging a file doesn't
    // clear its conflict state; they are only recomputed when the merge heads
    // change (e.g. once the merge is concluded by a commit).
4871 if merge_heads_changed {
4872 events.push(RepositoryEvent::MergeHeadsChanged);
4873 }
4874
    // Useful when `branch` is `None`, e.g. in a detached HEAD state.
4876 let head_commit = match backend.head_sha().await {
4877 Some(head_sha) => backend.show(head_sha).await.log_err(),
4878 None => None,
4879 };
4880
4881 // Used by edit prediction data collection
4882 let remote_origin_url = backend.remote_url("origin");
4883 let remote_upstream_url = backend.remote_url("upstream");
4884
4885 let snapshot = RepositorySnapshot {
4886 id,
4887 statuses_by_path,
4888 work_directory_abs_path,
4889 scan_id: prev_snapshot.scan_id + 1,
4890 branch,
4891 head_commit,
4892 merge: merge_details,
4893 remote_origin_url,
4894 remote_upstream_url,
4895 };
4896
4897 Ok((snapshot, events))
4898}
4899
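/// Converts a protobuf file status into a `FileStatus`, falling back to the
/// coarse `simple_status` code when the structured `variant` is absent
/// (presumably for compatibility with older peers).
///
/// Illustrative sketch of the round trip through `status_to_proto` (not
/// compiled; relies on `FileStatus` being `Copy` and `PartialEq`, as its uses
/// elsewhere in this file imply):
///
/// ```ignore
/// let status: FileStatus = TrackedStatus {
///     index_status: StatusCode::Modified,
///     worktree_status: StatusCode::Unmodified,
/// }
/// .into();
/// // `simple_status` is ignored whenever a structured variant is present.
/// assert_eq!(status_from_proto(0, Some(status_to_proto(status))).unwrap(), status);
/// ```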
4900fn status_from_proto(
4901 simple_status: i32,
4902 status: Option<proto::GitFileStatus>,
4903) -> anyhow::Result<FileStatus> {
4904 use proto::git_file_status::Variant;
4905
4906 let Some(variant) = status.and_then(|status| status.variant) else {
4907 let code = proto::GitStatus::from_i32(simple_status)
4908 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
4909 let result = match code {
4910 proto::GitStatus::Added => TrackedStatus {
4911 worktree_status: StatusCode::Added,
4912 index_status: StatusCode::Unmodified,
4913 }
4914 .into(),
4915 proto::GitStatus::Modified => TrackedStatus {
4916 worktree_status: StatusCode::Modified,
4917 index_status: StatusCode::Unmodified,
4918 }
4919 .into(),
4920 proto::GitStatus::Conflict => UnmergedStatus {
4921 first_head: UnmergedStatusCode::Updated,
4922 second_head: UnmergedStatusCode::Updated,
4923 }
4924 .into(),
4925 proto::GitStatus::Deleted => TrackedStatus {
4926 worktree_status: StatusCode::Deleted,
4927 index_status: StatusCode::Unmodified,
4928 }
4929 .into(),
4930 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
4931 };
4932 return Ok(result);
4933 };
4934
4935 let result = match variant {
4936 Variant::Untracked(_) => FileStatus::Untracked,
4937 Variant::Ignored(_) => FileStatus::Ignored,
4938 Variant::Unmerged(unmerged) => {
4939 let [first_head, second_head] =
4940 [unmerged.first_head, unmerged.second_head].map(|head| {
4941 let code = proto::GitStatus::from_i32(head)
4942 .with_context(|| format!("Invalid git status code: {head}"))?;
4943 let result = match code {
4944 proto::GitStatus::Added => UnmergedStatusCode::Added,
4945 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
4946 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
4947 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
4948 };
4949 Ok(result)
4950 });
4951 let [first_head, second_head] = [first_head?, second_head?];
4952 UnmergedStatus {
4953 first_head,
4954 second_head,
4955 }
4956 .into()
4957 }
4958 Variant::Tracked(tracked) => {
4959 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
4960 .map(|status| {
4961 let code = proto::GitStatus::from_i32(status)
4962 .with_context(|| format!("Invalid git status code: {status}"))?;
4963 let result = match code {
4964 proto::GitStatus::Modified => StatusCode::Modified,
4965 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
4966 proto::GitStatus::Added => StatusCode::Added,
4967 proto::GitStatus::Deleted => StatusCode::Deleted,
4968 proto::GitStatus::Renamed => StatusCode::Renamed,
4969 proto::GitStatus::Copied => StatusCode::Copied,
4970 proto::GitStatus::Unmodified => StatusCode::Unmodified,
4971 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
4972 };
4973 Ok(result)
4974 });
4975 let [index_status, worktree_status] = [index_status?, worktree_status?];
4976 TrackedStatus {
4977 index_status,
4978 worktree_status,
4979 }
4980 .into()
4981 }
4982 };
4983 Ok(result)
4984}
4985
4986fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
4987 use proto::git_file_status::{Tracked, Unmerged, Variant};
4988
4989 let variant = match status {
4990 FileStatus::Untracked => Variant::Untracked(Default::default()),
4991 FileStatus::Ignored => Variant::Ignored(Default::default()),
4992 FileStatus::Unmerged(UnmergedStatus {
4993 first_head,
4994 second_head,
4995 }) => Variant::Unmerged(Unmerged {
4996 first_head: unmerged_status_to_proto(first_head),
4997 second_head: unmerged_status_to_proto(second_head),
4998 }),
4999 FileStatus::Tracked(TrackedStatus {
5000 index_status,
5001 worktree_status,
5002 }) => Variant::Tracked(Tracked {
5003 index_status: tracked_status_to_proto(index_status),
5004 worktree_status: tracked_status_to_proto(worktree_status),
5005 }),
5006 };
5007 proto::GitFileStatus {
5008 variant: Some(variant),
5009 }
5010}
5011
5012fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5013 match code {
5014 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5015 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5016 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5017 }
5018}
5019
5020fn tracked_status_to_proto(code: StatusCode) -> i32 {
5021 match code {
5022 StatusCode::Added => proto::GitStatus::Added as _,
5023 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5024 StatusCode::Modified => proto::GitStatus::Modified as _,
5025 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5026 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5027 StatusCode::Copied => proto::GitStatus::Copied as _,
5028 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5029 }
5030}