1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::AskPassDelegate;
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 status::{
32 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
33 },
34};
35use gpui::{
36 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
37 WeakEntity,
38};
39use language::{
40 Buffer, BufferEvent, Language, LanguageRegistry,
41 proto::{deserialize_version, serialize_version},
42};
43use parking_lot::Mutex;
44use postage::stream::Stream as _;
45use rpc::{
46 AnyProtoClient, TypedEnvelope,
47 proto::{self, FromProto, ToProto, git_reset, split_repository_update},
48};
49use serde::Deserialize;
50use std::{
51 cmp::Ordering,
52 collections::{BTreeSet, VecDeque},
53 future::Future,
54 mem,
55 ops::Range,
56 path::{Path, PathBuf},
57 sync::{
58 Arc,
59 atomic::{self, AtomicU64},
60 },
61 time::Instant,
62};
63use sum_tree::{Edit, SumTree, TreeSet};
64use text::{Bias, BufferId};
65use util::{ResultExt, debug_panic, paths::SanitizedPath, post_inc};
66use worktree::{
67 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
68 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
69};
70
71pub struct GitStore {
72 state: GitStoreState,
73 buffer_store: Entity<BufferStore>,
74 worktree_store: Entity<WorktreeStore>,
75 repositories: HashMap<RepositoryId, Entity<Repository>>,
76 active_repo_id: Option<RepositoryId>,
77 #[allow(clippy::type_complexity)]
78 loading_diffs:
79 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
80 diffs: HashMap<BufferId, Entity<BufferGitState>>,
81 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
82 _subscriptions: Vec<Subscription>,
83}
84
85#[derive(Default)]
86struct SharedDiffs {
87 unstaged: Option<Entity<BufferDiff>>,
88 uncommitted: Option<Entity<BufferDiff>>,
89}
90
91struct BufferGitState {
92 unstaged_diff: Option<WeakEntity<BufferDiff>>,
93 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
94 conflict_set: Option<WeakEntity<ConflictSet>>,
95 recalculate_diff_task: Option<Task<Result<()>>>,
96 reparse_conflict_markers_task: Option<Task<Result<()>>>,
97 language: Option<Arc<Language>>,
98 language_registry: Option<Arc<LanguageRegistry>>,
99 conflict_updated_futures: Vec<oneshot::Sender<()>>,
100 recalculating_tx: postage::watch::Sender<bool>,
101
102 /// These operation counts are used to ensure that head and index text
103 /// values read from the git repository are up-to-date with any hunk staging
104 /// operations that have been performed on the BufferDiff.
105 ///
106 /// The operation count is incremented immediately when the user initiates a
107 /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
109 /// the operation count that prompted the write.
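    ///
    /// A rough sketch of the intended ordering (a hedged illustration, not new
    /// behavior; the field names are the ones declared just below):
    ///
    /// ```ignore
    /// // The user stages or unstages a hunk:
    /// state.hunk_staging_operation_count += 1;
    /// let count_at_request = state.hunk_staging_operation_count;
    /// // ...the new index text is written to disk asynchronously...
    /// state.hunk_staging_operation_count_as_of_write = count_at_request;
    /// // Head/index text read back from the repository is considered current
    /// // only when no staging operation is still in flight, i.e. the two
    /// // counters agree.
    /// ```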
110 hunk_staging_operation_count: usize,
111 hunk_staging_operation_count_as_of_write: usize,
112
113 head_text: Option<Arc<String>>,
114 index_text: Option<Arc<String>>,
115 head_changed: bool,
116 index_changed: bool,
117 language_changed: bool,
118}
119
120#[derive(Clone, Debug)]
121enum DiffBasesChange {
122 SetIndex(Option<String>),
123 SetHead(Option<String>),
124 SetEach {
125 index: Option<String>,
126 head: Option<String>,
127 },
128 SetBoth(Option<String>),
129}
130
131#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
132enum DiffKind {
133 Unstaged,
134 Uncommitted,
135}
136
137enum GitStoreState {
138 Local {
139 next_repository_id: Arc<AtomicU64>,
140 downstream: Option<LocalDownstreamState>,
141 project_environment: Entity<ProjectEnvironment>,
142 fs: Arc<dyn Fs>,
143 },
144 Remote {
145 upstream_client: AnyProtoClient,
146 upstream_project_id: u64,
147 downstream: Option<(AnyProtoClient, ProjectId)>,
148 },
149}
150
151enum DownstreamUpdate {
152 UpdateRepository(RepositorySnapshot),
153 RemoveRepository(RepositoryId),
154}
155
156struct LocalDownstreamState {
157 client: AnyProtoClient,
158 project_id: ProjectId,
159 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
160 _task: Task<Result<()>>,
161}
162
163#[derive(Clone, Debug)]
164pub struct GitStoreCheckpoint {
165 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
166}
167
168#[derive(Clone, Debug, PartialEq, Eq)]
169pub struct StatusEntry {
170 pub repo_path: RepoPath,
171 pub status: FileStatus,
172}
173
174impl StatusEntry {
175 fn to_proto(&self) -> proto::StatusEntry {
176 let simple_status = match self.status {
177 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
178 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
179 FileStatus::Tracked(TrackedStatus {
180 index_status,
181 worktree_status,
182 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
183 worktree_status
184 } else {
185 index_status
186 }),
187 };
188
189 proto::StatusEntry {
190 repo_path: self.repo_path.as_ref().to_proto(),
191 simple_status,
192 status: Some(status_to_proto(self.status)),
193 }
194 }
195}
196
197impl TryFrom<proto::StatusEntry> for StatusEntry {
198 type Error = anyhow::Error;
199
200 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
201 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
202 let status = status_from_proto(value.simple_status, value.status)?;
203 Ok(Self { repo_path, status })
204 }
205}
206
207impl sum_tree::Item for StatusEntry {
208 type Summary = PathSummary<GitSummary>;
209
210 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
211 PathSummary {
212 max_path: self.repo_path.0.clone(),
213 item_summary: self.status.summary(),
214 }
215 }
216}
217
218impl sum_tree::KeyedItem for StatusEntry {
219 type Key = PathKey;
220
221 fn key(&self) -> Self::Key {
222 PathKey(self.repo_path.0.clone())
223 }
224}
225
226#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
227pub struct RepositoryId(pub u64);
228
229#[derive(Clone, Debug, Default, PartialEq, Eq)]
230pub struct MergeDetails {
231 pub conflicted_paths: TreeSet<RepoPath>,
232 pub message: Option<SharedString>,
233 pub heads: Vec<Option<SharedString>>,
234}
235
236#[derive(Clone, Debug, PartialEq, Eq)]
237pub struct RepositorySnapshot {
238 pub id: RepositoryId,
239 pub statuses_by_path: SumTree<StatusEntry>,
240 pub work_directory_abs_path: Arc<Path>,
241 pub branch: Option<Branch>,
242 pub head_commit: Option<CommitDetails>,
243 pub scan_id: u64,
244 pub merge: MergeDetails,
245 pub remote_origin_url: Option<String>,
246 pub remote_upstream_url: Option<String>,
247}
248
249type JobId = u64;
250
251#[derive(Clone, Debug, PartialEq, Eq)]
252pub struct JobInfo {
253 pub start: Instant,
254 pub message: SharedString,
255}
256
257pub struct Repository {
258 this: WeakEntity<Self>,
259 snapshot: RepositorySnapshot,
260 commit_message_buffer: Option<Entity<Buffer>>,
261 git_store: WeakEntity<GitStore>,
262 // For a local repository, holds paths that have had worktree events since the last status scan completed,
263 // and that should be examined during the next status scan.
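    // (Presumably populated via `paths_changed`, which is invoked from
    // `on_worktree_store_event` below, and examined by the next scheduled scan;
    // see `schedule_scan`.)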
264 paths_needing_status_update: BTreeSet<RepoPath>,
265 job_sender: mpsc::UnboundedSender<GitJob>,
266 active_jobs: HashMap<JobId, JobInfo>,
267 job_id: JobId,
268 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
269 latest_askpass_id: u64,
270}
271
272impl std::ops::Deref for Repository {
273 type Target = RepositorySnapshot;
274
275 fn deref(&self) -> &Self::Target {
276 &self.snapshot
277 }
278}
279
280#[derive(Clone)]
281pub enum RepositoryState {
282 Local {
283 backend: Arc<dyn GitRepository>,
284 environment: Arc<HashMap<String, String>>,
285 },
286 Remote {
287 project_id: ProjectId,
288 client: AnyProtoClient,
289 },
290}
291
292#[derive(Clone, Debug)]
293pub enum RepositoryEvent {
294 Updated { full_scan: bool, new_instance: bool },
295 MergeHeadsChanged,
296}
297
298#[derive(Clone, Debug)]
299pub struct JobsUpdated;
300
301#[derive(Debug)]
302pub enum GitStoreEvent {
303 ActiveRepositoryChanged(Option<RepositoryId>),
304 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
305 RepositoryAdded(RepositoryId),
306 RepositoryRemoved(RepositoryId),
307 IndexWriteError(anyhow::Error),
308 JobsUpdated,
309 ConflictsUpdated,
310}
311
312impl EventEmitter<RepositoryEvent> for Repository {}
313impl EventEmitter<JobsUpdated> for Repository {}
314impl EventEmitter<GitStoreEvent> for GitStore {}
315
316pub struct GitJob {
317 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
318 key: Option<GitJobKey>,
319}
320
321#[derive(PartialEq, Eq)]
322enum GitJobKey {
323 WriteIndex(RepoPath),
324 ReloadBufferDiffBases,
325 RefreshStatuses,
326 ReloadGitState,
327}
328
329impl GitStore {
330 pub fn local(
331 worktree_store: &Entity<WorktreeStore>,
332 buffer_store: Entity<BufferStore>,
333 environment: Entity<ProjectEnvironment>,
334 fs: Arc<dyn Fs>,
335 cx: &mut Context<Self>,
336 ) -> Self {
337 Self::new(
338 worktree_store.clone(),
339 buffer_store,
340 GitStoreState::Local {
341 next_repository_id: Arc::new(AtomicU64::new(1)),
342 downstream: None,
343 project_environment: environment,
344 fs,
345 },
346 cx,
347 )
348 }
349
350 pub fn remote(
351 worktree_store: &Entity<WorktreeStore>,
352 buffer_store: Entity<BufferStore>,
353 upstream_client: AnyProtoClient,
354 project_id: u64,
355 cx: &mut Context<Self>,
356 ) -> Self {
357 Self::new(
358 worktree_store.clone(),
359 buffer_store,
360 GitStoreState::Remote {
361 upstream_client,
362 upstream_project_id: project_id,
363 downstream: None,
364 },
365 cx,
366 )
367 }
368
369 fn new(
370 worktree_store: Entity<WorktreeStore>,
371 buffer_store: Entity<BufferStore>,
372 state: GitStoreState,
373 cx: &mut Context<Self>,
374 ) -> Self {
375 let _subscriptions = vec![
376 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
377 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
378 ];
379
380 GitStore {
381 state,
382 buffer_store,
383 worktree_store,
384 repositories: HashMap::default(),
385 active_repo_id: None,
386 _subscriptions,
387 loading_diffs: HashMap::default(),
388 shared_diffs: HashMap::default(),
389 diffs: HashMap::default(),
390 }
391 }
392
393 pub fn init(client: &AnyProtoClient) {
394 client.add_entity_request_handler(Self::handle_get_remotes);
395 client.add_entity_request_handler(Self::handle_get_branches);
396 client.add_entity_request_handler(Self::handle_get_default_branch);
397 client.add_entity_request_handler(Self::handle_change_branch);
398 client.add_entity_request_handler(Self::handle_create_branch);
399 client.add_entity_request_handler(Self::handle_rename_branch);
400 client.add_entity_request_handler(Self::handle_git_init);
401 client.add_entity_request_handler(Self::handle_push);
402 client.add_entity_request_handler(Self::handle_pull);
403 client.add_entity_request_handler(Self::handle_fetch);
404 client.add_entity_request_handler(Self::handle_stage);
405 client.add_entity_request_handler(Self::handle_unstage);
406 client.add_entity_request_handler(Self::handle_stash);
407 client.add_entity_request_handler(Self::handle_stash_pop);
408 client.add_entity_request_handler(Self::handle_commit);
409 client.add_entity_request_handler(Self::handle_reset);
410 client.add_entity_request_handler(Self::handle_show);
411 client.add_entity_request_handler(Self::handle_load_commit_diff);
412 client.add_entity_request_handler(Self::handle_checkout_files);
413 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
414 client.add_entity_request_handler(Self::handle_set_index_text);
415 client.add_entity_request_handler(Self::handle_askpass);
416 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
417 client.add_entity_request_handler(Self::handle_git_diff);
418 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
419 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
420 client.add_entity_message_handler(Self::handle_update_diff_bases);
421 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
422 client.add_entity_request_handler(Self::handle_blame_buffer);
423 client.add_entity_message_handler(Self::handle_update_repository);
424 client.add_entity_message_handler(Self::handle_remove_repository);
425 client.add_entity_request_handler(Self::handle_git_clone);
426 }
427
428 pub fn is_local(&self) -> bool {
429 matches!(self.state, GitStoreState::Local { .. })
430 }
431
432 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
433 match &mut self.state {
434 GitStoreState::Remote {
435 downstream: downstream_client,
436 ..
437 } => {
438 for repo in self.repositories.values() {
439 let update = repo.read(cx).snapshot.initial_update(project_id);
440 for update in split_repository_update(update) {
441 client.send(update).log_err();
442 }
443 }
444 *downstream_client = Some((client, ProjectId(project_id)));
445 }
446 GitStoreState::Local {
447 downstream: downstream_client,
448 ..
449 } => {
450 let mut snapshots = HashMap::default();
451 let (updates_tx, mut updates_rx) = mpsc::unbounded();
452 for repo in self.repositories.values() {
453 updates_tx
454 .unbounded_send(DownstreamUpdate::UpdateRepository(
455 repo.read(cx).snapshot.clone(),
456 ))
457 .ok();
458 }
459 *downstream_client = Some(LocalDownstreamState {
460 client: client.clone(),
461 project_id: ProjectId(project_id),
462 updates_tx,
463 _task: cx.spawn(async move |this, cx| {
464 cx.background_spawn(async move {
465 while let Some(update) = updates_rx.next().await {
466 match update {
467 DownstreamUpdate::UpdateRepository(snapshot) => {
468 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
469 {
470 let update =
471 snapshot.build_update(old_snapshot, project_id);
472 *old_snapshot = snapshot;
473 for update in split_repository_update(update) {
474 client.send(update)?;
475 }
476 } else {
477 let update = snapshot.initial_update(project_id);
478 for update in split_repository_update(update) {
479 client.send(update)?;
480 }
481 snapshots.insert(snapshot.id, snapshot);
482 }
483 }
484 DownstreamUpdate::RemoveRepository(id) => {
485 client.send(proto::RemoveRepository {
486 project_id,
487 id: id.to_proto(),
488 })?;
489 }
490 }
491 }
492 anyhow::Ok(())
493 })
494 .await
495 .ok();
496 this.update(cx, |this, _| {
497 if let GitStoreState::Local {
498 downstream: downstream_client,
499 ..
500 } = &mut this.state
501 {
502 downstream_client.take();
503 } else {
504 unreachable!("unshared called on remote store");
505 }
506 })
507 }),
508 });
509 }
510 }
511 }
512
513 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
514 match &mut self.state {
515 GitStoreState::Local {
516 downstream: downstream_client,
517 ..
518 } => {
519 downstream_client.take();
520 }
521 GitStoreState::Remote {
522 downstream: downstream_client,
523 ..
524 } => {
525 downstream_client.take();
526 }
527 }
528 self.shared_diffs.clear();
529 }
530
531 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
532 self.shared_diffs.remove(peer_id);
533 }
534
535 pub fn active_repository(&self) -> Option<Entity<Repository>> {
536 self.active_repo_id
537 .as_ref()
538 .map(|id| self.repositories[id].clone())
539 }
540
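    /// Returns the diff between `buffer`'s current contents and its staged
    /// (index) text, creating and loading that diff if it is not already open.
    ///
    /// A minimal usage sketch (assumes an `Entity<GitStore>` named `git_store`
    /// and an already-open `Entity<Buffer>`; the task must be awaited in an
    /// async context):
    ///
    /// ```ignore
    /// let diff_task =
    ///     git_store.update(cx, |store, cx| store.open_unstaged_diff(buffer.clone(), cx));
    /// let diff: Entity<BufferDiff> = diff_task.await?;
    /// ```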
541 pub fn open_unstaged_diff(
542 &mut self,
543 buffer: Entity<Buffer>,
544 cx: &mut Context<Self>,
545 ) -> Task<Result<Entity<BufferDiff>>> {
546 let buffer_id = buffer.read(cx).remote_id();
547 if let Some(diff_state) = self.diffs.get(&buffer_id)
548 && let Some(unstaged_diff) = diff_state
549 .read(cx)
550 .unstaged_diff
551 .as_ref()
552 .and_then(|weak| weak.upgrade())
553 {
554 if let Some(task) =
555 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
556 {
557 return cx.background_executor().spawn(async move {
558 task.await;
559 Ok(unstaged_diff)
560 });
561 }
562 return Task::ready(Ok(unstaged_diff));
563 }
564
565 let Some((repo, repo_path)) =
566 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
567 else {
568 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
569 };
570
571 let task = self
572 .loading_diffs
573 .entry((buffer_id, DiffKind::Unstaged))
574 .or_insert_with(|| {
575 let staged_text = repo.update(cx, |repo, cx| {
576 repo.load_staged_text(buffer_id, repo_path, cx)
577 });
578 cx.spawn(async move |this, cx| {
579 Self::open_diff_internal(
580 this,
581 DiffKind::Unstaged,
582 staged_text.await.map(DiffBasesChange::SetIndex),
583 buffer,
584 cx,
585 )
586 .await
587 .map_err(Arc::new)
588 })
589 .shared()
590 })
591 .clone();
592
593 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
594 }
595
596 pub fn open_uncommitted_diff(
597 &mut self,
598 buffer: Entity<Buffer>,
599 cx: &mut Context<Self>,
600 ) -> Task<Result<Entity<BufferDiff>>> {
601 let buffer_id = buffer.read(cx).remote_id();
602
603 if let Some(diff_state) = self.diffs.get(&buffer_id)
604 && let Some(uncommitted_diff) = diff_state
605 .read(cx)
606 .uncommitted_diff
607 .as_ref()
608 .and_then(|weak| weak.upgrade())
609 {
610 if let Some(task) =
611 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
612 {
613 return cx.background_executor().spawn(async move {
614 task.await;
615 Ok(uncommitted_diff)
616 });
617 }
618 return Task::ready(Ok(uncommitted_diff));
619 }
620
621 let Some((repo, repo_path)) =
622 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
623 else {
624 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
625 };
626
627 let task = self
628 .loading_diffs
629 .entry((buffer_id, DiffKind::Uncommitted))
630 .or_insert_with(|| {
631 let changes = repo.update(cx, |repo, cx| {
632 repo.load_committed_text(buffer_id, repo_path, cx)
633 });
634
635 cx.spawn(async move |this, cx| {
636 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
637 .await
638 .map_err(Arc::new)
639 })
640 .shared()
641 })
642 .clone();
643
644 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
645 }
646
647 async fn open_diff_internal(
648 this: WeakEntity<Self>,
649 kind: DiffKind,
650 texts: Result<DiffBasesChange>,
651 buffer_entity: Entity<Buffer>,
652 cx: &mut AsyncApp,
653 ) -> Result<Entity<BufferDiff>> {
654 let diff_bases_change = match texts {
655 Err(e) => {
656 this.update(cx, |this, cx| {
657 let buffer = buffer_entity.read(cx);
658 let buffer_id = buffer.remote_id();
659 this.loading_diffs.remove(&(buffer_id, kind));
660 })?;
661 return Err(e);
662 }
663 Ok(change) => change,
664 };
665
666 this.update(cx, |this, cx| {
667 let buffer = buffer_entity.read(cx);
668 let buffer_id = buffer.remote_id();
669 let language = buffer.language().cloned();
670 let language_registry = buffer.language_registry();
671 let text_snapshot = buffer.text_snapshot();
672 this.loading_diffs.remove(&(buffer_id, kind));
673
674 let git_store = cx.weak_entity();
675 let diff_state = this
676 .diffs
677 .entry(buffer_id)
678 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
679
680 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
681
682 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
683 diff_state.update(cx, |diff_state, cx| {
684 diff_state.language = language;
685 diff_state.language_registry = language_registry;
686
687 match kind {
688 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
689 DiffKind::Uncommitted => {
690 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
691 diff
692 } else {
693 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
694 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
695 unstaged_diff
696 };
697
698 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
699 diff_state.uncommitted_diff = Some(diff.downgrade())
700 }
701 }
702
703 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
704 let rx = diff_state.wait_for_recalculation();
705
706 anyhow::Ok(async move {
707 if let Some(rx) = rx {
708 rx.await;
709 }
710 Ok(diff)
711 })
712 })
713 })??
714 .await
715 }
716
717 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
718 let diff_state = self.diffs.get(&buffer_id)?;
719 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
720 }
721
722 pub fn get_uncommitted_diff(
723 &self,
724 buffer_id: BufferId,
725 cx: &App,
726 ) -> Option<Entity<BufferDiff>> {
727 let diff_state = self.diffs.get(&buffer_id)?;
728 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
729 }
730
731 pub fn open_conflict_set(
732 &mut self,
733 buffer: Entity<Buffer>,
734 cx: &mut Context<Self>,
735 ) -> Entity<ConflictSet> {
736 log::debug!("open conflict set");
737 let buffer_id = buffer.read(cx).remote_id();
738
739 if let Some(git_state) = self.diffs.get(&buffer_id)
740 && let Some(conflict_set) = git_state
741 .read(cx)
742 .conflict_set
743 .as_ref()
744 .and_then(|weak| weak.upgrade())
745 {
746 let conflict_set = conflict_set;
747 let buffer_snapshot = buffer.read(cx).text_snapshot();
748
749 git_state.update(cx, |state, cx| {
750 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
751 });
752
753 return conflict_set;
754 }
755
756 let is_unmerged = self
757 .repository_and_path_for_buffer_id(buffer_id, cx)
758 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
759 let git_store = cx.weak_entity();
760 let buffer_git_state = self
761 .diffs
762 .entry(buffer_id)
763 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
764 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
765
766 self._subscriptions
767 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
768 cx.emit(GitStoreEvent::ConflictsUpdated);
769 }));
770
771 buffer_git_state.update(cx, |state, cx| {
772 state.conflict_set = Some(conflict_set.downgrade());
773 let buffer_snapshot = buffer.read(cx).text_snapshot();
774 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
775 });
776
777 conflict_set
778 }
779
780 pub fn project_path_git_status(
781 &self,
782 project_path: &ProjectPath,
783 cx: &App,
784 ) -> Option<FileStatus> {
785 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
786 Some(repo.read(cx).status_for_path(&repo_path)?.status)
787 }
788
789 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
790 let mut work_directory_abs_paths = Vec::new();
791 let mut checkpoints = Vec::new();
792 for repository in self.repositories.values() {
793 repository.update(cx, |repository, _| {
794 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
795 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
796 });
797 }
798
799 cx.background_executor().spawn(async move {
800 let checkpoints = future::try_join_all(checkpoints).await?;
801 Ok(GitStoreCheckpoint {
802 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
803 .into_iter()
804 .zip(checkpoints)
805 .collect(),
806 })
807 })
808 }
809
810 pub fn restore_checkpoint(
811 &self,
812 checkpoint: GitStoreCheckpoint,
813 cx: &mut App,
814 ) -> Task<Result<()>> {
815 let repositories_by_work_dir_abs_path = self
816 .repositories
817 .values()
818 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
819 .collect::<HashMap<_, _>>();
820
821 let mut tasks = Vec::new();
822 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
823 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
824 let restore = repository.update(cx, |repository, _| {
825 repository.restore_checkpoint(checkpoint)
826 });
827 tasks.push(async move { restore.await? });
828 }
829 }
830 cx.background_spawn(async move {
831 future::try_join_all(tasks).await?;
832 Ok(())
833 })
834 }
835
836 /// Compares two checkpoints, returning true if they are equal.
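    ///
    /// A usage sketch (assumes `left` and `right` were produced earlier by
    /// [`Self::checkpoint`]; the returned task must be awaited in an async
    /// context):
    ///
    /// ```ignore
    /// let unchanged = git_store.compare_checkpoints(left, right, cx).await?;
    /// // `unchanged` is true only if every repository's checkpoint matches.
    /// ```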
837 pub fn compare_checkpoints(
838 &self,
839 left: GitStoreCheckpoint,
840 mut right: GitStoreCheckpoint,
841 cx: &mut App,
842 ) -> Task<Result<bool>> {
843 let repositories_by_work_dir_abs_path = self
844 .repositories
845 .values()
846 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
847 .collect::<HashMap<_, _>>();
848
849 let mut tasks = Vec::new();
850 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
851 if let Some(right_checkpoint) = right
852 .checkpoints_by_work_dir_abs_path
853 .remove(&work_dir_abs_path)
854 {
855 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
856 {
857 let compare = repository.update(cx, |repository, _| {
858 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
859 });
860
861 tasks.push(async move { compare.await? });
862 }
863 } else {
864 return Task::ready(Ok(false));
865 }
866 }
867 cx.background_spawn(async move {
868 Ok(future::try_join_all(tasks)
869 .await?
870 .into_iter()
871 .all(|result| result))
872 })
873 }
874
875 /// Blames a buffer.
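    ///
    /// A usage sketch (passing `None` for `version` blames the buffer's current
    /// contents; the returned task must be awaited in an async context):
    ///
    /// ```ignore
    /// let blame_task = git_store.blame_buffer(&buffer, None, cx);
    /// let blame: Option<Blame> = blame_task.await?;
    /// ```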
876 pub fn blame_buffer(
877 &self,
878 buffer: &Entity<Buffer>,
879 version: Option<clock::Global>,
880 cx: &mut App,
881 ) -> Task<Result<Option<Blame>>> {
882 let buffer = buffer.read(cx);
883 let Some((repo, repo_path)) =
884 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
885 else {
886 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
887 };
888 let content = match &version {
889 Some(version) => buffer.rope_for_version(version),
890 None => buffer.as_rope().clone(),
891 };
892 let version = version.unwrap_or(buffer.version());
893 let buffer_id = buffer.remote_id();
894
895 let rx = repo.update(cx, |repo, _| {
896 repo.send_job(None, move |state, _| async move {
897 match state {
898 RepositoryState::Local { backend, .. } => backend
899 .blame(repo_path.clone(), content)
900 .await
901 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
902 .map(Some),
903 RepositoryState::Remote { project_id, client } => {
904 let response = client
905 .request(proto::BlameBuffer {
906 project_id: project_id.to_proto(),
907 buffer_id: buffer_id.into(),
908 version: serialize_version(&version),
909 })
910 .await?;
911 Ok(deserialize_blame_buffer_response(response))
912 }
913 }
914 })
915 });
916
917 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
918 }
919
920 pub fn get_permalink_to_line(
921 &self,
922 buffer: &Entity<Buffer>,
923 selection: Range<u32>,
924 cx: &mut App,
925 ) -> Task<Result<url::Url>> {
926 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
927 return Task::ready(Err(anyhow!("buffer has no file")));
928 };
929
930 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
931 &(file.worktree.read(cx).id(), file.path.clone()).into(),
932 cx,
933 ) else {
934 // If we're not in a Git repo, check whether this is a Rust source
935 // file in the Cargo registry (presumably opened with go-to-definition
936 // from a normal Rust file). If so, we can put together a permalink
937 // using crate metadata.
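            // (Such sources typically live under ~/.cargo/registry/src/; the
            // details are handled by `get_permalink_in_rust_registry_src`.)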
938 if buffer
939 .read(cx)
940 .language()
941 .is_none_or(|lang| lang.name() != "Rust".into())
942 {
943 return Task::ready(Err(anyhow!("no permalink available")));
944 }
945 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
946 return Task::ready(Err(anyhow!("no permalink available")));
947 };
948 return cx.spawn(async move |cx| {
949 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
950 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
951 .context("no permalink available")
952 });
953
954 // TODO remote case
955 };
956
957 let buffer_id = buffer.read(cx).remote_id();
958 let branch = repo.read(cx).branch.clone();
959 let remote = branch
960 .as_ref()
961 .and_then(|b| b.upstream.as_ref())
962 .and_then(|b| b.remote_name())
963 .unwrap_or("origin")
964 .to_string();
965
966 let rx = repo.update(cx, |repo, _| {
967 repo.send_job(None, move |state, cx| async move {
968 match state {
969 RepositoryState::Local { backend, .. } => {
970 let origin_url = backend
971 .remote_url(&remote)
972 .with_context(|| format!("remote \"{remote}\" not found"))?;
973
974 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
975
976 let provider_registry =
977 cx.update(GitHostingProviderRegistry::default_global)?;
978
979 let (provider, remote) =
980 parse_git_remote_url(provider_registry, &origin_url)
981 .context("parsing Git remote URL")?;
982
983 let path = repo_path.to_str().with_context(|| {
984 format!("converting repo path {repo_path:?} to string")
985 })?;
986
987 Ok(provider.build_permalink(
988 remote,
989 BuildPermalinkParams {
990 sha: &sha,
991 path,
992 selection: Some(selection),
993 },
994 ))
995 }
996 RepositoryState::Remote { project_id, client } => {
997 let response = client
998 .request(proto::GetPermalinkToLine {
999 project_id: project_id.to_proto(),
1000 buffer_id: buffer_id.into(),
1001 selection: Some(proto::Range {
1002 start: selection.start as u64,
1003 end: selection.end as u64,
1004 }),
1005 })
1006 .await?;
1007
1008 url::Url::parse(&response.permalink).context("failed to parse permalink")
1009 }
1010 }
1011 })
1012 });
1013 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1014 }
1015
1016 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1017 match &self.state {
1018 GitStoreState::Local {
1019 downstream: downstream_client,
1020 ..
1021 } => downstream_client
1022 .as_ref()
1023 .map(|state| (state.client.clone(), state.project_id)),
1024 GitStoreState::Remote {
1025 downstream: downstream_client,
1026 ..
1027 } => downstream_client.clone(),
1028 }
1029 }
1030
1031 fn upstream_client(&self) -> Option<AnyProtoClient> {
1032 match &self.state {
1033 GitStoreState::Local { .. } => None,
1034 GitStoreState::Remote {
1035 upstream_client, ..
1036 } => Some(upstream_client.clone()),
1037 }
1038 }
1039
1040 fn on_worktree_store_event(
1041 &mut self,
1042 worktree_store: Entity<WorktreeStore>,
1043 event: &WorktreeStoreEvent,
1044 cx: &mut Context<Self>,
1045 ) {
1046 let GitStoreState::Local {
1047 project_environment,
1048 downstream,
1049 next_repository_id,
1050 fs,
1051 } = &self.state
1052 else {
1053 return;
1054 };
1055
1056 match event {
1057 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1058 if let Some(worktree) = self
1059 .worktree_store
1060 .read(cx)
1061 .worktree_for_id(*worktree_id, cx)
1062 {
1063 let paths_by_git_repo =
1064 self.process_updated_entries(&worktree, updated_entries, cx);
1065 let downstream = downstream
1066 .as_ref()
1067 .map(|downstream| downstream.updates_tx.clone());
1068 cx.spawn(async move |_, cx| {
1069 let paths_by_git_repo = paths_by_git_repo.await;
1070 for (repo, paths) in paths_by_git_repo {
1071 repo.update(cx, |repo, cx| {
1072 repo.paths_changed(paths, downstream.clone(), cx);
1073 })
1074 .ok();
1075 }
1076 })
1077 .detach();
1078 }
1079 }
1080 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1081 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1082 else {
1083 return;
1084 };
1085 if !worktree.read(cx).is_visible() {
1086 log::debug!(
1087 "not adding repositories for local worktree {:?} because it's not visible",
1088 worktree.read(cx).abs_path()
1089 );
1090 return;
1091 }
1092 self.update_repositories_from_worktree(
1093 project_environment.clone(),
1094 next_repository_id.clone(),
1095 downstream
1096 .as_ref()
1097 .map(|downstream| downstream.updates_tx.clone()),
1098 changed_repos.clone(),
1099 fs.clone(),
1100 cx,
1101 );
1102 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1103 }
1104 _ => {}
1105 }
1106 }
1107
1108 fn on_repository_event(
1109 &mut self,
1110 repo: Entity<Repository>,
1111 event: &RepositoryEvent,
1112 cx: &mut Context<Self>,
1113 ) {
1114 let id = repo.read(cx).id;
1115 let repo_snapshot = repo.read(cx).snapshot.clone();
1116 for (buffer_id, diff) in self.diffs.iter() {
1117 if let Some((buffer_repo, repo_path)) =
1118 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1119 && buffer_repo == repo
1120 {
1121 diff.update(cx, |diff, cx| {
1122 if let Some(conflict_set) = &diff.conflict_set {
1123 let conflict_status_changed =
1124 conflict_set.update(cx, |conflict_set, cx| {
1125 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1126 conflict_set.set_has_conflict(has_conflict, cx)
1127 })?;
1128 if conflict_status_changed {
1129 let buffer_store = self.buffer_store.read(cx);
1130 if let Some(buffer) = buffer_store.get(*buffer_id) {
1131 let _ = diff
1132 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1133 }
1134 }
1135 }
1136 anyhow::Ok(())
1137 })
1138 .ok();
1139 }
1140 }
1141 cx.emit(GitStoreEvent::RepositoryUpdated(
1142 id,
1143 event.clone(),
1144 self.active_repo_id == Some(id),
1145 ))
1146 }
1147
1148 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1149 cx.emit(GitStoreEvent::JobsUpdated)
1150 }
1151
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1153 fn update_repositories_from_worktree(
1154 &mut self,
1155 project_environment: Entity<ProjectEnvironment>,
1156 next_repository_id: Arc<AtomicU64>,
1157 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1158 updated_git_repositories: UpdatedGitRepositoriesSet,
1159 fs: Arc<dyn Fs>,
1160 cx: &mut Context<Self>,
1161 ) {
1162 let mut removed_ids = Vec::new();
1163 for update in updated_git_repositories.iter() {
1164 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1165 let existing_work_directory_abs_path =
1166 repo.read(cx).work_directory_abs_path.clone();
1167 Some(&existing_work_directory_abs_path)
1168 == update.old_work_directory_abs_path.as_ref()
1169 || Some(&existing_work_directory_abs_path)
1170 == update.new_work_directory_abs_path.as_ref()
1171 }) {
1172 if let Some(new_work_directory_abs_path) =
1173 update.new_work_directory_abs_path.clone()
1174 {
1175 existing.update(cx, |existing, cx| {
1176 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1177 existing.schedule_scan(updates_tx.clone(), cx);
1178 });
1179 } else {
1180 removed_ids.push(*id);
1181 }
1182 } else if let UpdatedGitRepository {
1183 new_work_directory_abs_path: Some(work_directory_abs_path),
1184 dot_git_abs_path: Some(dot_git_abs_path),
1185 repository_dir_abs_path: Some(repository_dir_abs_path),
1186 common_dir_abs_path: Some(common_dir_abs_path),
1187 ..
1188 } = update
1189 {
1190 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1191 let git_store = cx.weak_entity();
1192 let repo = cx.new(|cx| {
1193 let mut repo = Repository::local(
1194 id,
1195 work_directory_abs_path.clone(),
1196 dot_git_abs_path.clone(),
1197 repository_dir_abs_path.clone(),
1198 common_dir_abs_path.clone(),
1199 project_environment.downgrade(),
1200 fs.clone(),
1201 git_store,
1202 cx,
1203 );
1204 repo.schedule_scan(updates_tx.clone(), cx);
1205 repo
1206 });
1207 self._subscriptions
1208 .push(cx.subscribe(&repo, Self::on_repository_event));
1209 self._subscriptions
1210 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1211 self.repositories.insert(id, repo);
1212 cx.emit(GitStoreEvent::RepositoryAdded(id));
1213 self.active_repo_id.get_or_insert_with(|| {
1214 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1215 id
1216 });
1217 }
1218 }
1219
1220 for id in removed_ids {
1221 if self.active_repo_id == Some(id) {
1222 self.active_repo_id = None;
1223 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1224 }
1225 self.repositories.remove(&id);
1226 if let Some(updates_tx) = updates_tx.as_ref() {
1227 updates_tx
1228 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1229 .ok();
1230 }
1231 }
1232 }
1233
1234 fn on_buffer_store_event(
1235 &mut self,
1236 _: Entity<BufferStore>,
1237 event: &BufferStoreEvent,
1238 cx: &mut Context<Self>,
1239 ) {
1240 match event {
1241 BufferStoreEvent::BufferAdded(buffer) => {
1242 cx.subscribe(buffer, |this, buffer, event, cx| {
1243 if let BufferEvent::LanguageChanged = event {
1244 let buffer_id = buffer.read(cx).remote_id();
1245 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1246 diff_state.update(cx, |diff_state, cx| {
1247 diff_state.buffer_language_changed(buffer, cx);
1248 });
1249 }
1250 }
1251 })
1252 .detach();
1253 }
1254 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1255 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1256 diffs.remove(buffer_id);
1257 }
1258 }
1259 BufferStoreEvent::BufferDropped(buffer_id) => {
1260 self.diffs.remove(buffer_id);
1261 for diffs in self.shared_diffs.values_mut() {
1262 diffs.remove(buffer_id);
1263 }
1264 }
1265
1266 _ => {}
1267 }
1268 }
1269
1270 pub fn recalculate_buffer_diffs(
1271 &mut self,
1272 buffers: Vec<Entity<Buffer>>,
1273 cx: &mut Context<Self>,
1274 ) -> impl Future<Output = ()> + use<> {
1275 let mut futures = Vec::new();
1276 for buffer in buffers {
1277 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1278 let buffer = buffer.read(cx).text_snapshot();
1279 diff_state.update(cx, |diff_state, cx| {
1280 diff_state.recalculate_diffs(buffer.clone(), cx);
1281 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1282 });
1283 futures.push(diff_state.update(cx, |diff_state, cx| {
1284 diff_state
1285 .reparse_conflict_markers(buffer, cx)
1286 .map(|_| {})
1287 .boxed()
1288 }));
1289 }
1290 }
1291 async move {
1292 futures::future::join_all(futures).await;
1293 }
1294 }
1295
1296 fn on_buffer_diff_event(
1297 &mut self,
1298 diff: Entity<buffer_diff::BufferDiff>,
1299 event: &BufferDiffEvent,
1300 cx: &mut Context<Self>,
1301 ) {
1302 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1303 let buffer_id = diff.read(cx).buffer_id;
1304 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1305 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1306 diff_state.hunk_staging_operation_count += 1;
1307 diff_state.hunk_staging_operation_count
1308 });
1309 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1310 let recv = repo.update(cx, |repo, cx| {
1311 log::debug!("hunks changed for {}", path.display());
1312 repo.spawn_set_index_text_job(
1313 path,
1314 new_index_text.as_ref().map(|rope| rope.to_string()),
1315 Some(hunk_staging_operation_count),
1316 cx,
1317 )
1318 });
1319 let diff = diff.downgrade();
1320 cx.spawn(async move |this, cx| {
1321 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1322 diff.update(cx, |diff, cx| {
1323 diff.clear_pending_hunks(cx);
1324 })
1325 .ok();
1326 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1327 .ok();
1328 }
1329 })
1330 .detach();
1331 }
1332 }
1333 }
1334 }
1335
1336 fn local_worktree_git_repos_changed(
1337 &mut self,
1338 worktree: Entity<Worktree>,
1339 changed_repos: &UpdatedGitRepositoriesSet,
1340 cx: &mut Context<Self>,
1341 ) {
1342 log::debug!("local worktree repos changed");
1343 debug_assert!(worktree.read(cx).is_local());
1344
1345 for repository in self.repositories.values() {
1346 repository.update(cx, |repository, cx| {
1347 let repo_abs_path = &repository.work_directory_abs_path;
1348 if changed_repos.iter().any(|update| {
1349 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1350 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1351 }) {
1352 repository.reload_buffer_diff_bases(cx);
1353 }
1354 });
1355 }
1356 }
1357
1358 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1359 &self.repositories
1360 }
1361
1362 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1363 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1364 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1365 Some(status.status)
1366 }
1367
1368 pub fn repository_and_path_for_buffer_id(
1369 &self,
1370 buffer_id: BufferId,
1371 cx: &App,
1372 ) -> Option<(Entity<Repository>, RepoPath)> {
1373 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1374 let project_path = buffer.read(cx).project_path(cx)?;
1375 self.repository_and_path_for_project_path(&project_path, cx)
1376 }
1377
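    /// Returns the repository whose work directory contains the given project
    /// path (preferring the most deeply nested work directory when several
    /// match), along with the corresponding `RepoPath` within that repository.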
1378 pub fn repository_and_path_for_project_path(
1379 &self,
1380 path: &ProjectPath,
1381 cx: &App,
1382 ) -> Option<(Entity<Repository>, RepoPath)> {
1383 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1384 self.repositories
1385 .values()
1386 .filter_map(|repo| {
1387 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1388 Some((repo.clone(), repo_path))
1389 })
1390 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1391 }
1392
1393 pub fn git_init(
1394 &self,
1395 path: Arc<Path>,
1396 fallback_branch_name: String,
1397 cx: &App,
1398 ) -> Task<Result<()>> {
1399 match &self.state {
1400 GitStoreState::Local { fs, .. } => {
1401 let fs = fs.clone();
1402 cx.background_executor()
1403 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1404 }
1405 GitStoreState::Remote {
1406 upstream_client,
1407 upstream_project_id: project_id,
1408 ..
1409 } => {
1410 let client = upstream_client.clone();
1411 let project_id = *project_id;
1412 cx.background_executor().spawn(async move {
1413 client
1414 .request(proto::GitInit {
1415 project_id: project_id,
1416 abs_path: path.to_string_lossy().to_string(),
1417 fallback_branch_name,
1418 })
1419 .await?;
1420 Ok(())
1421 })
1422 }
1423 }
1424 }
1425
1426 pub fn git_clone(
1427 &self,
1428 repo: String,
1429 path: impl Into<Arc<std::path::Path>>,
1430 cx: &App,
1431 ) -> Task<Result<()>> {
1432 let path = path.into();
1433 match &self.state {
1434 GitStoreState::Local { fs, .. } => {
1435 let fs = fs.clone();
1436 cx.background_executor()
1437 .spawn(async move { fs.git_clone(&repo, &path).await })
1438 }
1439 GitStoreState::Remote {
1440 upstream_client,
1441 upstream_project_id,
1442 ..
1443 } => {
1444 if upstream_client.is_via_collab() {
1445 return Task::ready(Err(anyhow!(
1446 "Git Clone isn't supported for project guests"
1447 )));
1448 }
1449 let request = upstream_client.request(proto::GitClone {
1450 project_id: *upstream_project_id,
1451 abs_path: path.to_string_lossy().to_string(),
1452 remote_repo: repo,
1453 });
1454
1455 cx.background_spawn(async move {
1456 let result = request.await?;
1457
1458 match result.success {
1459 true => Ok(()),
1460 false => Err(anyhow!("Git Clone failed")),
1461 }
1462 })
1463 }
1464 }
1465 }
1466
1467 async fn handle_update_repository(
1468 this: Entity<Self>,
1469 envelope: TypedEnvelope<proto::UpdateRepository>,
1470 mut cx: AsyncApp,
1471 ) -> Result<()> {
1472 this.update(&mut cx, |this, cx| {
1473 let mut update = envelope.payload;
1474
1475 let id = RepositoryId::from_proto(update.id);
1476 let client = this.upstream_client().context("no upstream client")?;
1477
1478 let mut is_new = false;
1479 let repo = this.repositories.entry(id).or_insert_with(|| {
1480 is_new = true;
1481 let git_store = cx.weak_entity();
1482 cx.new(|cx| {
1483 Repository::remote(
1484 id,
1485 Path::new(&update.abs_path).into(),
1486 ProjectId(update.project_id),
1487 client,
1488 git_store,
1489 cx,
1490 )
1491 })
1492 });
1493 if is_new {
1494 this._subscriptions
1495 .push(cx.subscribe(repo, Self::on_repository_event))
1496 }
1497
1498 repo.update(cx, {
1499 let update = update.clone();
1500 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1501 })?;
1502
1503 this.active_repo_id.get_or_insert_with(|| {
1504 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1505 id
1506 });
1507
1508 if let Some((client, project_id)) = this.downstream_client() {
1509 update.project_id = project_id.to_proto();
1510 client.send(update).log_err();
1511 }
1512 Ok(())
1513 })?
1514 }
1515
1516 async fn handle_remove_repository(
1517 this: Entity<Self>,
1518 envelope: TypedEnvelope<proto::RemoveRepository>,
1519 mut cx: AsyncApp,
1520 ) -> Result<()> {
1521 this.update(&mut cx, |this, cx| {
1522 let mut update = envelope.payload;
1523 let id = RepositoryId::from_proto(update.id);
1524 this.repositories.remove(&id);
1525 if let Some((client, project_id)) = this.downstream_client() {
1526 update.project_id = project_id.to_proto();
1527 client.send(update).log_err();
1528 }
1529 if this.active_repo_id == Some(id) {
1530 this.active_repo_id = None;
1531 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1532 }
1533 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1534 })
1535 }
1536
1537 async fn handle_git_init(
1538 this: Entity<Self>,
1539 envelope: TypedEnvelope<proto::GitInit>,
1540 cx: AsyncApp,
1541 ) -> Result<proto::Ack> {
1542 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1543 let name = envelope.payload.fallback_branch_name;
1544 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1545 .await?;
1546
1547 Ok(proto::Ack {})
1548 }
1549
1550 async fn handle_git_clone(
1551 this: Entity<Self>,
1552 envelope: TypedEnvelope<proto::GitClone>,
1553 cx: AsyncApp,
1554 ) -> Result<proto::GitCloneResponse> {
1555 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1556 let repo_name = envelope.payload.remote_repo;
1557 let result = cx
1558 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1559 .await;
1560
1561 Ok(proto::GitCloneResponse {
1562 success: result.is_ok(),
1563 })
1564 }
1565
1566 async fn handle_fetch(
1567 this: Entity<Self>,
1568 envelope: TypedEnvelope<proto::Fetch>,
1569 mut cx: AsyncApp,
1570 ) -> Result<proto::RemoteMessageResponse> {
1571 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1572 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1573 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1574 let askpass_id = envelope.payload.askpass_id;
1575
1576 let askpass = make_remote_delegate(
1577 this,
1578 envelope.payload.project_id,
1579 repository_id,
1580 askpass_id,
1581 &mut cx,
1582 );
1583
1584 let remote_output = repository_handle
1585 .update(&mut cx, |repository_handle, cx| {
1586 repository_handle.fetch(fetch_options, askpass, cx)
1587 })?
1588 .await??;
1589
1590 Ok(proto::RemoteMessageResponse {
1591 stdout: remote_output.stdout,
1592 stderr: remote_output.stderr,
1593 })
1594 }
1595
1596 async fn handle_push(
1597 this: Entity<Self>,
1598 envelope: TypedEnvelope<proto::Push>,
1599 mut cx: AsyncApp,
1600 ) -> Result<proto::RemoteMessageResponse> {
1601 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1602 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1603
1604 let askpass_id = envelope.payload.askpass_id;
1605 let askpass = make_remote_delegate(
1606 this,
1607 envelope.payload.project_id,
1608 repository_id,
1609 askpass_id,
1610 &mut cx,
1611 );
1612
1613 let options = envelope
1614 .payload
1615 .options
1616 .as_ref()
1617 .map(|_| match envelope.payload.options() {
1618 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1619 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1620 });
1621
1622 let branch_name = envelope.payload.branch_name.into();
1623 let remote_name = envelope.payload.remote_name.into();
1624
1625 let remote_output = repository_handle
1626 .update(&mut cx, |repository_handle, cx| {
1627 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1628 })?
1629 .await??;
1630 Ok(proto::RemoteMessageResponse {
1631 stdout: remote_output.stdout,
1632 stderr: remote_output.stderr,
1633 })
1634 }
1635
1636 async fn handle_pull(
1637 this: Entity<Self>,
1638 envelope: TypedEnvelope<proto::Pull>,
1639 mut cx: AsyncApp,
1640 ) -> Result<proto::RemoteMessageResponse> {
1641 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1642 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1643 let askpass_id = envelope.payload.askpass_id;
1644 let askpass = make_remote_delegate(
1645 this,
1646 envelope.payload.project_id,
1647 repository_id,
1648 askpass_id,
1649 &mut cx,
1650 );
1651
1652 let branch_name = envelope.payload.branch_name.into();
1653 let remote_name = envelope.payload.remote_name.into();
1654
1655 let remote_message = repository_handle
1656 .update(&mut cx, |repository_handle, cx| {
1657 repository_handle.pull(branch_name, remote_name, askpass, cx)
1658 })?
1659 .await??;
1660
1661 Ok(proto::RemoteMessageResponse {
1662 stdout: remote_message.stdout,
1663 stderr: remote_message.stderr,
1664 })
1665 }
1666
1667 async fn handle_stage(
1668 this: Entity<Self>,
1669 envelope: TypedEnvelope<proto::Stage>,
1670 mut cx: AsyncApp,
1671 ) -> Result<proto::Ack> {
1672 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1673 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1674
1675 let entries = envelope
1676 .payload
1677 .paths
1678 .into_iter()
1679 .map(PathBuf::from)
1680 .map(RepoPath::new)
1681 .collect();
1682
1683 repository_handle
1684 .update(&mut cx, |repository_handle, cx| {
1685 repository_handle.stage_entries(entries, cx)
1686 })?
1687 .await?;
1688 Ok(proto::Ack {})
1689 }
1690
1691 async fn handle_unstage(
1692 this: Entity<Self>,
1693 envelope: TypedEnvelope<proto::Unstage>,
1694 mut cx: AsyncApp,
1695 ) -> Result<proto::Ack> {
1696 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1697 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1698
1699 let entries = envelope
1700 .payload
1701 .paths
1702 .into_iter()
1703 .map(PathBuf::from)
1704 .map(RepoPath::new)
1705 .collect();
1706
1707 repository_handle
1708 .update(&mut cx, |repository_handle, cx| {
1709 repository_handle.unstage_entries(entries, cx)
1710 })?
1711 .await?;
1712
1713 Ok(proto::Ack {})
1714 }
1715
1716 async fn handle_stash(
1717 this: Entity<Self>,
1718 envelope: TypedEnvelope<proto::Stash>,
1719 mut cx: AsyncApp,
1720 ) -> Result<proto::Ack> {
1721 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1722 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1723
1724 let entries = envelope
1725 .payload
1726 .paths
1727 .into_iter()
1728 .map(PathBuf::from)
1729 .map(RepoPath::new)
1730 .collect();
1731
1732 repository_handle
1733 .update(&mut cx, |repository_handle, cx| {
1734 repository_handle.stash_entries(entries, cx)
1735 })?
1736 .await?;
1737
1738 Ok(proto::Ack {})
1739 }
1740
1741 async fn handle_stash_pop(
1742 this: Entity<Self>,
1743 envelope: TypedEnvelope<proto::StashPop>,
1744 mut cx: AsyncApp,
1745 ) -> Result<proto::Ack> {
1746 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1747 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1748
1749 repository_handle
1750 .update(&mut cx, |repository_handle, cx| {
1751 repository_handle.stash_pop(cx)
1752 })?
1753 .await?;
1754
1755 Ok(proto::Ack {})
1756 }
1757
1758 async fn handle_set_index_text(
1759 this: Entity<Self>,
1760 envelope: TypedEnvelope<proto::SetIndexText>,
1761 mut cx: AsyncApp,
1762 ) -> Result<proto::Ack> {
1763 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1764 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1765 let repo_path = RepoPath::from_str(&envelope.payload.path);
1766
1767 repository_handle
1768 .update(&mut cx, |repository_handle, cx| {
1769 repository_handle.spawn_set_index_text_job(
1770 repo_path,
1771 envelope.payload.text,
1772 None,
1773 cx,
1774 )
1775 })?
1776 .await??;
1777 Ok(proto::Ack {})
1778 }
1779
1780 async fn handle_commit(
1781 this: Entity<Self>,
1782 envelope: TypedEnvelope<proto::Commit>,
1783 mut cx: AsyncApp,
1784 ) -> Result<proto::Ack> {
1785 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1786 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1787
1788 let message = SharedString::from(envelope.payload.message);
1789 let name = envelope.payload.name.map(SharedString::from);
1790 let email = envelope.payload.email.map(SharedString::from);
1791 let options = envelope.payload.options.unwrap_or_default();
1792
1793 repository_handle
1794 .update(&mut cx, |repository_handle, cx| {
1795 repository_handle.commit(
1796 message,
1797 name.zip(email),
1798 CommitOptions {
1799 amend: options.amend,
1800 signoff: options.signoff,
1801 },
1802 cx,
1803 )
1804 })?
1805 .await??;
1806 Ok(proto::Ack {})
1807 }
1808
1809 async fn handle_get_remotes(
1810 this: Entity<Self>,
1811 envelope: TypedEnvelope<proto::GetRemotes>,
1812 mut cx: AsyncApp,
1813 ) -> Result<proto::GetRemotesResponse> {
1814 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1815 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1816
1817 let branch_name = envelope.payload.branch_name;
1818
1819 let remotes = repository_handle
1820 .update(&mut cx, |repository_handle, _| {
1821 repository_handle.get_remotes(branch_name)
1822 })?
1823 .await??;
1824
1825 Ok(proto::GetRemotesResponse {
1826 remotes: remotes
1827 .into_iter()
1828 .map(|remotes| proto::get_remotes_response::Remote {
1829 name: remotes.name.to_string(),
1830 })
1831 .collect::<Vec<_>>(),
1832 })
1833 }
1834
1835 async fn handle_get_branches(
1836 this: Entity<Self>,
1837 envelope: TypedEnvelope<proto::GitGetBranches>,
1838 mut cx: AsyncApp,
1839 ) -> Result<proto::GitBranchesResponse> {
1840 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1841 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1842
1843 let branches = repository_handle
1844 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1845 .await??;
1846
1847 Ok(proto::GitBranchesResponse {
1848 branches: branches
1849 .into_iter()
1850 .map(|branch| branch_to_proto(&branch))
1851 .collect::<Vec<_>>(),
1852 })
1853 }
1854 async fn handle_get_default_branch(
1855 this: Entity<Self>,
1856 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1857 mut cx: AsyncApp,
1858 ) -> Result<proto::GetDefaultBranchResponse> {
1859 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1860 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1861
1862 let branch = repository_handle
1863 .update(&mut cx, |repository_handle, _| {
1864 repository_handle.default_branch()
1865 })?
1866 .await??
1867 .map(Into::into);
1868
1869 Ok(proto::GetDefaultBranchResponse { branch })
1870 }
1871 async fn handle_create_branch(
1872 this: Entity<Self>,
1873 envelope: TypedEnvelope<proto::GitCreateBranch>,
1874 mut cx: AsyncApp,
1875 ) -> Result<proto::Ack> {
1876 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1877 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1878 let branch_name = envelope.payload.branch_name;
1879
1880 repository_handle
1881 .update(&mut cx, |repository_handle, _| {
1882 repository_handle.create_branch(branch_name)
1883 })?
1884 .await??;
1885
1886 Ok(proto::Ack {})
1887 }
1888
1889 async fn handle_change_branch(
1890 this: Entity<Self>,
1891 envelope: TypedEnvelope<proto::GitChangeBranch>,
1892 mut cx: AsyncApp,
1893 ) -> Result<proto::Ack> {
1894 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1895 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1896 let branch_name = envelope.payload.branch_name;
1897
1898 repository_handle
1899 .update(&mut cx, |repository_handle, _| {
1900 repository_handle.change_branch(branch_name)
1901 })?
1902 .await??;
1903
1904 Ok(proto::Ack {})
1905 }
1906
1907 async fn handle_rename_branch(
1908 this: Entity<Self>,
1909 envelope: TypedEnvelope<proto::GitRenameBranch>,
1910 mut cx: AsyncApp,
1911 ) -> Result<proto::Ack> {
1912 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1913 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1914 let branch = envelope.payload.branch;
1915 let new_name = envelope.payload.new_name;
1916
1917 repository_handle
1918 .update(&mut cx, |repository_handle, _| {
1919 repository_handle.rename_branch(branch, new_name)
1920 })?
1921 .await??;
1922
1923 Ok(proto::Ack {})
1924 }
1925
1926 async fn handle_show(
1927 this: Entity<Self>,
1928 envelope: TypedEnvelope<proto::GitShow>,
1929 mut cx: AsyncApp,
1930 ) -> Result<proto::GitCommitDetails> {
1931 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1932 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1933
1934 let commit = repository_handle
1935 .update(&mut cx, |repository_handle, _| {
1936 repository_handle.show(envelope.payload.commit)
1937 })?
1938 .await??;
1939 Ok(proto::GitCommitDetails {
1940 sha: commit.sha.into(),
1941 message: commit.message.into(),
1942 commit_timestamp: commit.commit_timestamp,
1943 author_email: commit.author_email.into(),
1944 author_name: commit.author_name.into(),
1945 })
1946 }
1947
1948 async fn handle_load_commit_diff(
1949 this: Entity<Self>,
1950 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1951 mut cx: AsyncApp,
1952 ) -> Result<proto::LoadCommitDiffResponse> {
1953 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1954 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1955
1956 let commit_diff = repository_handle
1957 .update(&mut cx, |repository_handle, _| {
1958 repository_handle.load_commit_diff(envelope.payload.commit)
1959 })?
1960 .await??;
1961 Ok(proto::LoadCommitDiffResponse {
1962 files: commit_diff
1963 .files
1964 .into_iter()
1965 .map(|file| proto::CommitFile {
1966 path: file.path.to_string(),
1967 old_text: file.old_text,
1968 new_text: file.new_text,
1969 })
1970 .collect(),
1971 })
1972 }
1973
1974 async fn handle_reset(
1975 this: Entity<Self>,
1976 envelope: TypedEnvelope<proto::GitReset>,
1977 mut cx: AsyncApp,
1978 ) -> Result<proto::Ack> {
1979 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1980 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1981
1982 let mode = match envelope.payload.mode() {
1983 git_reset::ResetMode::Soft => ResetMode::Soft,
1984 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1985 };
1986
1987 repository_handle
1988 .update(&mut cx, |repository_handle, cx| {
1989 repository_handle.reset(envelope.payload.commit, mode, cx)
1990 })?
1991 .await??;
1992 Ok(proto::Ack {})
1993 }
1994
1995 async fn handle_checkout_files(
1996 this: Entity<Self>,
1997 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
1998 mut cx: AsyncApp,
1999 ) -> Result<proto::Ack> {
2000 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2001 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2002 let paths = envelope
2003 .payload
2004 .paths
2005 .iter()
2006 .map(|s| RepoPath::from_str(s))
2007 .collect();
2008
2009 repository_handle
2010 .update(&mut cx, |repository_handle, cx| {
2011 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2012 })?
2013 .await??;
2014 Ok(proto::Ack {})
2015 }
2016
2017 async fn handle_open_commit_message_buffer(
2018 this: Entity<Self>,
2019 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2020 mut cx: AsyncApp,
2021 ) -> Result<proto::OpenBufferResponse> {
2022 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2023 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2024 let buffer = repository
2025 .update(&mut cx, |repository, cx| {
2026 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2027 })?
2028 .await?;
2029
2030 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2031 this.update(&mut cx, |this, cx| {
2032 this.buffer_store.update(cx, |buffer_store, cx| {
2033 buffer_store
2034 .create_buffer_for_peer(
2035 &buffer,
2036 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2037 cx,
2038 )
2039 .detach_and_log_err(cx);
2040 })
2041 })?;
2042
2043 Ok(proto::OpenBufferResponse {
2044 buffer_id: buffer_id.to_proto(),
2045 })
2046 }
2047
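    /// Handles an askpass request for a repository: prompts the delegate that was
    /// registered under `askpass_id`, re-registers it afterwards, and returns the
    /// user's response to the requester.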
2048 async fn handle_askpass(
2049 this: Entity<Self>,
2050 envelope: TypedEnvelope<proto::AskPassRequest>,
2051 mut cx: AsyncApp,
2052 ) -> Result<proto::AskPassResponse> {
2053 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2054 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2055
2056 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2057 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2058 debug_panic!("no askpass found");
2059 anyhow::bail!("no askpass found");
2060 };
2061
2062 let response = askpass.ask_password(envelope.payload.prompt).await?;
2063
2064 delegates
2065 .lock()
2066 .insert(envelope.payload.askpass_id, askpass);
2067
2068 Ok(proto::AskPassResponse { response })
2069 }
2070
2071 async fn handle_check_for_pushed_commits(
2072 this: Entity<Self>,
2073 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2074 mut cx: AsyncApp,
2075 ) -> Result<proto::CheckForPushedCommitsResponse> {
2076 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2077 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2078
2079 let branches = repository_handle
2080 .update(&mut cx, |repository_handle, _| {
2081 repository_handle.check_for_pushed_commits()
2082 })?
2083 .await??;
2084 Ok(proto::CheckForPushedCommitsResponse {
2085 pushed_to: branches
2086 .into_iter()
2087 .map(|commit| commit.to_string())
2088 .collect(),
2089 })
2090 }
2091
2092 async fn handle_git_diff(
2093 this: Entity<Self>,
2094 envelope: TypedEnvelope<proto::GitDiff>,
2095 mut cx: AsyncApp,
2096 ) -> Result<proto::GitDiffResponse> {
2097 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2098 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2099 let diff_type = match envelope.payload.diff_type() {
2100 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2101 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2102 };
2103
2104 let mut diff = repository_handle
2105 .update(&mut cx, |repository_handle, cx| {
2106 repository_handle.diff(diff_type, cx)
2107 })?
2108 .await??;
2109 const ONE_MB: usize = 1_000_000;
2110 if diff.len() > ONE_MB {
2111 diff = diff.chars().take(ONE_MB).collect()
2112 }
2113
2114 Ok(proto::GitDiffResponse { diff })
2115 }
2116
2117 async fn handle_open_unstaged_diff(
2118 this: Entity<Self>,
2119 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2120 mut cx: AsyncApp,
2121 ) -> Result<proto::OpenUnstagedDiffResponse> {
2122 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2123 let diff = this
2124 .update(&mut cx, |this, cx| {
2125 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2126 Some(this.open_unstaged_diff(buffer, cx))
2127 })?
2128 .context("missing buffer")?
2129 .await?;
2130 this.update(&mut cx, |this, _| {
2131 let shared_diffs = this
2132 .shared_diffs
2133 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2134 .or_default();
2135 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2136 })?;
2137 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2138 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2139 }
2140
2141 async fn handle_open_uncommitted_diff(
2142 this: Entity<Self>,
2143 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2144 mut cx: AsyncApp,
2145 ) -> Result<proto::OpenUncommittedDiffResponse> {
2146 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2147 let diff = this
2148 .update(&mut cx, |this, cx| {
2149 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2150 Some(this.open_uncommitted_diff(buffer, cx))
2151 })?
2152 .context("missing buffer")?
2153 .await?;
2154 this.update(&mut cx, |this, _| {
2155 let shared_diffs = this
2156 .shared_diffs
2157 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2158 .or_default();
2159 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2160 })?;
2161 diff.read_with(&cx, |diff, cx| {
2162 use proto::open_uncommitted_diff_response::Mode;
2163
2164 let unstaged_diff = diff.secondary_diff();
2165 let index_snapshot = unstaged_diff.and_then(|diff| {
2166 let diff = diff.read(cx);
2167 diff.base_text_exists().then(|| diff.base_text())
2168 });
2169
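            // Decide how much base text to send back: when the index snapshot is the
            // same text buffer as HEAD, only the committed text is sent and the mode is
            // `IndexMatchesHead`; otherwise whichever of the two texts exist are sent.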
2170 let mode;
2171 let staged_text;
2172 let committed_text;
2173 if diff.base_text_exists() {
2174 let committed_snapshot = diff.base_text();
2175 committed_text = Some(committed_snapshot.text());
2176 if let Some(index_text) = index_snapshot {
2177 if index_text.remote_id() == committed_snapshot.remote_id() {
2178 mode = Mode::IndexMatchesHead;
2179 staged_text = None;
2180 } else {
2181 mode = Mode::IndexAndHead;
2182 staged_text = Some(index_text.text());
2183 }
2184 } else {
2185 mode = Mode::IndexAndHead;
2186 staged_text = None;
2187 }
2188 } else {
2189 mode = Mode::IndexAndHead;
2190 committed_text = None;
2191 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2192 }
2193
2194 proto::OpenUncommittedDiffResponse {
2195 committed_text,
2196 staged_text,
2197 mode: mode.into(),
2198 }
2199 })
2200 }
2201
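    /// Applies updated index and/or HEAD base texts received for a buffer to its local
    /// git state, which triggers a diff recalculation against the current buffer text.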
2202 async fn handle_update_diff_bases(
2203 this: Entity<Self>,
2204 request: TypedEnvelope<proto::UpdateDiffBases>,
2205 mut cx: AsyncApp,
2206 ) -> Result<()> {
2207 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2208 this.update(&mut cx, |this, cx| {
2209 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2210 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2211 {
2212 let buffer = buffer.read(cx).text_snapshot();
2213 diff_state.update(cx, |diff_state, cx| {
2214 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2215 })
2216 }
2217 })
2218 }
2219
2220 async fn handle_blame_buffer(
2221 this: Entity<Self>,
2222 envelope: TypedEnvelope<proto::BlameBuffer>,
2223 mut cx: AsyncApp,
2224 ) -> Result<proto::BlameBufferResponse> {
2225 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2226 let version = deserialize_version(&envelope.payload.version);
2227 let buffer = this.read_with(&cx, |this, cx| {
2228 this.buffer_store.read(cx).get_existing(buffer_id)
2229 })??;
2230 buffer
2231 .update(&mut cx, |buffer, _| {
2232 buffer.wait_for_version(version.clone())
2233 })?
2234 .await?;
2235 let blame = this
2236 .update(&mut cx, |this, cx| {
2237 this.blame_buffer(&buffer, Some(version), cx)
2238 })?
2239 .await?;
2240 Ok(serialize_blame_buffer_response(blame))
2241 }
2242
2243 async fn handle_get_permalink_to_line(
2244 this: Entity<Self>,
2245 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2246 mut cx: AsyncApp,
2247 ) -> Result<proto::GetPermalinkToLineResponse> {
2248 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2249 // let version = deserialize_version(&envelope.payload.version);
2250 let selection = {
2251 let proto_selection = envelope
2252 .payload
2253 .selection
                .context("no selection provided to get permalink for")?;
2255 proto_selection.start as u32..proto_selection.end as u32
2256 };
2257 let buffer = this.read_with(&cx, |this, cx| {
2258 this.buffer_store.read(cx).get_existing(buffer_id)
2259 })??;
2260 let permalink = this
2261 .update(&mut cx, |this, cx| {
2262 this.get_permalink_to_line(&buffer, selection, cx)
2263 })?
2264 .await?;
2265 Ok(proto::GetPermalinkToLineResponse {
2266 permalink: permalink.to_string(),
2267 })
2268 }
2269
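    /// Looks up the repository referenced by an incoming request, returning an error if
    /// it is no longer tracked by this store.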
2270 fn repository_for_request(
2271 this: &Entity<Self>,
2272 id: RepositoryId,
2273 cx: &mut AsyncApp,
2274 ) -> Result<Entity<Repository>> {
2275 this.read_with(cx, |this, _| {
2276 this.repositories
2277 .get(&id)
2278 .context("missing repository handle")
2279 .cloned()
2280 })?
2281 }
2282
2283 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2284 self.repositories
2285 .iter()
2286 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2287 .collect()
2288 }
2289
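    /// Groups updated worktree entries by the repository whose work directory contains
    /// them, attributing paths inside nested repositories to the innermost one. The
    /// grouping runs on the background executor and yields repo-relative paths.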
2290 fn process_updated_entries(
2291 &self,
2292 worktree: &Entity<Worktree>,
2293 updated_entries: &[(Arc<Path>, ProjectEntryId, PathChange)],
2294 cx: &mut App,
2295 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2296 let mut repo_paths = self
2297 .repositories
2298 .values()
2299 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2300 .collect::<Vec<_>>();
2301 let mut entries: Vec<_> = updated_entries
2302 .iter()
2303 .map(|(path, _, _)| path.clone())
2304 .collect();
2305 entries.sort();
2306 let worktree = worktree.read(cx);
2307
2308 let entries = entries
2309 .into_iter()
2310 .filter_map(|path| worktree.absolutize(&path).ok())
2311 .collect::<Arc<[_]>>();
2312
2313 let executor = cx.background_executor().clone();
2314 cx.background_executor().spawn(async move {
2315 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2316 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2317 let mut tasks = FuturesOrdered::new();
2318 for (repo_path, repo) in repo_paths.into_iter().rev() {
2319 let entries = entries.clone();
2320 let task = executor.spawn(async move {
                    // Find the updated paths that fall within this repository's work directory.
2322 let mut ix = entries.partition_point(|path| path < &*repo_path);
2323 if ix == entries.len() {
2324 return None;
2325 };
2326
2327 let mut paths = Vec::new();
                    // All paths prefixed by a given repo form a contiguous range.
2329 while let Some(path) = entries.get(ix)
2330 && let Some(repo_path) =
2331 RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, path)
2332 {
2333 paths.push((repo_path, ix));
2334 ix += 1;
2335 }
2336 if paths.is_empty() {
2337 None
2338 } else {
2339 Some((repo, paths))
2340 }
2341 });
2342 tasks.push_back(task);
2343 }
2344
2345 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2346 let mut path_was_used = vec![false; entries.len()];
2347 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were spawned over repositories in reverse-sorted order, so deeper
            // (more specific) work directories come first; assign each path to the first
            // repository that claims it, i.e. its innermost repository.
2350 for t in tasks {
2351 let Some((repo, paths)) = t else {
2352 continue;
2353 };
2354 let entry = paths_by_git_repo.entry(repo).or_default();
2355 for (repo_path, ix) in paths {
2356 if path_was_used[ix] {
2357 continue;
2358 }
2359 path_was_used[ix] = true;
2360 entry.push(repo_path);
2361 }
2362 }
2363
2364 paths_by_git_repo
2365 })
2366 }
2367}
2368
2369impl BufferGitState {
2370 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2371 Self {
2372 unstaged_diff: Default::default(),
2373 uncommitted_diff: Default::default(),
2374 recalculate_diff_task: Default::default(),
2375 language: Default::default(),
2376 language_registry: Default::default(),
2377 recalculating_tx: postage::watch::channel_with(false).0,
2378 hunk_staging_operation_count: 0,
2379 hunk_staging_operation_count_as_of_write: 0,
2380 head_text: Default::default(),
2381 index_text: Default::default(),
2382 head_changed: Default::default(),
2383 index_changed: Default::default(),
2384 language_changed: Default::default(),
2385 conflict_updated_futures: Default::default(),
2386 conflict_set: Default::default(),
2387 reparse_conflict_markers_task: Default::default(),
2388 }
2389 }
2390
2391 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2392 self.language = buffer.read(cx).language().cloned();
2393 self.language_changed = true;
2394 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2395 }
2396
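    /// Re-parses conflict markers in the buffer and applies the resulting snapshot to
    /// the associated `ConflictSet`. The returned receiver fires once the update has
    /// been applied; if there is no live conflict set (or it held no conflicts), the
    /// sender is dropped without firing.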
2397 fn reparse_conflict_markers(
2398 &mut self,
2399 buffer: text::BufferSnapshot,
2400 cx: &mut Context<Self>,
2401 ) -> oneshot::Receiver<()> {
2402 let (tx, rx) = oneshot::channel();
2403
2404 let Some(conflict_set) = self
2405 .conflict_set
2406 .as_ref()
2407 .and_then(|conflict_set| conflict_set.upgrade())
2408 else {
2409 return rx;
2410 };
2411
2412 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2413 if conflict_set.has_conflict {
2414 Some(conflict_set.snapshot())
2415 } else {
2416 None
2417 }
2418 });
2419
2420 if let Some(old_snapshot) = old_snapshot {
2421 self.conflict_updated_futures.push(tx);
2422 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2423 let (snapshot, changed_range) = cx
2424 .background_spawn(async move {
2425 let new_snapshot = ConflictSet::parse(&buffer);
2426 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2427 (new_snapshot, changed_range)
2428 })
2429 .await;
2430 this.update(cx, |this, cx| {
2431 if let Some(conflict_set) = &this.conflict_set {
2432 conflict_set
2433 .update(cx, |conflict_set, cx| {
2434 conflict_set.set_snapshot(snapshot, changed_range, cx);
2435 })
2436 .ok();
2437 }
2438 let futures = std::mem::take(&mut this.conflict_updated_futures);
2439 for tx in futures {
2440 tx.send(()).ok();
2441 }
2442 })
2443 }))
2444 }
2445
2446 rx
2447 }
2448
2449 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2450 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2451 }
2452
2453 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2454 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2455 }
2456
2457 fn handle_base_texts_updated(
2458 &mut self,
2459 buffer: text::BufferSnapshot,
2460 message: proto::UpdateDiffBases,
2461 cx: &mut Context<Self>,
2462 ) {
2463 use proto::update_diff_bases::Mode;
2464
2465 let Some(mode) = Mode::from_i32(message.mode) else {
2466 return;
2467 };
2468
2469 let diff_bases_change = match mode {
2470 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2471 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2472 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2473 Mode::IndexAndHead => DiffBasesChange::SetEach {
2474 index: message.staged_text,
2475 head: message.committed_text,
2476 },
2477 };
2478
2479 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2480 }
2481
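    /// Returns a future that resolves once the in-progress diff recalculation for this
    /// buffer finishes, or `None` if no recalculation is currently running.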
2482 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2483 if *self.recalculating_tx.borrow() {
2484 let mut rx = self.recalculating_tx.subscribe();
2485 Some(async move {
2486 loop {
2487 let is_recalculating = rx.recv().await;
2488 if is_recalculating != Some(true) {
2489 break;
2490 }
2491 }
2492 })
2493 } else {
2494 None
2495 }
2496 }
2497
2498 fn diff_bases_changed(
2499 &mut self,
2500 buffer: text::BufferSnapshot,
2501 diff_bases_change: Option<DiffBasesChange>,
2502 cx: &mut Context<Self>,
2503 ) {
2504 match diff_bases_change {
2505 Some(DiffBasesChange::SetIndex(index)) => {
2506 self.index_text = index.map(|mut index| {
2507 text::LineEnding::normalize(&mut index);
2508 Arc::new(index)
2509 });
2510 self.index_changed = true;
2511 }
2512 Some(DiffBasesChange::SetHead(head)) => {
2513 self.head_text = head.map(|mut head| {
2514 text::LineEnding::normalize(&mut head);
2515 Arc::new(head)
2516 });
2517 self.head_changed = true;
2518 }
2519 Some(DiffBasesChange::SetBoth(text)) => {
2520 let text = text.map(|mut text| {
2521 text::LineEnding::normalize(&mut text);
2522 Arc::new(text)
2523 });
2524 self.head_text = text.clone();
2525 self.index_text = text;
2526 self.head_changed = true;
2527 self.index_changed = true;
2528 }
2529 Some(DiffBasesChange::SetEach { index, head }) => {
2530 self.index_text = index.map(|mut index| {
2531 text::LineEnding::normalize(&mut index);
2532 Arc::new(index)
2533 });
2534 self.index_changed = true;
2535 self.head_text = head.map(|mut head| {
2536 text::LineEnding::normalize(&mut head);
2537 Arc::new(head)
2538 });
2539 self.head_changed = true;
2540 }
2541 None => {}
2542 }
2543
2544 self.recalculate_diffs(buffer, cx)
2545 }
2546
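    /// Recomputes the unstaged and uncommitted diffs on a background task using the
    /// current index and HEAD texts. When the index and HEAD texts are the same object,
    /// the uncommitted diff reuses the freshly computed unstaged diff instead of
    /// diffing twice. The work is abandoned if new hunk staging operations arrive while
    /// it is in flight.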
2547 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2548 *self.recalculating_tx.borrow_mut() = true;
2549
2550 let language = self.language.clone();
2551 let language_registry = self.language_registry.clone();
2552 let unstaged_diff = self.unstaged_diff();
2553 let uncommitted_diff = self.uncommitted_diff();
2554 let head = self.head_text.clone();
2555 let index = self.index_text.clone();
2556 let index_changed = self.index_changed;
2557 let head_changed = self.head_changed;
2558 let language_changed = self.language_changed;
2559 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2560 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2561 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2562 (None, None) => true,
2563 _ => false,
2564 };
2565 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2566 log::debug!(
2567 "start recalculating diffs for buffer {}",
2568 buffer.remote_id()
2569 );
2570
2571 let mut new_unstaged_diff = None;
2572 if let Some(unstaged_diff) = &unstaged_diff {
2573 new_unstaged_diff = Some(
2574 BufferDiff::update_diff(
2575 unstaged_diff.clone(),
2576 buffer.clone(),
2577 index,
2578 index_changed,
2579 language_changed,
2580 language.clone(),
2581 language_registry.clone(),
2582 cx,
2583 )
2584 .await?,
2585 );
2586 }
2587
2588 let mut new_uncommitted_diff = None;
2589 if let Some(uncommitted_diff) = &uncommitted_diff {
2590 new_uncommitted_diff = if index_matches_head {
2591 new_unstaged_diff.clone()
2592 } else {
2593 Some(
2594 BufferDiff::update_diff(
2595 uncommitted_diff.clone(),
2596 buffer.clone(),
2597 head,
2598 head_changed,
2599 language_changed,
2600 language.clone(),
2601 language_registry.clone(),
2602 cx,
2603 )
2604 .await?,
2605 )
2606 }
2607 }
2608
2609 let cancel = this.update(cx, |this, _| {
2610 // This checks whether all pending stage/unstage operations
2611 // have quiesced (i.e. both the corresponding write and the
2612 // read of that write have completed). If not, then we cancel
2613 // this recalculation attempt to avoid invalidating pending
2614 // state too quickly; another recalculation will come along
2615 // later and clear the pending state once the state of the index has settled.
2616 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2617 *this.recalculating_tx.borrow_mut() = false;
2618 true
2619 } else {
2620 false
2621 }
2622 })?;
2623 if cancel {
2624 log::debug!(
2625 concat!(
                        "aborting recalculating diffs for buffer {} ",
                        "due to subsequent hunk operations",
2628 ),
2629 buffer.remote_id()
2630 );
2631 return Ok(());
2632 }
2633
2634 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2635 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2636 {
2637 unstaged_diff.update(cx, |diff, cx| {
2638 if language_changed {
2639 diff.language_changed(cx);
2640 }
2641 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2642 })?
2643 } else {
2644 None
2645 };
2646
2647 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2648 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2649 {
2650 uncommitted_diff.update(cx, |diff, cx| {
2651 if language_changed {
2652 diff.language_changed(cx);
2653 }
2654 diff.set_snapshot_with_secondary(
2655 new_uncommitted_diff,
2656 &buffer,
2657 unstaged_changed_range,
2658 true,
2659 cx,
2660 );
2661 })?;
2662 }
2663
2664 log::debug!(
2665 "finished recalculating diffs for buffer {}",
2666 buffer.remote_id()
2667 );
2668
2669 if let Some(this) = this.upgrade() {
2670 this.update(cx, |this, _| {
2671 this.index_changed = false;
2672 this.head_changed = false;
2673 this.language_changed = false;
2674 *this.recalculating_tx.borrow_mut() = false;
2675 })?;
2676 }
2677
2678 Ok(())
2679 }));
2680 }
2681}
2682
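/// Builds an `AskPassDelegate` that forwards credential prompts from a local git job to
/// the downstream client over RPC and relays the response back to the prompt's sender.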
2683fn make_remote_delegate(
2684 this: Entity<GitStore>,
2685 project_id: u64,
2686 repository_id: RepositoryId,
2687 askpass_id: u64,
2688 cx: &mut AsyncApp,
2689) -> AskPassDelegate {
2690 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2691 this.update(cx, |this, cx| {
2692 let Some((client, _)) = this.downstream_client() else {
2693 return;
2694 };
2695 let response = client.request(proto::AskPassRequest {
2696 project_id,
2697 repository_id: repository_id.to_proto(),
2698 askpass_id,
2699 prompt,
2700 });
2701 cx.spawn(async move |_, _| {
2702 tx.send(response.await?.response).ok();
2703 anyhow::Ok(())
2704 })
2705 .detach_and_log_err(cx);
2706 })
2707 .log_err();
2708 })
2709}
2710
2711impl RepositoryId {
2712 pub fn to_proto(self) -> u64 {
2713 self.0
2714 }
2715
2716 pub fn from_proto(id: u64) -> Self {
2717 RepositoryId(id)
2718 }
2719}
2720
2721impl RepositorySnapshot {
2722 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2723 Self {
2724 id,
2725 statuses_by_path: Default::default(),
2726 work_directory_abs_path,
2727 branch: None,
2728 head_commit: None,
2729 scan_id: 0,
2730 merge: Default::default(),
2731 remote_origin_url: None,
2732 remote_upstream_url: None,
2733 }
2734 }
2735
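    /// Builds a full `UpdateRepository` message describing this snapshot, for a peer
    /// that has no prior state for the repository.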
2736 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2737 proto::UpdateRepository {
2738 branch_summary: self.branch.as_ref().map(branch_to_proto),
2739 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2740 updated_statuses: self
2741 .statuses_by_path
2742 .iter()
2743 .map(|entry| entry.to_proto())
2744 .collect(),
2745 removed_statuses: Default::default(),
2746 current_merge_conflicts: self
2747 .merge
2748 .conflicted_paths
2749 .iter()
2750 .map(|repo_path| repo_path.to_proto())
2751 .collect(),
2752 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2753 project_id,
2754 id: self.id.to_proto(),
2755 abs_path: self.work_directory_abs_path.to_proto(),
2756 entry_ids: vec![self.id.to_proto()],
2757 scan_id: self.scan_id,
2758 is_last_update: true,
2759 }
2760 }
2761
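    /// Builds an incremental `UpdateRepository` message by walking the old and new
    /// status trees in parallel (both are ordered by repo path) to collect added,
    /// changed, and removed status entries.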
2762 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2763 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2764 let mut removed_statuses: Vec<String> = Vec::new();
2765
2766 let mut new_statuses = self.statuses_by_path.iter().peekable();
2767 let mut old_statuses = old.statuses_by_path.iter().peekable();
2768
2769 let mut current_new_entry = new_statuses.next();
2770 let mut current_old_entry = old_statuses.next();
2771 loop {
2772 match (current_new_entry, current_old_entry) {
2773 (Some(new_entry), Some(old_entry)) => {
2774 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2775 Ordering::Less => {
2776 updated_statuses.push(new_entry.to_proto());
2777 current_new_entry = new_statuses.next();
2778 }
2779 Ordering::Equal => {
2780 if new_entry.status != old_entry.status {
2781 updated_statuses.push(new_entry.to_proto());
2782 }
2783 current_old_entry = old_statuses.next();
2784 current_new_entry = new_statuses.next();
2785 }
2786 Ordering::Greater => {
2787 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2788 current_old_entry = old_statuses.next();
2789 }
2790 }
2791 }
2792 (None, Some(old_entry)) => {
2793 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2794 current_old_entry = old_statuses.next();
2795 }
2796 (Some(new_entry), None) => {
2797 updated_statuses.push(new_entry.to_proto());
2798 current_new_entry = new_statuses.next();
2799 }
2800 (None, None) => break,
2801 }
2802 }
2803
2804 proto::UpdateRepository {
2805 branch_summary: self.branch.as_ref().map(branch_to_proto),
2806 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2807 updated_statuses,
2808 removed_statuses,
2809 current_merge_conflicts: self
2810 .merge
2811 .conflicted_paths
2812 .iter()
2813 .map(|path| path.as_ref().to_proto())
2814 .collect(),
2815 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2816 project_id,
2817 id: self.id.to_proto(),
2818 abs_path: self.work_directory_abs_path.to_proto(),
2819 entry_ids: vec![],
2820 scan_id: self.scan_id,
2821 is_last_update: true,
2822 }
2823 }
2824
2825 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2826 self.statuses_by_path.iter().cloned()
2827 }
2828
2829 pub fn status_summary(&self) -> GitSummary {
2830 self.statuses_by_path.summary().item_summary
2831 }
2832
2833 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2834 self.statuses_by_path
2835 .get(&PathKey(path.0.clone()), &())
2836 .cloned()
2837 }
2838
2839 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2840 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path)
2841 }
2842
2843 #[inline]
2844 fn abs_path_to_repo_path_inner(
2845 work_directory_abs_path: &Path,
2846 abs_path: &Path,
2847 ) -> Option<RepoPath> {
2848 abs_path
2849 .strip_prefix(&work_directory_abs_path)
2850 .map(RepoPath::from)
2851 .ok()
2852 }
2853
2854 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2855 self.merge.conflicted_paths.contains(repo_path)
2856 }
2857
2858 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2859 let had_conflict_on_last_merge_head_change =
2860 self.merge.conflicted_paths.contains(repo_path);
2861 let has_conflict_currently = self
2862 .status_for_path(repo_path)
2863 .is_some_and(|entry| entry.status.is_conflicted());
2864 had_conflict_on_last_merge_head_change || has_conflict_currently
2865 }
2866
2867 /// This is the name that will be displayed in the repository selector for this repository.
2868 pub fn display_name(&self) -> SharedString {
2869 self.work_directory_abs_path
2870 .file_name()
2871 .unwrap_or_default()
2872 .to_string_lossy()
2873 .to_string()
2874 .into()
2875 }
2876}
2877
2878impl MergeDetails {
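    /// Reads the merge-related refs (MERGE_HEAD, CHERRY_PICK_HEAD, REBASE_HEAD, and so
    /// on) and, when they have changed, recomputes the set of conflicted paths from the
    /// given status. Returns the new details along with whether the merge heads should
    /// be treated as changed.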
2879 async fn load(
2880 backend: &Arc<dyn GitRepository>,
2881 status: &SumTree<StatusEntry>,
2882 prev_snapshot: &RepositorySnapshot,
2883 ) -> Result<(MergeDetails, bool)> {
2884 log::debug!("load merge details");
2885 let message = backend.merge_message().await;
2886 let heads = backend
2887 .revparse_batch(vec![
2888 "MERGE_HEAD".into(),
2889 "CHERRY_PICK_HEAD".into(),
2890 "REBASE_HEAD".into(),
2891 "REVERT_HEAD".into(),
2892 "APPLY_HEAD".into(),
2893 ])
2894 .await
2895 .log_err()
2896 .unwrap_or_default()
2897 .into_iter()
2898 .map(|opt| opt.map(SharedString::from))
2899 .collect::<Vec<_>>();
2900 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2901 let conflicted_paths = if merge_heads_changed {
2902 let current_conflicted_paths = TreeSet::from_ordered_entries(
2903 status
2904 .iter()
2905 .filter(|entry| entry.status.is_conflicted())
2906 .map(|entry| entry.repo_path.clone()),
2907 );
2908
2909 // It can happen that we run a scan while a lengthy merge is in progress
2910 // that will eventually result in conflicts, but before those conflicts
2911 // are reported by `git status`. Since for the moment we only care about
2912 // the merge heads state for the purposes of tracking conflicts, don't update
2913 // this state until we see some conflicts.
2914 if heads.iter().any(Option::is_some)
2915 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
2916 && current_conflicted_paths.is_empty()
2917 {
2918 log::debug!("not updating merge heads because no conflicts found");
2919 return Ok((
2920 MergeDetails {
2921 message: message.map(SharedString::from),
2922 ..prev_snapshot.merge.clone()
2923 },
2924 false,
2925 ));
2926 }
2927
2928 current_conflicted_paths
2929 } else {
2930 prev_snapshot.merge.conflicted_paths.clone()
2931 };
2932 let details = MergeDetails {
2933 conflicted_paths,
2934 message: message.map(SharedString::from),
2935 heads,
2936 };
2937 Ok((details, merge_heads_changed))
2938 }
2939}
2940
2941impl Repository {
2942 pub fn snapshot(&self) -> RepositorySnapshot {
2943 self.snapshot.clone()
2944 }
2945
2946 fn local(
2947 id: RepositoryId,
2948 work_directory_abs_path: Arc<Path>,
2949 dot_git_abs_path: Arc<Path>,
2950 repository_dir_abs_path: Arc<Path>,
2951 common_dir_abs_path: Arc<Path>,
2952 project_environment: WeakEntity<ProjectEnvironment>,
2953 fs: Arc<dyn Fs>,
2954 git_store: WeakEntity<GitStore>,
2955 cx: &mut Context<Self>,
2956 ) -> Self {
2957 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2958 Repository {
2959 this: cx.weak_entity(),
2960 git_store,
2961 snapshot,
2962 commit_message_buffer: None,
2963 askpass_delegates: Default::default(),
2964 paths_needing_status_update: Default::default(),
2965 latest_askpass_id: 0,
2966 job_sender: Repository::spawn_local_git_worker(
2967 work_directory_abs_path,
2968 dot_git_abs_path,
2969 repository_dir_abs_path,
2970 common_dir_abs_path,
2971 project_environment,
2972 fs,
2973 cx,
2974 ),
2975 job_id: 0,
2976 active_jobs: Default::default(),
2977 }
2978 }
2979
2980 fn remote(
2981 id: RepositoryId,
2982 work_directory_abs_path: Arc<Path>,
2983 project_id: ProjectId,
2984 client: AnyProtoClient,
2985 git_store: WeakEntity<GitStore>,
2986 cx: &mut Context<Self>,
2987 ) -> Self {
2988 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2989 Self {
2990 this: cx.weak_entity(),
2991 snapshot,
2992 commit_message_buffer: None,
2993 git_store,
2994 paths_needing_status_update: Default::default(),
2995 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2996 askpass_delegates: Default::default(),
2997 latest_askpass_id: 0,
2998 active_jobs: Default::default(),
2999 job_id: 0,
3000 }
3001 }
3002
3003 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3004 self.git_store.upgrade()
3005 }
3006
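    /// Reloads the index and HEAD texts for every open buffer belonging to this
    /// repository, determines how each buffer's diff bases changed, forwards those
    /// changes to any downstream client, and kicks off diff recalculation.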
3007 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3008 let this = cx.weak_entity();
3009 let git_store = self.git_store.clone();
3010 let _ = self.send_keyed_job(
3011 Some(GitJobKey::ReloadBufferDiffBases),
3012 None,
3013 |state, mut cx| async move {
3014 let RepositoryState::Local { backend, .. } = state else {
3015 log::error!("tried to recompute diffs for a non-local repository");
3016 return Ok(());
3017 };
3018
3019 let Some(this) = this.upgrade() else {
3020 return Ok(());
3021 };
3022
3023 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3024 git_store.update(cx, |git_store, cx| {
3025 git_store
3026 .diffs
3027 .iter()
3028 .filter_map(|(buffer_id, diff_state)| {
3029 let buffer_store = git_store.buffer_store.read(cx);
3030 let buffer = buffer_store.get(*buffer_id)?;
3031 let file = File::from_dyn(buffer.read(cx).file())?;
3032 let abs_path =
3033 file.worktree.read(cx).absolutize(&file.path).ok()?;
3034 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3035 log::debug!(
3036 "start reload diff bases for repo path {}",
3037 repo_path.0.display()
3038 );
3039 diff_state.update(cx, |diff_state, _| {
3040 let has_unstaged_diff = diff_state
3041 .unstaged_diff
3042 .as_ref()
3043 .is_some_and(|diff| diff.is_upgradable());
3044 let has_uncommitted_diff = diff_state
3045 .uncommitted_diff
3046 .as_ref()
3047 .is_some_and(|set| set.is_upgradable());
3048
3049 Some((
3050 buffer,
3051 repo_path,
3052 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3053 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3054 ))
3055 })
3056 })
3057 .collect::<Vec<_>>()
3058 })
3059 })??;
3060
3061 let buffer_diff_base_changes = cx
3062 .background_spawn(async move {
3063 let mut changes = Vec::new();
3064 for (buffer, repo_path, current_index_text, current_head_text) in
3065 &repo_diff_state_updates
3066 {
3067 let index_text = if current_index_text.is_some() {
3068 backend.load_index_text(repo_path.clone()).await
3069 } else {
3070 None
3071 };
3072 let head_text = if current_head_text.is_some() {
3073 backend.load_committed_text(repo_path.clone()).await
3074 } else {
3075 None
3076 };
3077
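                            // Work out the smallest `DiffBasesChange` needed to bring this
                            // buffer's cached base texts in line with what was just loaded,
                            // or `None` if neither the index nor the HEAD text changed.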
3078 let change =
3079 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3080 (Some(current_index), Some(current_head)) => {
3081 let index_changed =
3082 index_text.as_ref() != current_index.as_deref();
3083 let head_changed =
3084 head_text.as_ref() != current_head.as_deref();
3085 if index_changed && head_changed {
3086 if index_text == head_text {
3087 Some(DiffBasesChange::SetBoth(head_text))
3088 } else {
3089 Some(DiffBasesChange::SetEach {
3090 index: index_text,
3091 head: head_text,
3092 })
3093 }
3094 } else if index_changed {
3095 Some(DiffBasesChange::SetIndex(index_text))
3096 } else if head_changed {
3097 Some(DiffBasesChange::SetHead(head_text))
3098 } else {
3099 None
3100 }
3101 }
3102 (Some(current_index), None) => {
3103 let index_changed =
3104 index_text.as_ref() != current_index.as_deref();
3105 index_changed
3106 .then_some(DiffBasesChange::SetIndex(index_text))
3107 }
3108 (None, Some(current_head)) => {
3109 let head_changed =
3110 head_text.as_ref() != current_head.as_deref();
3111 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3112 }
3113 (None, None) => None,
3114 };
3115
3116 changes.push((buffer.clone(), change))
3117 }
3118 changes
3119 })
3120 .await;
3121
3122 git_store.update(&mut cx, |git_store, cx| {
3123 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3124 let buffer_snapshot = buffer.read(cx).text_snapshot();
3125 let buffer_id = buffer_snapshot.remote_id();
3126 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3127 continue;
3128 };
3129
3130 let downstream_client = git_store.downstream_client();
3131 diff_state.update(cx, |diff_state, cx| {
3132 use proto::update_diff_bases::Mode;
3133
3134 if let Some((diff_bases_change, (client, project_id))) =
3135 diff_bases_change.clone().zip(downstream_client)
3136 {
3137 let (staged_text, committed_text, mode) = match diff_bases_change {
3138 DiffBasesChange::SetIndex(index) => {
3139 (index, None, Mode::IndexOnly)
3140 }
3141 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3142 DiffBasesChange::SetEach { index, head } => {
3143 (index, head, Mode::IndexAndHead)
3144 }
3145 DiffBasesChange::SetBoth(text) => {
3146 (None, text, Mode::IndexMatchesHead)
3147 }
3148 };
3149 client
3150 .send(proto::UpdateDiffBases {
3151 project_id: project_id.to_proto(),
3152 buffer_id: buffer_id.to_proto(),
3153 staged_text,
3154 committed_text,
3155 mode: mode as i32,
3156 })
3157 .log_err();
3158 }
3159
3160 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3161 });
3162 }
3163 })
3164 },
3165 );
3166 }
3167
3168 pub fn send_job<F, Fut, R>(
3169 &mut self,
3170 status: Option<SharedString>,
3171 job: F,
3172 ) -> oneshot::Receiver<R>
3173 where
3174 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3175 Fut: Future<Output = R> + 'static,
3176 R: Send + 'static,
3177 {
3178 self.send_keyed_job(None, status, job)
3179 }
3180
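    /// Enqueues a job on the repository's worker, recording it in `active_jobs` (with
    /// the given status message, if any) while it runs so the UI can surface progress.
    /// The optional key identifies related jobs to the queue; how keyed jobs are
    /// coalesced is decided by the worker loop that consumes `job_sender`.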
3181 fn send_keyed_job<F, Fut, R>(
3182 &mut self,
3183 key: Option<GitJobKey>,
3184 status: Option<SharedString>,
3185 job: F,
3186 ) -> oneshot::Receiver<R>
3187 where
3188 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3189 Fut: Future<Output = R> + 'static,
3190 R: Send + 'static,
3191 {
3192 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3193 let job_id = post_inc(&mut self.job_id);
3194 let this = self.this.clone();
3195 self.job_sender
3196 .unbounded_send(GitJob {
3197 key,
3198 job: Box::new(move |state, cx: &mut AsyncApp| {
3199 let job = job(state, cx.clone());
3200 cx.spawn(async move |cx| {
3201 if let Some(s) = status.clone() {
3202 this.update(cx, |this, cx| {
3203 this.active_jobs.insert(
3204 job_id,
3205 JobInfo {
3206 start: Instant::now(),
3207 message: s.clone(),
3208 },
3209 );
3210
3211 cx.notify();
3212 })
3213 .ok();
3214 }
3215 let result = job.await;
3216
3217 this.update(cx, |this, cx| {
3218 this.active_jobs.remove(&job_id);
3219 cx.notify();
3220 })
3221 .ok();
3222
3223 result_tx.send(result).ok();
3224 })
3225 }),
3226 })
3227 .ok();
3228 result_rx
3229 }
3230
3231 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3232 let Some(git_store) = self.git_store.upgrade() else {
3233 return;
3234 };
3235 let entity = cx.entity();
3236 git_store.update(cx, |git_store, cx| {
3237 let Some((&id, _)) = git_store
3238 .repositories
3239 .iter()
3240 .find(|(_, handle)| *handle == &entity)
3241 else {
3242 return;
3243 };
3244 git_store.active_repo_id = Some(id);
3245 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3246 });
3247 }
3248
3249 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3250 self.snapshot.status()
3251 }
3252
3253 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3254 let git_store = self.git_store.upgrade()?;
3255 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3256 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
3257 let abs_path = SanitizedPath::new(&abs_path);
3258 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3259 Some(ProjectPath {
3260 worktree_id: worktree.read(cx).id(),
3261 path: relative_path.into(),
3262 })
3263 }
3264
3265 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3266 let git_store = self.git_store.upgrade()?;
3267 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3268 let abs_path = worktree_store.absolutize(path, cx)?;
3269 self.snapshot.abs_path_to_repo_path(&abs_path)
3270 }
3271
3272 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3273 other
3274 .read(cx)
3275 .snapshot
3276 .work_directory_abs_path
3277 .starts_with(&self.snapshot.work_directory_abs_path)
3278 }
3279
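    /// Returns the repository's shared commit message buffer, creating it on first use.
    /// Local repositories create a fresh buffer; remote ones request it from the host.
    /// In either case the buffer gets the "Git Commit" language when a registry is given.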
3280 pub fn open_commit_buffer(
3281 &mut self,
3282 languages: Option<Arc<LanguageRegistry>>,
3283 buffer_store: Entity<BufferStore>,
3284 cx: &mut Context<Self>,
3285 ) -> Task<Result<Entity<Buffer>>> {
3286 let id = self.id;
3287 if let Some(buffer) = self.commit_message_buffer.clone() {
3288 return Task::ready(Ok(buffer));
3289 }
3290 let this = cx.weak_entity();
3291
3292 let rx = self.send_job(None, move |state, mut cx| async move {
3293 let Some(this) = this.upgrade() else {
3294 bail!("git store was dropped");
3295 };
3296 match state {
3297 RepositoryState::Local { .. } => {
3298 this.update(&mut cx, |_, cx| {
3299 Self::open_local_commit_buffer(languages, buffer_store, cx)
3300 })?
3301 .await
3302 }
3303 RepositoryState::Remote { project_id, client } => {
3304 let request = client.request(proto::OpenCommitMessageBuffer {
3305 project_id: project_id.0,
3306 repository_id: id.to_proto(),
3307 });
3308 let response = request.await.context("requesting to open commit buffer")?;
3309 let buffer_id = BufferId::new(response.buffer_id)?;
3310 let buffer = buffer_store
3311 .update(&mut cx, |buffer_store, cx| {
3312 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3313 })?
3314 .await?;
3315 if let Some(language_registry) = languages {
3316 let git_commit_language =
3317 language_registry.language_for_name("Git Commit").await?;
3318 buffer.update(&mut cx, |buffer, cx| {
3319 buffer.set_language(Some(git_commit_language), cx);
3320 })?;
3321 }
3322 this.update(&mut cx, |this, _| {
3323 this.commit_message_buffer = Some(buffer.clone());
3324 })?;
3325 Ok(buffer)
3326 }
3327 }
3328 });
3329
3330 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3331 }
3332
3333 fn open_local_commit_buffer(
3334 language_registry: Option<Arc<LanguageRegistry>>,
3335 buffer_store: Entity<BufferStore>,
3336 cx: &mut Context<Self>,
3337 ) -> Task<Result<Entity<Buffer>>> {
3338 cx.spawn(async move |repository, cx| {
3339 let buffer = buffer_store
3340 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3341 .await?;
3342
3343 if let Some(language_registry) = language_registry {
3344 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3345 buffer.update(cx, |buffer, cx| {
3346 buffer.set_language(Some(git_commit_language), cx);
3347 })?;
3348 }
3349
3350 repository.update(cx, |repository, _| {
3351 repository.commit_message_buffer = Some(buffer.clone());
3352 })?;
3353 Ok(buffer)
3354 })
3355 }
3356
3357 pub fn checkout_files(
3358 &mut self,
3359 commit: &str,
3360 paths: Vec<RepoPath>,
3361 _cx: &mut App,
3362 ) -> oneshot::Receiver<Result<()>> {
3363 let commit = commit.to_string();
3364 let id = self.id;
3365
3366 self.send_job(
3367 Some(format!("git checkout {}", commit).into()),
3368 move |git_repo, _| async move {
3369 match git_repo {
3370 RepositoryState::Local {
3371 backend,
3372 environment,
3373 ..
3374 } => {
3375 backend
3376 .checkout_files(commit, paths, environment.clone())
3377 .await
3378 }
3379 RepositoryState::Remote { project_id, client } => {
3380 client
3381 .request(proto::GitCheckoutFiles {
3382 project_id: project_id.0,
3383 repository_id: id.to_proto(),
3384 commit,
3385 paths: paths
3386 .into_iter()
3387 .map(|p| p.to_string_lossy().to_string())
3388 .collect(),
3389 })
3390 .await?;
3391
3392 Ok(())
3393 }
3394 }
3395 },
3396 )
3397 }
3398
3399 pub fn reset(
3400 &mut self,
3401 commit: String,
3402 reset_mode: ResetMode,
3403 _cx: &mut App,
3404 ) -> oneshot::Receiver<Result<()>> {
3405 let id = self.id;
3406
3407 self.send_job(None, move |git_repo, _| async move {
3408 match git_repo {
3409 RepositoryState::Local {
3410 backend,
3411 environment,
3412 ..
3413 } => backend.reset(commit, reset_mode, environment).await,
3414 RepositoryState::Remote { project_id, client } => {
3415 client
3416 .request(proto::GitReset {
3417 project_id: project_id.0,
3418 repository_id: id.to_proto(),
3419 commit,
3420 mode: match reset_mode {
3421 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3422 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3423 },
3424 })
3425 .await?;
3426
3427 Ok(())
3428 }
3429 }
3430 })
3431 }
3432
3433 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3434 let id = self.id;
3435 self.send_job(None, move |git_repo, _cx| async move {
3436 match git_repo {
3437 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3438 RepositoryState::Remote { project_id, client } => {
3439 let resp = client
3440 .request(proto::GitShow {
3441 project_id: project_id.0,
3442 repository_id: id.to_proto(),
3443 commit,
3444 })
3445 .await?;
3446
3447 Ok(CommitDetails {
3448 sha: resp.sha.into(),
3449 message: resp.message.into(),
3450 commit_timestamp: resp.commit_timestamp,
3451 author_email: resp.author_email.into(),
3452 author_name: resp.author_name.into(),
3453 })
3454 }
3455 }
3456 })
3457 }
3458
3459 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3460 let id = self.id;
3461 self.send_job(None, move |git_repo, cx| async move {
3462 match git_repo {
3463 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3464 RepositoryState::Remote {
3465 client, project_id, ..
3466 } => {
3467 let response = client
3468 .request(proto::LoadCommitDiff {
3469 project_id: project_id.0,
3470 repository_id: id.to_proto(),
3471 commit,
3472 })
3473 .await?;
3474 Ok(CommitDiff {
3475 files: response
3476 .files
3477 .into_iter()
3478 .map(|file| CommitFile {
3479 path: Path::new(&file.path).into(),
3480 old_text: file.old_text,
3481 new_text: file.new_text,
3482 })
3483 .collect(),
3484 })
3485 }
3486 }
3487 })
3488 }
3489
3490 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3491 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3492 }
3493
3494 pub fn stage_entries(
3495 &self,
3496 entries: Vec<RepoPath>,
3497 cx: &mut Context<Self>,
3498 ) -> Task<anyhow::Result<()>> {
3499 if entries.is_empty() {
3500 return Task::ready(Ok(()));
3501 }
3502 let id = self.id;
3503
3504 let mut save_futures = Vec::new();
3505 if let Some(buffer_store) = self.buffer_store(cx) {
3506 buffer_store.update(cx, |buffer_store, cx| {
3507 for path in &entries {
3508 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3509 continue;
3510 };
3511 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3512 && buffer
3513 .read(cx)
3514 .file()
3515 .is_some_and(|file| file.disk_state().exists())
3516 {
3517 save_futures.push(buffer_store.save_buffer(buffer, cx));
3518 }
3519 }
3520 })
3521 }
3522
3523 cx.spawn(async move |this, cx| {
3524 for save_future in save_futures {
3525 save_future.await?;
3526 }
3527
3528 this.update(cx, |this, _| {
3529 this.send_job(None, move |git_repo, _cx| async move {
3530 match git_repo {
3531 RepositoryState::Local {
3532 backend,
3533 environment,
3534 ..
3535 } => backend.stage_paths(entries, environment.clone()).await,
3536 RepositoryState::Remote { project_id, client } => {
3537 client
3538 .request(proto::Stage {
3539 project_id: project_id.0,
3540 repository_id: id.to_proto(),
3541 paths: entries
3542 .into_iter()
3543 .map(|repo_path| repo_path.as_ref().to_proto())
3544 .collect(),
3545 })
3546 .await
3547 .context("sending stage request")?;
3548
3549 Ok(())
3550 }
3551 }
3552 })
3553 })?
3554 .await??;
3555
3556 Ok(())
3557 })
3558 }
3559
3560 pub fn unstage_entries(
3561 &self,
3562 entries: Vec<RepoPath>,
3563 cx: &mut Context<Self>,
3564 ) -> Task<anyhow::Result<()>> {
3565 if entries.is_empty() {
3566 return Task::ready(Ok(()));
3567 }
3568 let id = self.id;
3569
3570 let mut save_futures = Vec::new();
3571 if let Some(buffer_store) = self.buffer_store(cx) {
3572 buffer_store.update(cx, |buffer_store, cx| {
3573 for path in &entries {
3574 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3575 continue;
3576 };
3577 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3578 && buffer
3579 .read(cx)
3580 .file()
3581 .is_some_and(|file| file.disk_state().exists())
3582 {
3583 save_futures.push(buffer_store.save_buffer(buffer, cx));
3584 }
3585 }
3586 })
3587 }
3588
3589 cx.spawn(async move |this, cx| {
3590 for save_future in save_futures {
3591 save_future.await?;
3592 }
3593
3594 this.update(cx, |this, _| {
3595 this.send_job(None, move |git_repo, _cx| async move {
3596 match git_repo {
3597 RepositoryState::Local {
3598 backend,
3599 environment,
3600 ..
3601 } => backend.unstage_paths(entries, environment).await,
3602 RepositoryState::Remote { project_id, client } => {
3603 client
3604 .request(proto::Unstage {
3605 project_id: project_id.0,
3606 repository_id: id.to_proto(),
3607 paths: entries
3608 .into_iter()
3609 .map(|repo_path| repo_path.as_ref().to_proto())
3610 .collect(),
3611 })
3612 .await
3613 .context("sending unstage request")?;
3614
3615 Ok(())
3616 }
3617 }
3618 })
3619 })?
3620 .await??;
3621
3622 Ok(())
3623 })
3624 }
3625
3626 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3627 let to_stage = self
3628 .cached_status()
3629 .filter(|entry| !entry.status.staging().is_fully_staged())
3630 .map(|entry| entry.repo_path)
3631 .collect();
3632 self.stage_entries(to_stage, cx)
3633 }
3634
3635 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3636 let to_unstage = self
3637 .cached_status()
3638 .filter(|entry| entry.status.staging().has_staged())
3639 .map(|entry| entry.repo_path)
3640 .collect();
3641 self.unstage_entries(to_unstage, cx)
3642 }
3643
3644 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3645 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3646
3647 self.stash_entries(to_stash, cx)
3648 }
3649
3650 pub fn stash_entries(
3651 &mut self,
3652 entries: Vec<RepoPath>,
3653 cx: &mut Context<Self>,
3654 ) -> Task<anyhow::Result<()>> {
3655 let id = self.id;
3656
3657 cx.spawn(async move |this, cx| {
3658 this.update(cx, |this, _| {
3659 this.send_job(None, move |git_repo, _cx| async move {
3660 match git_repo {
3661 RepositoryState::Local {
3662 backend,
3663 environment,
3664 ..
3665 } => backend.stash_paths(entries, environment).await,
3666 RepositoryState::Remote { project_id, client } => {
3667 client
3668 .request(proto::Stash {
3669 project_id: project_id.0,
3670 repository_id: id.to_proto(),
3671 paths: entries
3672 .into_iter()
3673 .map(|repo_path| repo_path.as_ref().to_proto())
3674 .collect(),
3675 })
3676 .await
3677 .context("sending stash request")?;
3678 Ok(())
3679 }
3680 }
3681 })
3682 })?
3683 .await??;
3684 Ok(())
3685 })
3686 }
3687
3688 pub fn stash_pop(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3689 let id = self.id;
3690 cx.spawn(async move |this, cx| {
3691 this.update(cx, |this, _| {
3692 this.send_job(None, move |git_repo, _cx| async move {
3693 match git_repo {
3694 RepositoryState::Local {
3695 backend,
3696 environment,
3697 ..
3698 } => backend.stash_pop(environment).await,
3699 RepositoryState::Remote { project_id, client } => {
3700 client
3701 .request(proto::StashPop {
3702 project_id: project_id.0,
3703 repository_id: id.to_proto(),
3704 })
3705 .await
3706 .context("sending stash pop request")?;
3707 Ok(())
3708 }
3709 }
3710 })
3711 })?
3712 .await??;
3713 Ok(())
3714 })
3715 }
3716
3717 pub fn commit(
3718 &mut self,
3719 message: SharedString,
3720 name_and_email: Option<(SharedString, SharedString)>,
3721 options: CommitOptions,
3722 _cx: &mut App,
3723 ) -> oneshot::Receiver<Result<()>> {
3724 let id = self.id;
3725
3726 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3727 match git_repo {
3728 RepositoryState::Local {
3729 backend,
3730 environment,
3731 ..
3732 } => {
3733 backend
3734 .commit(message, name_and_email, options, environment)
3735 .await
3736 }
3737 RepositoryState::Remote { project_id, client } => {
3738 let (name, email) = name_and_email.unzip();
3739 client
3740 .request(proto::Commit {
3741 project_id: project_id.0,
3742 repository_id: id.to_proto(),
3743 message: String::from(message),
3744 name: name.map(String::from),
3745 email: email.map(String::from),
3746 options: Some(proto::commit::CommitOptions {
3747 amend: options.amend,
3748 signoff: options.signoff,
3749 }),
3750 })
3751 .await
3752 .context("sending commit request")?;
3753
3754 Ok(())
3755 }
3756 }
3757 })
3758 }
3759
3760 pub fn fetch(
3761 &mut self,
3762 fetch_options: FetchOptions,
3763 askpass: AskPassDelegate,
3764 _cx: &mut App,
3765 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3766 let askpass_delegates = self.askpass_delegates.clone();
3767 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3768 let id = self.id;
3769
3770 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3771 match git_repo {
3772 RepositoryState::Local {
3773 backend,
3774 environment,
3775 ..
3776 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3777 RepositoryState::Remote { project_id, client } => {
3778 askpass_delegates.lock().insert(askpass_id, askpass);
3779 let _defer = util::defer(|| {
3780 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3781 debug_assert!(askpass_delegate.is_some());
3782 });
3783
3784 let response = client
3785 .request(proto::Fetch {
3786 project_id: project_id.0,
3787 repository_id: id.to_proto(),
3788 askpass_id,
3789 remote: fetch_options.to_proto(),
3790 })
3791 .await
3792 .context("sending fetch request")?;
3793
3794 Ok(RemoteCommandOutput {
3795 stdout: response.stdout,
3796 stderr: response.stderr,
3797 })
3798 }
3799 }
3800 })
3801 }
3802
3803 pub fn push(
3804 &mut self,
3805 branch: SharedString,
3806 remote: SharedString,
3807 options: Option<PushOptions>,
3808 askpass: AskPassDelegate,
3809 cx: &mut Context<Self>,
3810 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3811 let askpass_delegates = self.askpass_delegates.clone();
3812 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3813 let id = self.id;
3814
3815 let args = options
3816 .map(|option| match option {
3817 PushOptions::SetUpstream => " --set-upstream",
3818 PushOptions::Force => " --force-with-lease",
3819 })
3820 .unwrap_or("");
3821
3822 let updates_tx = self
3823 .git_store()
3824 .and_then(|git_store| match &git_store.read(cx).state {
3825 GitStoreState::Local { downstream, .. } => downstream
3826 .as_ref()
3827 .map(|downstream| downstream.updates_tx.clone()),
3828 _ => None,
3829 });
3830
3831 let this = cx.weak_entity();
3832 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
3834 move |git_repo, mut cx| async move {
3835 match git_repo {
3836 RepositoryState::Local {
3837 backend,
3838 environment,
3839 ..
3840 } => {
3841 let result = backend
3842 .push(
3843 branch.to_string(),
3844 remote.to_string(),
3845 options,
3846 askpass,
3847 environment.clone(),
3848 cx.clone(),
3849 )
3850 .await;
3851 if result.is_ok() {
3852 let branches = backend.branches().await?;
3853 let branch = branches.into_iter().find(|branch| branch.is_head);
3854 log::info!("head branch after scan is {branch:?}");
3855 let snapshot = this.update(&mut cx, |this, cx| {
3856 this.snapshot.branch = branch;
3857 let snapshot = this.snapshot.clone();
3858 cx.emit(RepositoryEvent::Updated {
3859 full_scan: false,
3860 new_instance: false,
3861 });
3862 snapshot
3863 })?;
3864 if let Some(updates_tx) = updates_tx {
3865 updates_tx
3866 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3867 .ok();
3868 }
3869 }
3870 result
3871 }
3872 RepositoryState::Remote { project_id, client } => {
3873 askpass_delegates.lock().insert(askpass_id, askpass);
3874 let _defer = util::defer(|| {
3875 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3876 debug_assert!(askpass_delegate.is_some());
3877 });
3878 let response = client
3879 .request(proto::Push {
3880 project_id: project_id.0,
3881 repository_id: id.to_proto(),
3882 askpass_id,
3883 branch_name: branch.to_string(),
3884 remote_name: remote.to_string(),
3885 options: options.map(|options| match options {
3886 PushOptions::Force => proto::push::PushOptions::Force,
3887 PushOptions::SetUpstream => {
3888 proto::push::PushOptions::SetUpstream
3889 }
3890 }
3891 as i32),
3892 })
3893 .await
3894 .context("sending push request")?;
3895
3896 Ok(RemoteCommandOutput {
3897 stdout: response.stdout,
3898 stderr: response.stderr,
3899 })
3900 }
3901 }
3902 },
3903 )
3904 }
3905
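/// Pulls `branch` from `remote`, using `askpass` to answer credential prompts.
/// Remote repositories relay the operation to the host via `proto::Pull`.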
3906 pub fn pull(
3907 &mut self,
3908 branch: SharedString,
3909 remote: SharedString,
3910 askpass: AskPassDelegate,
3911 _cx: &mut App,
3912 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3913 let askpass_delegates = self.askpass_delegates.clone();
3914 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3915 let id = self.id;
3916
3917 self.send_job(
3918 Some(format!("git pull {} {}", remote, branch).into()),
3919 move |git_repo, cx| async move {
3920 match git_repo {
3921 RepositoryState::Local {
3922 backend,
3923 environment,
3924 ..
3925 } => {
3926 backend
3927 .pull(
3928 branch.to_string(),
3929 remote.to_string(),
3930 askpass,
3931 environment.clone(),
3932 cx,
3933 )
3934 .await
3935 }
3936 RepositoryState::Remote { project_id, client } => {
3937 askpass_delegates.lock().insert(askpass_id, askpass);
3938 let _defer = util::defer(|| {
3939 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3940 debug_assert!(askpass_delegate.is_some());
3941 });
3942 let response = client
3943 .request(proto::Pull {
3944 project_id: project_id.0,
3945 repository_id: id.to_proto(),
3946 askpass_id,
3947 branch_name: branch.to_string(),
3948 remote_name: remote.to_string(),
3949 })
3950 .await
3951 .context("sending pull request")?;
3952
3953 Ok(RemoteCommandOutput {
3954 stdout: response.stdout,
3955 stderr: response.stderr,
3956 })
3957 }
3958 }
3959 },
3960 )
3961 }
3962
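/// Writes `content` to the index entry for `path`, keyed by `GitJobKey::WriteIndex`
/// so that writes to the same path are coalesced.
///
/// When `hunk_staging_operation_count` is provided, it is recorded on the buffer's
/// diff state after the write completes so later index reads can be reconciled with
/// the hunk staging operations that produced them.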
3963 fn spawn_set_index_text_job(
3964 &mut self,
3965 path: RepoPath,
3966 content: Option<String>,
3967 hunk_staging_operation_count: Option<usize>,
3968 cx: &mut Context<Self>,
3969 ) -> oneshot::Receiver<anyhow::Result<()>> {
3970 let id = self.id;
3971 let this = cx.weak_entity();
3972 let git_store = self.git_store.clone();
3973 self.send_keyed_job(
3974 Some(GitJobKey::WriteIndex(path.clone())),
3975 None,
3976 move |git_repo, mut cx| async move {
3977 log::debug!("start updating index text for buffer {}", path.display());
3978 match git_repo {
3979 RepositoryState::Local {
3980 backend,
3981 environment,
3982 ..
3983 } => {
3984 backend
3985 .set_index_text(path.clone(), content, environment.clone())
3986 .await?;
3987 }
3988 RepositoryState::Remote { project_id, client } => {
3989 client
3990 .request(proto::SetIndexText {
3991 project_id: project_id.0,
3992 repository_id: id.to_proto(),
3993 path: path.as_ref().to_proto(),
3994 text: content,
3995 })
3996 .await?;
3997 }
3998 }
3999 log::debug!("finish updating index text for buffer {}", path.display());
4000
4001 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4002 let project_path = this
4003 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4004 .ok()
4005 .flatten();
4006 git_store.update(&mut cx, |git_store, cx| {
4007 let buffer_id = git_store
4008 .buffer_store
4009 .read(cx)
4010 .get_by_path(&project_path?)?
4011 .read(cx)
4012 .remote_id();
4013 let diff_state = git_store.diffs.get(&buffer_id)?;
4014 diff_state.update(cx, |diff_state, _| {
4015 diff_state.hunk_staging_operation_count_as_of_write =
4016 hunk_staging_operation_count;
4017 });
4018 Some(())
4019 })?;
4020 }
4021 Ok(())
4022 },
4023 )
4024 }
4025
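/// Returns the repository's remotes; `branch_name` can be passed to scope the
/// lookup to a particular branch.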
4026 pub fn get_remotes(
4027 &mut self,
4028 branch_name: Option<String>,
4029 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4030 let id = self.id;
4031 self.send_job(None, move |repo, _cx| async move {
4032 match repo {
4033 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4034 RepositoryState::Remote { project_id, client } => {
4035 let response = client
4036 .request(proto::GetRemotes {
4037 project_id: project_id.0,
4038 repository_id: id.to_proto(),
4039 branch_name,
4040 })
4041 .await?;
4042
4043 let remotes = response
4044 .remotes
4045 .into_iter()
4046 .map(|remote| git::repository::Remote {
4047 name: remote.name.into(),
4048 })
4049 .collect();
4050
4051 Ok(remotes)
4052 }
4053 }
4054 })
4055 }
4056
4057 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4058 let id = self.id;
4059 self.send_job(None, move |repo, _| async move {
4060 match repo {
4061 RepositoryState::Local { backend, .. } => backend.branches().await,
4062 RepositoryState::Remote { project_id, client } => {
4063 let response = client
4064 .request(proto::GitGetBranches {
4065 project_id: project_id.0,
4066 repository_id: id.to_proto(),
4067 })
4068 .await?;
4069
4070 let branches = response
4071 .branches
4072 .into_iter()
4073 .map(|branch| proto_to_branch(&branch))
4074 .collect();
4075
4076 Ok(branches)
4077 }
4078 }
4079 })
4080 }
4081
4082 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4083 let id = self.id;
4084 self.send_job(None, move |repo, _| async move {
4085 match repo {
4086 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4087 RepositoryState::Remote { project_id, client } => {
4088 let response = client
4089 .request(proto::GetDefaultBranch {
4090 project_id: project_id.0,
4091 repository_id: id.to_proto(),
4092 })
4093 .await?;
4094
4095 anyhow::Ok(response.branch.map(SharedString::from))
4096 }
4097 }
4098 })
4099 }
4100
4101 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4102 let id = self.id;
4103 self.send_job(None, move |repo, _cx| async move {
4104 match repo {
4105 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4106 RepositoryState::Remote { project_id, client } => {
4107 let response = client
4108 .request(proto::GitDiff {
4109 project_id: project_id.0,
4110 repository_id: id.to_proto(),
4111 diff_type: match diff_type {
4112 DiffType::HeadToIndex => {
4113 proto::git_diff::DiffType::HeadToIndex.into()
4114 }
4115 DiffType::HeadToWorktree => {
4116 proto::git_diff::DiffType::HeadToWorktree.into()
4117 }
4118 },
4119 })
4120 .await?;
4121
4122 Ok(response.diff)
4123 }
4124 }
4125 })
4126 }
4127
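/// Creates a new branch named `branch_name`.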
4128 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4129 let id = self.id;
4130 self.send_job(
4131 Some(format!("git switch -c {branch_name}").into()),
4132 move |repo, _cx| async move {
4133 match repo {
4134 RepositoryState::Local { backend, .. } => {
4135 backend.create_branch(branch_name).await
4136 }
4137 RepositoryState::Remote { project_id, client } => {
4138 client
4139 .request(proto::GitCreateBranch {
4140 project_id: project_id.0,
4141 repository_id: id.to_proto(),
4142 branch_name,
4143 })
4144 .await?;
4145
4146 Ok(())
4147 }
4148 }
4149 },
4150 )
4151 }
4152
4153 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4154 let id = self.id;
4155 self.send_job(
4156 Some(format!("git switch {branch_name}").into()),
4157 move |repo, _cx| async move {
4158 match repo {
4159 RepositoryState::Local { backend, .. } => {
4160 backend.change_branch(branch_name).await
4161 }
4162 RepositoryState::Remote { project_id, client } => {
4163 client
4164 .request(proto::GitChangeBranch {
4165 project_id: project_id.0,
4166 repository_id: id.to_proto(),
4167 branch_name,
4168 })
4169 .await?;
4170
4171 Ok(())
4172 }
4173 }
4174 },
4175 )
4176 }
4177
4178 pub fn rename_branch(
4179 &mut self,
4180 branch: String,
4181 new_name: String,
4182 ) -> oneshot::Receiver<Result<()>> {
4183 let id = self.id;
4184 self.send_job(
4185 Some(format!("git branch -m {branch} {new_name}").into()),
4186 move |repo, _cx| async move {
4187 match repo {
4188 RepositoryState::Local { backend, .. } => {
4189 backend.rename_branch(branch, new_name).await
4190 }
4191 RepositoryState::Remote { project_id, client } => {
4192 client
4193 .request(proto::GitRenameBranch {
4194 project_id: project_id.0,
4195 repository_id: id.to_proto(),
4196 branch,
4197 new_name,
4198 })
4199 .await?;
4200
4201 Ok(())
4202 }
4203 }
4204 },
4205 )
4206 }
4207
4208 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4209 let id = self.id;
4210 self.send_job(None, move |repo, _cx| async move {
4211 match repo {
4212 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4213 RepositoryState::Remote { project_id, client } => {
4214 let response = client
4215 .request(proto::CheckForPushedCommits {
4216 project_id: project_id.0,
4217 repository_id: id.to_proto(),
4218 })
4219 .await?;
4220
4221 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4222
4223 Ok(branches)
4224 }
4225 }
4226 })
4227 }
4228
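/// Captures a checkpoint of the repository's current state. Checkpoints are only
/// supported for local repositories; remote repositories return an error.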
4229 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4230 self.send_job(None, |repo, _cx| async move {
4231 match repo {
4232 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4233 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4234 }
4235 })
4236 }
4237
4238 pub fn restore_checkpoint(
4239 &mut self,
4240 checkpoint: GitRepositoryCheckpoint,
4241 ) -> oneshot::Receiver<Result<()>> {
4242 self.send_job(None, move |repo, _cx| async move {
4243 match repo {
4244 RepositoryState::Local { backend, .. } => {
4245 backend.restore_checkpoint(checkpoint).await
4246 }
4247 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4248 }
4249 })
4250 }
4251
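/// Applies a `proto::UpdateRepository` message received from the host: replaces the
/// branch, head commit, and merge state, merges the updated and removed statuses
/// into `statuses_by_path`, bumps the scan id when this is the last update in a
/// batch, and emits `RepositoryEvent::Updated`.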
4252 pub(crate) fn apply_remote_update(
4253 &mut self,
4254 update: proto::UpdateRepository,
4255 is_new: bool,
4256 cx: &mut Context<Self>,
4257 ) -> Result<()> {
4258 let conflicted_paths = TreeSet::from_ordered_entries(
4259 update
4260 .current_merge_conflicts
4261 .into_iter()
4262 .map(|path| RepoPath(Path::new(&path).into())),
4263 );
4264 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4265 self.snapshot.head_commit = update
4266 .head_commit_details
4267 .as_ref()
4268 .map(proto_to_commit_details);
4269
4270 self.snapshot.merge.conflicted_paths = conflicted_paths;
4271 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4272
4273 let edits = update
4274 .removed_statuses
4275 .into_iter()
4276 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
4277 .chain(
4278 update
4279 .updated_statuses
4280 .into_iter()
4281 .filter_map(|updated_status| {
4282 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4283 }),
4284 )
4285 .collect::<Vec<_>>();
4286 self.snapshot.statuses_by_path.edit(edits, &());
4287 if update.is_last_update {
4288 self.snapshot.scan_id = update.scan_id;
4289 }
4290 cx.emit(RepositoryEvent::Updated {
4291 full_scan: true,
4292 new_instance: is_new,
4293 });
4294 Ok(())
4295 }
4296
4297 pub fn compare_checkpoints(
4298 &mut self,
4299 left: GitRepositoryCheckpoint,
4300 right: GitRepositoryCheckpoint,
4301 ) -> oneshot::Receiver<Result<bool>> {
4302 self.send_job(None, move |repo, _cx| async move {
4303 match repo {
4304 RepositoryState::Local { backend, .. } => {
4305 backend.compare_checkpoints(left, right).await
4306 }
4307 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4308 }
4309 })
4310 }
4311
4312 pub fn diff_checkpoints(
4313 &mut self,
4314 base_checkpoint: GitRepositoryCheckpoint,
4315 target_checkpoint: GitRepositoryCheckpoint,
4316 ) -> oneshot::Receiver<Result<String>> {
4317 self.send_job(None, move |repo, _cx| async move {
4318 match repo {
4319 RepositoryState::Local { backend, .. } => {
4320 backend
4321 .diff_checkpoints(base_checkpoint, target_checkpoint)
4322 .await
4323 }
4324 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4325 }
4326 })
4327 }
4328
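/// Schedules a full git status scan, keyed by `GitJobKey::ReloadGitState` so that an
/// older queued scan is skipped when a newer one is already waiting. The freshly
/// computed snapshot replaces the current one and is forwarded downstream when
/// `updates_tx` is set.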
4329 fn schedule_scan(
4330 &mut self,
4331 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4332 cx: &mut Context<Self>,
4333 ) {
4334 let this = cx.weak_entity();
4335 let _ = self.send_keyed_job(
4336 Some(GitJobKey::ReloadGitState),
4337 None,
4338 |state, mut cx| async move {
4339 log::debug!("run scheduled git status scan");
4340
4341 let Some(this) = this.upgrade() else {
4342 return Ok(());
4343 };
4344 let RepositoryState::Local { backend, .. } = state else {
4345 bail!("not a local repository")
4346 };
4347 let (snapshot, events) = this
4348 .update(&mut cx, |this, _| {
4349 this.paths_needing_status_update.clear();
4350 compute_snapshot(
4351 this.id,
4352 this.work_directory_abs_path.clone(),
4353 this.snapshot.clone(),
4354 backend.clone(),
4355 )
4356 })?
4357 .await?;
4358 this.update(&mut cx, |this, cx| {
4359 this.snapshot = snapshot.clone();
4360 for event in events {
4361 cx.emit(event);
4362 }
4363 })?;
4364 if let Some(updates_tx) = updates_tx {
4365 updates_tx
4366 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4367 .ok();
4368 }
4369 Ok(())
4370 },
4371 );
4372 }
4373
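/// Spawns the background worker that services git jobs for a local repository.
///
/// The worker resolves the directory environment for the work directory, opens the
/// repository through the filesystem backend, registers any additional git hosting
/// providers, and then processes queued jobs in order, skipping a keyed job when a
/// newer job with the same key is already waiting.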
4374 fn spawn_local_git_worker(
4375 work_directory_abs_path: Arc<Path>,
4376 dot_git_abs_path: Arc<Path>,
4377 _repository_dir_abs_path: Arc<Path>,
4378 _common_dir_abs_path: Arc<Path>,
4379 project_environment: WeakEntity<ProjectEnvironment>,
4380 fs: Arc<dyn Fs>,
4381 cx: &mut Context<Self>,
4382 ) -> mpsc::UnboundedSender<GitJob> {
4383 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4384
4385 cx.spawn(async move |_, cx| {
4386 let environment = project_environment
4387 .upgrade()
4388 .context("missing project environment")?
4389 .update(cx, |project_environment, cx| {
4390 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4391 })?
4392 .await
4393 .unwrap_or_else(|| {
4394 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4395 HashMap::default()
4396 });
4397 let backend = cx
4398 .background_spawn(async move {
4399 fs.open_repo(&dot_git_abs_path)
4400 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4401 })
4402 .await?;
4403
4404 if let Some(git_hosting_provider_registry) =
4405 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4406 {
4407 git_hosting_providers::register_additional_providers(
4408 git_hosting_provider_registry,
4409 backend.clone(),
4410 );
4411 }
4412
4413 let state = RepositoryState::Local {
4414 backend,
4415 environment: Arc::new(environment),
4416 };
4417 let mut jobs = VecDeque::new();
4418 loop {
4419 while let Ok(Some(next_job)) = job_rx.try_next() {
4420 jobs.push_back(next_job);
4421 }
4422
4423 if let Some(job) = jobs.pop_front() {
4424 if let Some(current_key) = &job.key
4425 && jobs
4426 .iter()
4427 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4428 {
4429 continue;
4430 }
4431 (job.job)(state.clone(), cx).await;
4432 } else if let Some(job) = job_rx.next().await {
4433 jobs.push_back(job);
4434 } else {
4435 break;
4436 }
4437 }
4438 anyhow::Ok(())
4439 })
4440 .detach_and_log_err(cx);
4441
4442 job_tx
4443 }
4444
4445 fn spawn_remote_git_worker(
4446 project_id: ProjectId,
4447 client: AnyProtoClient,
4448 cx: &mut Context<Self>,
4449 ) -> mpsc::UnboundedSender<GitJob> {
4450 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4451
4452 cx.spawn(async move |_, cx| {
4453 let state = RepositoryState::Remote { project_id, client };
4454 let mut jobs = VecDeque::new();
4455 loop {
4456 while let Ok(Some(next_job)) = job_rx.try_next() {
4457 jobs.push_back(next_job);
4458 }
4459
4460 if let Some(job) = jobs.pop_front() {
4461 if let Some(current_key) = &job.key
4462 && jobs
4463 .iter()
4464 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4465 {
4466 continue;
4467 }
4468 (job.job)(state.clone(), cx).await;
4469 } else if let Some(job) = job_rx.next().await {
4470 jobs.push_back(job);
4471 } else {
4472 break;
4473 }
4474 }
4475 anyhow::Ok(())
4476 })
4477 .detach_and_log_err(cx);
4478
4479 job_tx
4480 }
4481
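/// Loads the staged (index) text for `repo_path`, either directly from the local
/// backend or via a `proto::OpenUnstagedDiff` request to the host.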
4482 fn load_staged_text(
4483 &mut self,
4484 buffer_id: BufferId,
4485 repo_path: RepoPath,
4486 cx: &App,
4487 ) -> Task<Result<Option<String>>> {
4488 let rx = self.send_job(None, move |state, _| async move {
4489 match state {
4490 RepositoryState::Local { backend, .. } => {
4491 anyhow::Ok(backend.load_index_text(repo_path).await)
4492 }
4493 RepositoryState::Remote { project_id, client } => {
4494 let response = client
4495 .request(proto::OpenUnstagedDiff {
4496 project_id: project_id.to_proto(),
4497 buffer_id: buffer_id.to_proto(),
4498 })
4499 .await?;
4500 Ok(response.staged_text)
4501 }
4502 }
4503 });
4504 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4505 }
4506
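/// Loads the committed (HEAD) and staged (index) texts for `repo_path`, collapsing
/// them into a single `DiffBasesChange::SetBoth` when they are identical so the
/// caller can share one diff base.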
4507 fn load_committed_text(
4508 &mut self,
4509 buffer_id: BufferId,
4510 repo_path: RepoPath,
4511 cx: &App,
4512 ) -> Task<Result<DiffBasesChange>> {
4513 let rx = self.send_job(None, move |state, _| async move {
4514 match state {
4515 RepositoryState::Local { backend, .. } => {
4516 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4517 let staged_text = backend.load_index_text(repo_path).await;
4518 let diff_bases_change = if committed_text == staged_text {
4519 DiffBasesChange::SetBoth(committed_text)
4520 } else {
4521 DiffBasesChange::SetEach {
4522 index: staged_text,
4523 head: committed_text,
4524 }
4525 };
4526 anyhow::Ok(diff_bases_change)
4527 }
4528 RepositoryState::Remote { project_id, client } => {
4529 use proto::open_uncommitted_diff_response::Mode;
4530
4531 let response = client
4532 .request(proto::OpenUncommittedDiff {
4533 project_id: project_id.to_proto(),
4534 buffer_id: buffer_id.to_proto(),
4535 })
4536 .await?;
4537 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4538 let bases = match mode {
4539 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4540 Mode::IndexAndHead => DiffBasesChange::SetEach {
4541 head: response.committed_text,
4542 index: response.staged_text,
4543 },
4544 };
4545 Ok(bases)
4546 }
4547 }
4548 });
4549
4550 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4551 }
4552
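/// Notes that the given paths may have changed and schedules a keyed
/// `RefreshStatuses` job that re-queries their statuses, applies any differences to
/// the snapshot, and forwards the updated snapshot downstream when shared.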
4553 fn paths_changed(
4554 &mut self,
4555 paths: Vec<RepoPath>,
4556 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4557 cx: &mut Context<Self>,
4558 ) {
4559 self.paths_needing_status_update.extend(paths);
4560
4561 let this = cx.weak_entity();
4562 let _ = self.send_keyed_job(
4563 Some(GitJobKey::RefreshStatuses),
4564 None,
4565 |state, mut cx| async move {
4566 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4567 (
4568 this.snapshot.clone(),
4569 mem::take(&mut this.paths_needing_status_update),
4570 )
4571 })?;
4572 let RepositoryState::Local { backend, .. } = state else {
4573 bail!("not a local repository")
4574 };
4575
4576 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4577 if paths.is_empty() {
4578 return Ok(());
4579 }
4580 let statuses = backend.status(&paths).await?;
4581
4582 let changed_path_statuses = cx
4583 .background_spawn(async move {
4584 let mut changed_path_statuses = Vec::new();
4585 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4586 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4587
4588 for (repo_path, status) in &*statuses.entries {
4589 changed_paths.remove(repo_path);
4590 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4591 && cursor.item().is_some_and(|entry| entry.status == *status)
4592 {
4593 continue;
4594 }
4595
4596 changed_path_statuses.push(Edit::Insert(StatusEntry {
4597 repo_path: repo_path.clone(),
4598 status: *status,
4599 }));
4600 }
4601 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4602 for path in changed_paths.into_iter() {
4603 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4604 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4605 }
4606 }
4607 changed_path_statuses
4608 })
4609 .await;
4610
4611 this.update(&mut cx, |this, cx| {
4612 if !changed_path_statuses.is_empty() {
4613 this.snapshot
4614 .statuses_by_path
4615 .edit(changed_path_statuses, &());
4616 this.snapshot.scan_id += 1;
4617 if let Some(updates_tx) = updates_tx {
4618 updates_tx
4619 .unbounded_send(DownstreamUpdate::UpdateRepository(
4620 this.snapshot.clone(),
4621 ))
4622 .ok();
4623 }
4624 }
4625 cx.emit(RepositoryEvent::Updated {
4626 full_scan: false,
4627 new_instance: false,
4628 });
4629 })
4630 },
4631 );
4632 }
4633
4634 /// Returns the currently running git command, if any, along with when it started.
4635 pub fn current_job(&self) -> Option<JobInfo> {
4636 self.active_jobs.values().next().cloned()
4637 }
4638
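/// Enqueues a no-op job and returns a receiver that resolves once every job queued
/// before it has been processed.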
4639 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4640 self.send_job(None, |_, _| async {})
4641 }
4642}
4643
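/// Builds a permalink for a file inside the extracted sources of a crates.io package by
/// reading the `.cargo_vcs_info.json` and `Cargo.toml` shipped with the package to
/// recover the upstream repository URL and the commit SHA it was built from.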
4644fn get_permalink_in_rust_registry_src(
4645 provider_registry: Arc<GitHostingProviderRegistry>,
4646 path: PathBuf,
4647 selection: Range<u32>,
4648) -> Result<url::Url> {
4649 #[derive(Deserialize)]
4650 struct CargoVcsGit {
4651 sha1: String,
4652 }
4653
4654 #[derive(Deserialize)]
4655 struct CargoVcsInfo {
4656 git: CargoVcsGit,
4657 path_in_vcs: String,
4658 }
4659
4660 #[derive(Deserialize)]
4661 struct CargoPackage {
4662 repository: String,
4663 }
4664
4665 #[derive(Deserialize)]
4666 struct CargoToml {
4667 package: CargoPackage,
4668 }
4669
4670 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4671 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4672 Some((dir, json))
4673 }) else {
4674 bail!("No .cargo_vcs_info.json found in parent directories")
4675 };
4676 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4677 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4678 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4679 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4680 .context("parsing package.repository field of manifest")?;
4681 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4682 let permalink = provider.build_permalink(
4683 remote,
4684 BuildPermalinkParams {
4685 sha: &cargo_vcs_info.git.sha1,
4686 path: &path.to_string_lossy(),
4687 selection: Some(selection),
4688 },
4689 );
4690 Ok(permalink)
4691}
4692
4693fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4694 let Some(blame) = blame else {
4695 return proto::BlameBufferResponse {
4696 blame_response: None,
4697 };
4698 };
4699
4700 let entries = blame
4701 .entries
4702 .into_iter()
4703 .map(|entry| proto::BlameEntry {
4704 sha: entry.sha.as_bytes().into(),
4705 start_line: entry.range.start,
4706 end_line: entry.range.end,
4707 original_line_number: entry.original_line_number,
4708 author: entry.author,
4709 author_mail: entry.author_mail,
4710 author_time: entry.author_time,
4711 author_tz: entry.author_tz,
4712 committer: entry.committer_name,
4713 committer_mail: entry.committer_email,
4714 committer_time: entry.committer_time,
4715 committer_tz: entry.committer_tz,
4716 summary: entry.summary,
4717 previous: entry.previous,
4718 filename: entry.filename,
4719 })
4720 .collect::<Vec<_>>();
4721
4722 let messages = blame
4723 .messages
4724 .into_iter()
4725 .map(|(oid, message)| proto::CommitMessage {
4726 oid: oid.as_bytes().into(),
4727 message,
4728 })
4729 .collect::<Vec<_>>();
4730
4731 proto::BlameBufferResponse {
4732 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4733 entries,
4734 messages,
4735 remote_url: blame.remote_url,
4736 }),
4737 }
4738}
4739
4740fn deserialize_blame_buffer_response(
4741 response: proto::BlameBufferResponse,
4742) -> Option<git::blame::Blame> {
4743 let response = response.blame_response?;
4744 let entries = response
4745 .entries
4746 .into_iter()
4747 .filter_map(|entry| {
4748 Some(git::blame::BlameEntry {
4749 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4750 range: entry.start_line..entry.end_line,
4751 original_line_number: entry.original_line_number,
4752 committer_name: entry.committer,
4753 committer_time: entry.committer_time,
4754 committer_tz: entry.committer_tz,
4755 committer_email: entry.committer_mail,
4756 author: entry.author,
4757 author_mail: entry.author_mail,
4758 author_time: entry.author_time,
4759 author_tz: entry.author_tz,
4760 summary: entry.summary,
4761 previous: entry.previous,
4762 filename: entry.filename,
4763 })
4764 })
4765 .collect::<Vec<_>>();
4766
4767 let messages = response
4768 .messages
4769 .into_iter()
4770 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4771 .collect::<HashMap<_, _>>();
4772
4773 Some(Blame {
4774 entries,
4775 messages,
4776 remote_url: response.remote_url,
4777 })
4778}
4779
4780fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4781 proto::Branch {
4782 is_head: branch.is_head,
4783 ref_name: branch.ref_name.to_string(),
4784 unix_timestamp: branch
4785 .most_recent_commit
4786 .as_ref()
4787 .map(|commit| commit.commit_timestamp as u64),
4788 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4789 ref_name: upstream.ref_name.to_string(),
4790 tracking: upstream
4791 .tracking
4792 .status()
4793 .map(|upstream| proto::UpstreamTracking {
4794 ahead: upstream.ahead as u64,
4795 behind: upstream.behind as u64,
4796 }),
4797 }),
4798 most_recent_commit: branch
4799 .most_recent_commit
4800 .as_ref()
4801 .map(|commit| proto::CommitSummary {
4802 sha: commit.sha.to_string(),
4803 subject: commit.subject.to_string(),
4804 commit_timestamp: commit.commit_timestamp,
4805 author_name: commit.author_name.to_string(),
4806 }),
4807 }
4808}
4809
4810fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
4811 git::repository::Branch {
4812 is_head: proto.is_head,
4813 ref_name: proto.ref_name.clone().into(),
4814 upstream: proto
4815 .upstream
4816 .as_ref()
4817 .map(|upstream| git::repository::Upstream {
4818 ref_name: upstream.ref_name.to_string().into(),
4819 tracking: upstream
4820 .tracking
4821 .as_ref()
4822 .map(|tracking| {
4823 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
4824 ahead: tracking.ahead as u32,
4825 behind: tracking.behind as u32,
4826 })
4827 })
4828 .unwrap_or(git::repository::UpstreamTracking::Gone),
4829 }),
4830 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
4831 git::repository::CommitSummary {
4832 sha: commit.sha.to_string().into(),
4833 subject: commit.subject.to_string().into(),
4834 commit_timestamp: commit.commit_timestamp,
4835 author_name: commit.author_name.to_string().into(),
4836 has_parent: true,
4837 }
4838 }),
4839 }
4840}
4841
4842fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
4843 proto::GitCommitDetails {
4844 sha: commit.sha.to_string(),
4845 message: commit.message.to_string(),
4846 commit_timestamp: commit.commit_timestamp,
4847 author_email: commit.author_email.to_string(),
4848 author_name: commit.author_name.to_string(),
4849 }
4850}
4851
4852fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
4853 CommitDetails {
4854 sha: proto.sha.clone().into(),
4855 message: proto.message.clone().into(),
4856 commit_timestamp: proto.commit_timestamp,
4857 author_email: proto.author_email.clone().into(),
4858 author_name: proto.author_name.clone().into(),
4859 }
4860}
4861
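/// Recomputes a repository snapshot from scratch: reads the current branch, file
/// statuses, merge details, head commit, and the origin/upstream remote URLs, and
/// returns the events implied by the differences from `prev_snapshot`.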
4862async fn compute_snapshot(
4863 id: RepositoryId,
4864 work_directory_abs_path: Arc<Path>,
4865 prev_snapshot: RepositorySnapshot,
4866 backend: Arc<dyn GitRepository>,
4867) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
4868 let mut events = Vec::new();
4869 let branches = backend.branches().await?;
4870 let branch = branches.into_iter().find(|branch| branch.is_head);
4871 let statuses = backend
4872 .status(std::slice::from_ref(&WORK_DIRECTORY_REPO_PATH))
4873 .await?;
4874 let statuses_by_path = SumTree::from_iter(
4875 statuses
4876 .entries
4877 .iter()
4878 .map(|(repo_path, status)| StatusEntry {
4879 repo_path: repo_path.clone(),
4880 status: *status,
4881 }),
4882 &(),
4883 );
4884 let (merge_details, merge_heads_changed) =
4885 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
4886 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
4887
4888 if merge_heads_changed
4889 || branch != prev_snapshot.branch
4890 || statuses_by_path != prev_snapshot.statuses_by_path
4891 {
4892 events.push(RepositoryEvent::Updated {
4893 full_scan: true,
4894 new_instance: false,
4895 });
4896 }
4897
4898 // Cache merge conflict paths so that staging or unstaging files doesn't change them;
4899 // they are only refreshed when the merge heads change (at commit time, etc.).
4900 if merge_heads_changed {
4901 events.push(RepositoryEvent::MergeHeadsChanged);
4902 }
4903
4904 // Useful when `branch` is `None`, e.g. in a detached HEAD state.
4905 let head_commit = match backend.head_sha().await {
4906 Some(head_sha) => backend.show(head_sha).await.log_err(),
4907 None => None,
4908 };
4909
4910 // Used by edit prediction data collection
4911 let remote_origin_url = backend.remote_url("origin");
4912 let remote_upstream_url = backend.remote_url("upstream");
4913
4914 let snapshot = RepositorySnapshot {
4915 id,
4916 statuses_by_path,
4917 work_directory_abs_path,
4918 scan_id: prev_snapshot.scan_id + 1,
4919 branch,
4920 head_commit,
4921 merge: merge_details,
4922 remote_origin_url,
4923 remote_upstream_url,
4924 };
4925
4926 Ok((snapshot, events))
4927}
4928
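/// Converts a protobuf file status into a `FileStatus`, falling back to the legacy
/// `simple_status` code when no structured variant is present.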
4929fn status_from_proto(
4930 simple_status: i32,
4931 status: Option<proto::GitFileStatus>,
4932) -> anyhow::Result<FileStatus> {
4933 use proto::git_file_status::Variant;
4934
4935 let Some(variant) = status.and_then(|status| status.variant) else {
4936 let code = proto::GitStatus::from_i32(simple_status)
4937 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
4938 let result = match code {
4939 proto::GitStatus::Added => TrackedStatus {
4940 worktree_status: StatusCode::Added,
4941 index_status: StatusCode::Unmodified,
4942 }
4943 .into(),
4944 proto::GitStatus::Modified => TrackedStatus {
4945 worktree_status: StatusCode::Modified,
4946 index_status: StatusCode::Unmodified,
4947 }
4948 .into(),
4949 proto::GitStatus::Conflict => UnmergedStatus {
4950 first_head: UnmergedStatusCode::Updated,
4951 second_head: UnmergedStatusCode::Updated,
4952 }
4953 .into(),
4954 proto::GitStatus::Deleted => TrackedStatus {
4955 worktree_status: StatusCode::Deleted,
4956 index_status: StatusCode::Unmodified,
4957 }
4958 .into(),
4959 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
4960 };
4961 return Ok(result);
4962 };
4963
4964 let result = match variant {
4965 Variant::Untracked(_) => FileStatus::Untracked,
4966 Variant::Ignored(_) => FileStatus::Ignored,
4967 Variant::Unmerged(unmerged) => {
4968 let [first_head, second_head] =
4969 [unmerged.first_head, unmerged.second_head].map(|head| {
4970 let code = proto::GitStatus::from_i32(head)
4971 .with_context(|| format!("Invalid git status code: {head}"))?;
4972 let result = match code {
4973 proto::GitStatus::Added => UnmergedStatusCode::Added,
4974 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
4975 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
4976 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
4977 };
4978 Ok(result)
4979 });
4980 let [first_head, second_head] = [first_head?, second_head?];
4981 UnmergedStatus {
4982 first_head,
4983 second_head,
4984 }
4985 .into()
4986 }
4987 Variant::Tracked(tracked) => {
4988 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
4989 .map(|status| {
4990 let code = proto::GitStatus::from_i32(status)
4991 .with_context(|| format!("Invalid git status code: {status}"))?;
4992 let result = match code {
4993 proto::GitStatus::Modified => StatusCode::Modified,
4994 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
4995 proto::GitStatus::Added => StatusCode::Added,
4996 proto::GitStatus::Deleted => StatusCode::Deleted,
4997 proto::GitStatus::Renamed => StatusCode::Renamed,
4998 proto::GitStatus::Copied => StatusCode::Copied,
4999 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5000 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5001 };
5002 Ok(result)
5003 });
5004 let [index_status, worktree_status] = [index_status?, worktree_status?];
5005 TrackedStatus {
5006 index_status,
5007 worktree_status,
5008 }
5009 .into()
5010 }
5011 };
5012 Ok(result)
5013}
5014
5015fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5016 use proto::git_file_status::{Tracked, Unmerged, Variant};
5017
5018 let variant = match status {
5019 FileStatus::Untracked => Variant::Untracked(Default::default()),
5020 FileStatus::Ignored => Variant::Ignored(Default::default()),
5021 FileStatus::Unmerged(UnmergedStatus {
5022 first_head,
5023 second_head,
5024 }) => Variant::Unmerged(Unmerged {
5025 first_head: unmerged_status_to_proto(first_head),
5026 second_head: unmerged_status_to_proto(second_head),
5027 }),
5028 FileStatus::Tracked(TrackedStatus {
5029 index_status,
5030 worktree_status,
5031 }) => Variant::Tracked(Tracked {
5032 index_status: tracked_status_to_proto(index_status),
5033 worktree_status: tracked_status_to_proto(worktree_status),
5034 }),
5035 };
5036 proto::GitFileStatus {
5037 variant: Some(variant),
5038 }
5039}
5040
5041fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5042 match code {
5043 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5044 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5045 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5046 }
5047}
5048
5049fn tracked_status_to_proto(code: StatusCode) -> i32 {
5050 match code {
5051 StatusCode::Added => proto::GitStatus::Added as _,
5052 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5053 StatusCode::Modified => proto::GitStatus::Modified as _,
5054 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5055 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5056 StatusCode::Copied => proto::GitStatus::Copied as _,
5057 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5058 }
5059}