mod conflict_set;
pub mod git_traversal;

use crate::{
    ProjectEnvironment, ProjectItem, ProjectPath,
    buffer_store::{BufferStore, BufferStoreEvent},
    worktree_store::{WorktreeStore, WorktreeStoreEvent},
};
use anyhow::{Context as _, Result, anyhow, bail};
use askpass::AskPassDelegate;
use buffer_diff::{BufferDiff, BufferDiffEvent};
use client::ProjectId;
use collections::HashMap;
pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
use fs::Fs;
use futures::{
    FutureExt, StreamExt,
    channel::{mpsc, oneshot},
    future::{self, Shared},
    stream::FuturesOrdered,
};
use git::{
    BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
    blame::Blame,
    parse_git_remote_url,
    repository::{
        Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
        GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
        ResetMode, UpstreamTrackingStatus,
    },
    status::{
        FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
    },
};
use gpui::{
    App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
    WeakEntity,
};
use language::{
    Buffer, BufferEvent, Language, LanguageRegistry,
    proto::{deserialize_version, serialize_version},
};
use parking_lot::Mutex;
use postage::stream::Stream as _;
use rpc::{
    AnyProtoClient, TypedEnvelope,
    proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
};
use serde::Deserialize;
use std::{
    cmp::Ordering,
    collections::{BTreeSet, VecDeque},
    future::Future,
    mem,
    ops::Range,
    path::{Path, PathBuf},
    sync::{
        Arc,
        atomic::{self, AtomicU64},
    },
    time::Instant,
};
use sum_tree::{Edit, SumTree, TreeSet};
use text::{Bias, BufferId};
use util::{ResultExt, debug_panic, post_inc};
use worktree::{
    File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
    UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
};

pub struct GitStore {
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    active_repo_id: Option<RepositoryId>,
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}

#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}

struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
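    ///
    /// A minimal illustrative sketch of that bookkeeping (not a doctest; the
    /// async plumbing is elided and `count_at_write` is a hypothetical local):
    ///
    /// ```ignore
    /// // 1. The user toggles a hunk: bump the count immediately.
    /// state.hunk_staging_operation_count += 1;
    /// // 2. Once the resulting index text has been written to disk, record
    /// //    which operation that write corresponds to.
    /// state.hunk_staging_operation_count_as_of_write = count_at_write;
    /// // 3. Index text freshly read from the repository is only trusted when
    /// //    no staging operation happened after the last completed write.
    /// let index_text_is_current = state.hunk_staging_operation_count
    ///     == state.hunk_staging_operation_count_as_of_write;
    /// ```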
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<String>>,
    index_text: Option<Arc<String>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}

#[derive(Clone, Debug)]
enum DiffBasesChange {
    SetIndex(Option<String>),
    SetHead(Option<String>),
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    SetBoth(Option<String>),
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    Unstaged,
    Uncommitted,
}

enum GitStoreState {
    Local {
        next_repository_id: Arc<AtomicU64>,
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Ssh {
        upstream_client: AnyProtoClient,
        upstream_project_id: ProjectId,
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: ProjectId,
    },
}

enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}

#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
}

impl StatusEntry {
    fn to_proto(&self) -> proto::StatusEntry {
        let simple_status = match self.status {
            FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
            FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
            FileStatus::Tracked(TrackedStatus {
                index_status,
                worktree_status,
            }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
                worktree_status
            } else {
                index_status
            }),
        };

        proto::StatusEntry {
            repo_path: self.repo_path.as_ref().to_proto(),
            simple_status,
            status: Some(status_to_proto(self.status)),
        }
    }
}

impl TryFrom<proto::StatusEntry> for StatusEntry {
    type Error = anyhow::Error;

    fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
        let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
        let status = status_from_proto(value.simple_status, value.status)?;
        Ok(Self { repo_path, status })
    }
}

impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.0.clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.0.clone())
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub conflicted_paths: TreeSet<RepoPath>,
    pub message: Option<SharedString>,
    pub heads: Vec<Option<SharedString>>,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
}

type JobId = u64;

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: BTreeSet<RepoPath>,
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    job_id: JobId,
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
}

impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}

#[derive(Clone)]
pub enum RepositoryState {
    Local {
        backend: Arc<dyn GitRepository>,
        environment: Arc<HashMap<String, String>>,
    },
    Remote {
        project_id: ProjectId,
        client: AnyProtoClient,
    },
}

#[derive(Clone, Debug)]
pub enum RepositoryEvent {
    Updated { full_scan: bool, new_instance: bool },
    MergeHeadsChanged,
}

#[derive(Clone, Debug)]
pub struct JobsUpdated;

#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded(RepositoryId),
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    key: Option<GitJobKey>,
}

#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(RepoPath),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}

impl GitStore {
    pub fn local(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Local {
                next_repository_id: Arc::new(AtomicU64::new(1)),
                downstream: None,
                project_environment: environment,
                fs,
            },
            cx,
        )
    }

    pub fn remote(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        upstream_client: AnyProtoClient,
        project_id: ProjectId,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Remote {
                upstream_client,
                upstream_project_id: project_id,
            },
            cx,
        )
    }

    pub fn ssh(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        upstream_client: AnyProtoClient,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Ssh {
                upstream_client,
                upstream_project_id: ProjectId(SSH_PROJECT_ID),
                downstream: None,
            },
            cx,
        )
    }

    fn new(
        worktree_store: Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        state: GitStoreState,
        cx: &mut Context<Self>,
    ) -> Self {
        let _subscriptions = vec![
            cx.subscribe(&worktree_store, Self::on_worktree_store_event),
            cx.subscribe(&buffer_store, Self::on_buffer_store_event),
        ];

        GitStore {
            state,
            buffer_store,
            worktree_store,
            repositories: HashMap::default(),
            active_repo_id: None,
            _subscriptions,
            loading_diffs: HashMap::default(),
            shared_diffs: HashMap::default(),
            diffs: HashMap::default(),
        }
    }

    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
    }

    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }

    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Ssh {
                downstream: downstream_client,
                ..
            } => {
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
            GitStoreState::Remote { .. } => {
                debug_panic!("shared called on remote store");
            }
        }
    }

    pub fn unshared(&mut self, _cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                downstream_client.take();
            }
            GitStoreState::Ssh {
                downstream: downstream_client,
                ..
            } => {
                downstream_client.take();
            }
            GitStoreState::Remote { .. } => {
                debug_panic!("unshared called on remote store");
            }
        }
        self.shared_diffs.clear();
    }

    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }

    pub fn active_repository(&self) -> Option<Entity<Repository>> {
        self.active_repo_id
            .as_ref()
            .map(|id| self.repositories[id].clone())
    }

    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        if let Some(diff_state) = self.diffs.get(&buffer_id) {
            if let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                if let Some(task) =
                    diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
                {
                    return cx.background_executor().spawn(async move {
                        task.await;
                        Ok(unstaged_diff)
                    });
                }
                return Task::ready(Ok(unstaged_diff));
            }
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }

    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(diff_state) = self.diffs.get(&buffer_id) {
            if let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                if let Some(task) =
                    diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
                {
                    return cx.background_executor().spawn(async move {
                        task.await;
                        Ok(uncommitted_diff)
                    });
                }
                return Task::ready(Ok(uncommitted_diff));
            }
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }

    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                }

                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }

    pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
        let diff_state = self.diffs.get(&buffer_id)?;
        diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
    }

    pub fn get_uncommitted_diff(
        &self,
        buffer_id: BufferId,
        cx: &App,
    ) -> Option<Entity<BufferDiff>> {
        let diff_state = self.diffs.get(&buffer_id)?;
        diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
    }
    pub fn open_conflict_set(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Entity<ConflictSet> {
        log::debug!("open conflict set");
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(git_state) = self.diffs.get(&buffer_id) {
            if let Some(conflict_set) = git_state
                .read(cx)
                .conflict_set
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                let conflict_set = conflict_set.clone();
                let buffer_snapshot = buffer.read(cx).text_snapshot();

                git_state.update(cx, |state, cx| {
                    let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
                });

                return conflict_set;
            }
        }

        let is_unmerged = self
            .repository_and_path_for_buffer_id(buffer_id, cx)
            .map_or(false, |(repo, path)| {
                repo.read(cx).snapshot.has_conflict(&path)
            });
        let git_store = cx.weak_entity();
        let buffer_git_state = self
            .diffs
            .entry(buffer_id)
            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
        let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));

        self._subscriptions
            .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
                cx.emit(GitStoreEvent::ConflictsUpdated);
            }));

        buffer_git_state.update(cx, |state, cx| {
            state.conflict_set = Some(conflict_set.downgrade());
            let buffer_snapshot = buffer.read(cx).text_snapshot();
            let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
        });

        conflict_set
    }

    pub fn project_path_git_status(
        &self,
        project_path: &ProjectPath,
        cx: &App,
    ) -> Option<FileStatus> {
        let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
        Some(repo.read(cx).status_for_path(&repo_path)?.status)
    }

    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }

    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }

    /// Compares two checkpoints, returning true if they are equal.
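    ///
    /// A hedged usage sketch (not a doctest; assumes a `git_store: Entity<GitStore>`,
    /// two previously captured checkpoints, and an async context; the variable
    /// names are illustrative):
    ///
    /// ```ignore
    /// let before = git_store.update(cx, |store, cx| store.checkpoint(cx))?.await?;
    /// // ... perform some git operations ...
    /// let after = git_store.update(cx, |store, cx| store.checkpoint(cx))?.await?;
    /// // Resolves to true only when every repository compares equal.
    /// let unchanged = git_store
    ///     .update(cx, |store, cx| store.compare_checkpoints(before, after, cx))?
    ///     .await?;
    /// ```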
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    tasks.push(async move { compare.await? });
                }
            } else {
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }

    /// Blames a buffer.
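    ///
    /// A hedged usage sketch (not a doctest; assumes an open `buffer` and an
    /// async context holding a `git_store: Entity<GitStore>`):
    ///
    /// ```ignore
    /// let blame = git_store
    ///     .update(cx, |store, cx| store.blame_buffer(&buffer, None, cx))?
    ///     .await?;
    /// if let Some(blame) = blame {
    ///     // Inspect blame entries for the buffer's current content.
    /// }
    /// ```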
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut App,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        let content = match &version {
            Some(version) => buffer.rope_for_version(version).clone(),
            None => buffer.as_rope().clone(),
        };
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, _| async move {
                match state {
                    RepositoryState::Local { backend, .. } => backend
                        .blame(repo_path.clone(), content)
                        .await
                        .with_context(|| format!("Failed to blame {:?}", repo_path.0))
                        .map(Some),
                    RepositoryState::Remote { project_id, client } => {
                        let response = client
                            .request(proto::BlameBuffer {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                version: serialize_version(&version),
                            })
                            .await?;
                        Ok(deserialize_blame_buffer_response(response))
                    }
                }
            })
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust".into())
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
                return Task::ready(Err(anyhow!("no permalink available")));
            };
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });

            // TODO remote case
        };

        let buffer_id = buffer.read(cx).remote_id();
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local { backend, .. } => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global)?;

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        let path = repo_path.to_str().with_context(|| {
                            format!("converting repo path {repo_path:?} to string")
                        })?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams {
                                sha: &sha,
                                path,
                                selection: Some(selection),
                            },
                        ))
                    }
                    RepositoryState::Remote { project_id, client } => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

    fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
        match &self.state {
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => downstream_client
                .as_ref()
                .map(|state| (state.client.clone(), state.project_id)),
            GitStoreState::Ssh {
                downstream: downstream_client,
                ..
            } => downstream_client.clone(),
            GitStoreState::Remote { .. } => None,
        }
    }

    fn upstream_client(&self) -> Option<AnyProtoClient> {
        match &self.state {
            GitStoreState::Local { .. } => None,
            GitStoreState::Ssh {
                upstream_client, ..
            }
            | GitStoreState::Remote {
                upstream_client, ..
            } => Some(upstream_client.clone()),
        }
    }

    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            })
                            .ok();
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            _ => {}
        }
    }

    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
            {
                if buffer_repo == repo {
                    diff.update(cx, |diff, cx| {
                        if let Some(conflict_set) = &diff.conflict_set {
                            let conflict_status_changed =
                                conflict_set.update(cx, |conflict_set, cx| {
                                    let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                    conflict_set.set_has_conflict(has_conflict, cx)
                                })?;
                            if conflict_status_changed {
                                let buffer_store = self.buffer_store.read(cx);
                                if let Some(buffer) = buffer_store.get(*buffer_id) {
                                    let _ = diff.reparse_conflict_markers(
                                        buffer.read(cx).text_snapshot(),
                                        cx,
                                    );
                                }
                            }
                        }
                        anyhow::Ok(())
                    })
                    .ok();
                }
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }

    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }

    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
    fn update_repositories_from_worktree(
        &mut self,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    removed_ids.push(*id);
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        repository_dir_abs_path.clone(),
                        common_dir_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        git_store,
                        cx,
                    );
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                cx.emit(GitStoreEvent::RepositoryAdded(id));
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }

    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }

            _ => {}
        }
    }

    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }

    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.display());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }

    fn local_worktree_git_repos_changed(
        &mut self,
        worktree: Entity<Worktree>,
        changed_repos: &UpdatedGitRepositoriesSet,
        cx: &mut Context<Self>,
    ) {
        log::debug!("local worktree repos changed");
        debug_assert!(worktree.read(cx).is_local());

        for repository in self.repositories.values() {
            repository.update(cx, |repository, cx| {
                let repo_abs_path = &repository.work_directory_abs_path;
                if changed_repos.iter().any(|update| {
                    update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
                        || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
                }) {
                    repository.reload_buffer_diff_bases(cx);
                }
            });
        }
    }

    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }

    pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
        let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
        let status = repo.read(cx).snapshot.status_for_path(&path)?;
        Some(status.status)
    }

    pub fn repository_and_path_for_buffer_id(
        &self,
        buffer_id: BufferId,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let buffer = self.buffer_store.read(cx).get(buffer_id)?;
        let project_path = buffer.read(cx).project_path(cx)?;
        self.repository_and_path_for_project_path(&project_path, cx)
    }

    pub fn repository_and_path_for_project_path(
        &self,
        path: &ProjectPath,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
        self.repositories
            .values()
            .filter_map(|repo| {
                let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
                Some((repo.clone(), repo_path))
            })
            .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
    }

    pub fn git_init(
        &self,
        path: Arc<Path>,
        fallback_branch_name: String,
        cx: &App,
    ) -> Task<Result<()>> {
        match &self.state {
            GitStoreState::Local { fs, .. } => {
                let fs = fs.clone();
                cx.background_executor()
                    .spawn(async move { fs.git_init(&path, fallback_branch_name) })
            }
            GitStoreState::Ssh {
                upstream_client,
                upstream_project_id: project_id,
                ..
            }
            | GitStoreState::Remote {
                upstream_client,
                upstream_project_id: project_id,
                ..
            } => {
                let client = upstream_client.clone();
                let project_id = *project_id;
                cx.background_executor().spawn(async move {
                    client
                        .request(proto::GitInit {
                            project_id: project_id.0,
                            abs_path: path.to_string_lossy().to_string(),
                            fallback_branch_name,
                        })
                        .await?;
                    Ok(())
                })
            }
        }
    }

    pub fn git_clone(
        &self,
        repo: String,
        path: impl Into<Arc<std::path::Path>>,
        cx: &App,
    ) -> Task<Result<()>> {
        let path = path.into();
        match &self.state {
            GitStoreState::Local { fs, .. } => {
                let fs = fs.clone();
                cx.background_executor()
                    .spawn(async move { fs.git_clone(&repo, &path).await })
            }
            GitStoreState::Ssh {
                upstream_client,
                upstream_project_id,
                ..
            } => {
                let request = upstream_client.request(proto::GitClone {
                    project_id: upstream_project_id.0,
                    abs_path: path.to_string_lossy().to_string(),
                    remote_repo: repo,
                });

                cx.background_spawn(async move {
                    let result = request.await?;

                    match result.success {
                        true => Ok(()),
                        false => Err(anyhow!("Git Clone failed")),
                    }
                })
            }
            GitStoreState::Remote { .. } => {
                Task::ready(Err(anyhow!("Git Clone isn't supported for remote users")))
            }
        }
    }

    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this
                .upstream_client()
                .context("no upstream client")?
                .clone();

            let mut is_new = false;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                is_new = true;
                let git_store = cx.weak_entity();
                cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                })
            });
            if is_new {
                this._subscriptions
                    .push(cx.subscribe(repo, Self::on_repository_event))
            }

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, is_new, cx)
            })?;

            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })?
    }

    async fn handle_remove_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;
            let id = RepositoryId::from_proto(update.id);
            this.repositories.remove(&id);
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            if this.active_repo_id == Some(id) {
                this.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            cx.emit(GitStoreEvent::RepositoryRemoved(id));
        })
    }

    async fn handle_git_init(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitInit>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let name = envelope.payload.fallback_branch_name;
        cx.update(|cx| this.read(cx).git_init(path, name, cx))?
            .await?;

        Ok(proto::Ack {})
    }

    async fn handle_git_clone(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitClone>,
        cx: AsyncApp,
    ) -> Result<proto::GitCloneResponse> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let repo_name = envelope.payload.remote_repo;
        let result = cx
            .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
            .await;

        Ok(proto::GitCloneResponse {
            success: result.is_ok(),
        })
    }

    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })?
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }

    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(branch_name, remote_name, options, askpass, cx)
            })?
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }

    async fn handle_pull(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Pull>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let branch_name = envelope.payload.branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_message = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.pull(branch_name, remote_name, askpass, cx)
            })?
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_message.stdout,
            stderr: remote_message.stderr,
        })
    }

    async fn handle_stage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Stage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(PathBuf::from)
            .map(RepoPath::new)
            .collect();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stage_entries(entries, cx)
            })?
            .await?;
        Ok(proto::Ack {})
    }

    async fn handle_unstage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Unstage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(PathBuf::from)
            .map(RepoPath::new)
            .collect();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.unstage_entries(entries, cx)
            })?
            .await?;

        Ok(proto::Ack {})
    }

    async fn handle_stash(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Stash>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(PathBuf::from)
            .map(RepoPath::new)
            .collect();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_entries(entries, cx)
            })?
            .await?;

        Ok(proto::Ack {})
    }

    async fn handle_stash_pop(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashPop>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_pop(cx)
            })?
            .await?;

        Ok(proto::Ack {})
    }

    async fn handle_set_index_text(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::SetIndexText>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let repo_path = RepoPath::from_str(&envelope.payload.path);

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.spawn_set_index_text_job(
                    repo_path,
                    envelope.payload.text,
                    None,
                    cx,
                )
            })?
            .await??;
        Ok(proto::Ack {})
    }

    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let message = SharedString::from(envelope.payload.message);
        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        let options = envelope.payload.options.unwrap_or_default();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.commit(
                    message,
                    name.zip(email),
                    CommitOptions {
                        amend: options.amend,
                        signoff: options.signoff,
                    },
                    cx,
                )
            })?
            .await??;
        Ok(proto::Ack {})
    }

    async fn handle_get_remotes(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetRemotes>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetRemotesResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branch_name = envelope.payload.branch_name;

        let remotes = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.get_remotes(branch_name)
            })?
            .await??;

        Ok(proto::GetRemotesResponse {
            remotes: remotes
                .into_iter()
                .map(|remotes| proto::get_remotes_response::Remote {
                    name: remotes.name.to_string(),
                })
                .collect::<Vec<_>>(),
        })
    }

    async fn handle_get_branches(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitGetBranches>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitBranchesResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branches = repository_handle
            .update(&mut cx, |repository_handle, _| repository_handle.branches())?
            .await??;

        Ok(proto::GitBranchesResponse {
            branches: branches
                .into_iter()
                .map(|branch| branch_to_proto(&branch))
                .collect::<Vec<_>>(),
        })
    }

    async fn handle_get_default_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetDefaultBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetDefaultBranchResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branch = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.default_branch()
            })?
            .await??
            .map(Into::into);

        Ok(proto::GetDefaultBranchResponse { branch })
    }

    async fn handle_create_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCreateBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let branch_name = envelope.payload.branch_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.create_branch(branch_name)
            })?
            .await??;

        Ok(proto::Ack {})
    }

    async fn handle_change_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitChangeBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let branch_name = envelope.payload.branch_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.change_branch(branch_name)
            })?
            .await??;

        Ok(proto::Ack {})
    }
1950
1951 async fn handle_show(
1952 this: Entity<Self>,
1953 envelope: TypedEnvelope<proto::GitShow>,
1954 mut cx: AsyncApp,
1955 ) -> Result<proto::GitCommitDetails> {
1956 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1957 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1958
1959 let commit = repository_handle
1960 .update(&mut cx, |repository_handle, _| {
1961 repository_handle.show(envelope.payload.commit)
1962 })?
1963 .await??;
1964 Ok(proto::GitCommitDetails {
1965 sha: commit.sha.into(),
1966 message: commit.message.into(),
1967 commit_timestamp: commit.commit_timestamp,
1968 author_email: commit.author_email.into(),
1969 author_name: commit.author_name.into(),
1970 })
1971 }
1972
1973 async fn handle_load_commit_diff(
1974 this: Entity<Self>,
1975 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1976 mut cx: AsyncApp,
1977 ) -> Result<proto::LoadCommitDiffResponse> {
1978 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1979 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1980
1981 let commit_diff = repository_handle
1982 .update(&mut cx, |repository_handle, _| {
1983 repository_handle.load_commit_diff(envelope.payload.commit)
1984 })?
1985 .await??;
1986 Ok(proto::LoadCommitDiffResponse {
1987 files: commit_diff
1988 .files
1989 .into_iter()
1990 .map(|file| proto::CommitFile {
1991 path: file.path.to_string(),
1992 old_text: file.old_text,
1993 new_text: file.new_text,
1994 })
1995 .collect(),
1996 })
1997 }
1998
1999 async fn handle_reset(
2000 this: Entity<Self>,
2001 envelope: TypedEnvelope<proto::GitReset>,
2002 mut cx: AsyncApp,
2003 ) -> Result<proto::Ack> {
2004 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2005 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2006
2007 let mode = match envelope.payload.mode() {
2008 git_reset::ResetMode::Soft => ResetMode::Soft,
2009 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2010 };
2011
2012 repository_handle
2013 .update(&mut cx, |repository_handle, cx| {
2014 repository_handle.reset(envelope.payload.commit, mode, cx)
2015 })?
2016 .await??;
2017 Ok(proto::Ack {})
2018 }
2019
2020 async fn handle_checkout_files(
2021 this: Entity<Self>,
2022 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2023 mut cx: AsyncApp,
2024 ) -> Result<proto::Ack> {
2025 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2026 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2027 let paths = envelope
2028 .payload
2029 .paths
2030 .iter()
2031 .map(|s| RepoPath::from_str(s))
2032 .collect();
2033
2034 repository_handle
2035 .update(&mut cx, |repository_handle, cx| {
2036 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2037 })?
2038 .await??;
2039 Ok(proto::Ack {})
2040 }
2041
2042 async fn handle_open_commit_message_buffer(
2043 this: Entity<Self>,
2044 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2045 mut cx: AsyncApp,
2046 ) -> Result<proto::OpenBufferResponse> {
2047 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2048 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2049 let buffer = repository
2050 .update(&mut cx, |repository, cx| {
2051 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2052 })?
2053 .await?;
2054
2055 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2056 this.update(&mut cx, |this, cx| {
2057 this.buffer_store.update(cx, |buffer_store, cx| {
2058 buffer_store
2059 .create_buffer_for_peer(
2060 &buffer,
2061 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2062 cx,
2063 )
2064 .detach_and_log_err(cx);
2065 })
2066 })?;
2067
2068 Ok(proto::OpenBufferResponse {
2069 buffer_id: buffer_id.to_proto(),
2070 })
2071 }
2072
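    /// Handles an askpass prompt forwarded over RPC by looking up the delegate
    /// registered under `askpass_id`, asking it for a response, and re-inserting
    /// it so later prompts for the same operation can reuse it.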
2073 async fn handle_askpass(
2074 this: Entity<Self>,
2075 envelope: TypedEnvelope<proto::AskPassRequest>,
2076 mut cx: AsyncApp,
2077 ) -> Result<proto::AskPassResponse> {
2078 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2079 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2080
2081 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2082 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2083 debug_panic!("no askpass found");
2084 anyhow::bail!("no askpass found");
2085 };
2086
2087 let response = askpass.ask_password(envelope.payload.prompt).await?;
2088
2089 delegates
2090 .lock()
2091 .insert(envelope.payload.askpass_id, askpass);
2092
2093 Ok(proto::AskPassResponse { response })
2094 }
2095
2096 async fn handle_check_for_pushed_commits(
2097 this: Entity<Self>,
2098 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2099 mut cx: AsyncApp,
2100 ) -> Result<proto::CheckForPushedCommitsResponse> {
2101 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2102 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2103
2104 let branches = repository_handle
2105 .update(&mut cx, |repository_handle, _| {
2106 repository_handle.check_for_pushed_commits()
2107 })?
2108 .await??;
2109 Ok(proto::CheckForPushedCommitsResponse {
2110 pushed_to: branches
2111 .into_iter()
2112                .map(|branch| branch.to_string())
2113 .collect(),
2114 })
2115 }
2116
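    /// Handles a GitDiff RPC request by running the requested diff
    /// (HEAD-to-index or HEAD-to-worktree) and truncating the result to
    /// roughly 1 MB of characters before replying.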
2117 async fn handle_git_diff(
2118 this: Entity<Self>,
2119 envelope: TypedEnvelope<proto::GitDiff>,
2120 mut cx: AsyncApp,
2121 ) -> Result<proto::GitDiffResponse> {
2122 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2123 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2124 let diff_type = match envelope.payload.diff_type() {
2125 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2126 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2127 };
2128
2129 let mut diff = repository_handle
2130 .update(&mut cx, |repository_handle, cx| {
2131 repository_handle.diff(diff_type, cx)
2132 })?
2133 .await??;
2134 const ONE_MB: usize = 1_000_000;
2135 if diff.len() > ONE_MB {
2136 diff = diff.chars().take(ONE_MB).collect()
2137 }
2138
2139 Ok(proto::GitDiffResponse { diff })
2140 }
2141
2142 async fn handle_open_unstaged_diff(
2143 this: Entity<Self>,
2144 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2145 mut cx: AsyncApp,
2146 ) -> Result<proto::OpenUnstagedDiffResponse> {
2147 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2148 let diff = this
2149 .update(&mut cx, |this, cx| {
2150 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2151 Some(this.open_unstaged_diff(buffer, cx))
2152 })?
2153 .context("missing buffer")?
2154 .await?;
2155 this.update(&mut cx, |this, _| {
2156 let shared_diffs = this
2157 .shared_diffs
2158 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2159 .or_default();
2160 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2161 })?;
2162 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2163 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2164 }
2165
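    /// Handles an OpenUncommittedDiff RPC request. The response omits the
    /// staged text when the index snapshot is identical to the committed
    /// snapshot (`Mode::IndexMatchesHead`), so the peer can reuse the
    /// committed text instead of receiving it twice.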
2166 async fn handle_open_uncommitted_diff(
2167 this: Entity<Self>,
2168 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2169 mut cx: AsyncApp,
2170 ) -> Result<proto::OpenUncommittedDiffResponse> {
2171 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2172 let diff = this
2173 .update(&mut cx, |this, cx| {
2174 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2175 Some(this.open_uncommitted_diff(buffer, cx))
2176 })?
2177 .context("missing buffer")?
2178 .await?;
2179 this.update(&mut cx, |this, _| {
2180 let shared_diffs = this
2181 .shared_diffs
2182 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2183 .or_default();
2184 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2185 })?;
2186 diff.read_with(&cx, |diff, cx| {
2187 use proto::open_uncommitted_diff_response::Mode;
2188
2189 let unstaged_diff = diff.secondary_diff();
2190 let index_snapshot = unstaged_diff.and_then(|diff| {
2191 let diff = diff.read(cx);
2192 diff.base_text_exists().then(|| diff.base_text())
2193 });
2194
2195 let mode;
2196 let staged_text;
2197 let committed_text;
2198 if diff.base_text_exists() {
2199 let committed_snapshot = diff.base_text();
2200 committed_text = Some(committed_snapshot.text());
2201 if let Some(index_text) = index_snapshot {
2202 if index_text.remote_id() == committed_snapshot.remote_id() {
2203 mode = Mode::IndexMatchesHead;
2204 staged_text = None;
2205 } else {
2206 mode = Mode::IndexAndHead;
2207 staged_text = Some(index_text.text());
2208 }
2209 } else {
2210 mode = Mode::IndexAndHead;
2211 staged_text = None;
2212 }
2213 } else {
2214 mode = Mode::IndexAndHead;
2215 committed_text = None;
2216 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2217 }
2218
2219 proto::OpenUncommittedDiffResponse {
2220 committed_text,
2221 staged_text,
2222 mode: mode.into(),
2223 }
2224 })
2225 }
2226
2227 async fn handle_update_diff_bases(
2228 this: Entity<Self>,
2229 request: TypedEnvelope<proto::UpdateDiffBases>,
2230 mut cx: AsyncApp,
2231 ) -> Result<()> {
2232 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2233 this.update(&mut cx, |this, cx| {
2234 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2235 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2236 let buffer = buffer.read(cx).text_snapshot();
2237 diff_state.update(cx, |diff_state, cx| {
2238 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2239 })
2240 }
2241 }
2242 })
2243 }
2244
2245 async fn handle_blame_buffer(
2246 this: Entity<Self>,
2247 envelope: TypedEnvelope<proto::BlameBuffer>,
2248 mut cx: AsyncApp,
2249 ) -> Result<proto::BlameBufferResponse> {
2250 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2251 let version = deserialize_version(&envelope.payload.version);
2252 let buffer = this.read_with(&cx, |this, cx| {
2253 this.buffer_store.read(cx).get_existing(buffer_id)
2254 })??;
2255 buffer
2256 .update(&mut cx, |buffer, _| {
2257 buffer.wait_for_version(version.clone())
2258 })?
2259 .await?;
2260 let blame = this
2261 .update(&mut cx, |this, cx| {
2262 this.blame_buffer(&buffer, Some(version), cx)
2263 })?
2264 .await?;
2265 Ok(serialize_blame_buffer_response(blame))
2266 }
2267
2268 async fn handle_get_permalink_to_line(
2269 this: Entity<Self>,
2270 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2271 mut cx: AsyncApp,
2272 ) -> Result<proto::GetPermalinkToLineResponse> {
2273 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2274 // let version = deserialize_version(&envelope.payload.version);
2275 let selection = {
2276 let proto_selection = envelope
2277 .payload
2278 .selection
2279                .context("no selection defined to get a permalink for")?;
2280 proto_selection.start as u32..proto_selection.end as u32
2281 };
2282 let buffer = this.read_with(&cx, |this, cx| {
2283 this.buffer_store.read(cx).get_existing(buffer_id)
2284 })??;
2285 let permalink = this
2286 .update(&mut cx, |this, cx| {
2287 this.get_permalink_to_line(&buffer, selection, cx)
2288 })?
2289 .await?;
2290 Ok(proto::GetPermalinkToLineResponse {
2291 permalink: permalink.to_string(),
2292 })
2293 }
2294
2295 fn repository_for_request(
2296 this: &Entity<Self>,
2297 id: RepositoryId,
2298 cx: &mut AsyncApp,
2299 ) -> Result<Entity<Repository>> {
2300 this.read_with(cx, |this, _| {
2301 this.repositories
2302 .get(&id)
2303 .context("missing repository handle")
2304 .cloned()
2305 })?
2306 }
2307
2308 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2309 self.repositories
2310 .iter()
2311 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2312 .collect()
2313 }
2314
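    /// Groups the updated worktree entries by the repository that contains
    /// them, converting each absolute path to a `RepoPath`. When repositories
    /// are nested, a path is assigned to its innermost containing repository.
    /// The matching work runs on the background executor.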
2315 fn process_updated_entries(
2316 &self,
2317 worktree: &Entity<Worktree>,
2318 updated_entries: &[(Arc<Path>, ProjectEntryId, PathChange)],
2319 cx: &mut App,
2320 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2321 let mut repo_paths = self
2322 .repositories
2323 .values()
2324 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2325 .collect::<Vec<_>>();
2326 let mut entries: Vec<_> = updated_entries
2327 .iter()
2328 .map(|(path, _, _)| path.clone())
2329 .collect();
2330 entries.sort();
2331 let worktree = worktree.read(cx);
2332
2333 let entries = entries
2334 .into_iter()
2335 .filter_map(|path| worktree.absolutize(&path).ok())
2336 .collect::<Arc<[_]>>();
2337
2338 let executor = cx.background_executor().clone();
2339 cx.background_executor().spawn(async move {
2340 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2341 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2342 let mut tasks = FuturesOrdered::new();
2343 for (repo_path, repo) in repo_paths.into_iter().rev() {
2344 let entries = entries.clone();
2345 let task = executor.spawn(async move {
2346 // Find all repository paths that belong to this repo
2347 let mut ix = entries.partition_point(|path| path < &*repo_path);
2348 if ix == entries.len() {
2349 return None;
2350 };
2351
2352 let mut paths = Vec::new();
2353                    // All paths prefixed by a given repo constitute a contiguous range.
2354 while let Some(path) = entries.get(ix)
2355 && let Some(repo_path) =
2356 RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, path)
2357 {
2358 paths.push((repo_path, ix));
2359 ix += 1;
2360 }
2361 if paths.is_empty() {
2362 None
2363 } else {
2364 Some((repo, paths))
2365 }
2366 });
2367 tasks.push_back(task);
2368 }
2369
2370 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2371 let mut path_was_used = vec![false; entries.len()];
2372 let tasks = tasks.collect::<Vec<_>>().await;
2373 // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
2374            // We always want to assign a path to its innermost repository.
2375 for t in tasks {
2376 let Some((repo, paths)) = t else {
2377 continue;
2378 };
2379 let entry = paths_by_git_repo.entry(repo).or_default();
2380 for (repo_path, ix) in paths {
2381 if path_was_used[ix] {
2382 continue;
2383 }
2384 path_was_used[ix] = true;
2385 entry.push(repo_path);
2386 }
2387 }
2388
2389 paths_by_git_repo
2390 })
2391 }
2392}
2393
2394impl BufferGitState {
2395 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2396 Self {
2397 unstaged_diff: Default::default(),
2398 uncommitted_diff: Default::default(),
2399 recalculate_diff_task: Default::default(),
2400 language: Default::default(),
2401 language_registry: Default::default(),
2402 recalculating_tx: postage::watch::channel_with(false).0,
2403 hunk_staging_operation_count: 0,
2404 hunk_staging_operation_count_as_of_write: 0,
2405 head_text: Default::default(),
2406 index_text: Default::default(),
2407 head_changed: Default::default(),
2408 index_changed: Default::default(),
2409 language_changed: Default::default(),
2410 conflict_updated_futures: Default::default(),
2411 conflict_set: Default::default(),
2412 reparse_conflict_markers_task: Default::default(),
2413 }
2414 }
2415
2416 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2417 self.language = buffer.read(cx).language().cloned();
2418 self.language_changed = true;
2419 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2420 }
2421
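    /// Re-parses conflict markers in the given buffer snapshot. If the conflict
    /// set is gone or previously had no conflicts, nothing is re-parsed and the
    /// returned receiver's sender is dropped; otherwise the receiver resolves
    /// once the conflict set snapshot has been updated.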
2422 fn reparse_conflict_markers(
2423 &mut self,
2424 buffer: text::BufferSnapshot,
2425 cx: &mut Context<Self>,
2426 ) -> oneshot::Receiver<()> {
2427 let (tx, rx) = oneshot::channel();
2428
2429 let Some(conflict_set) = self
2430 .conflict_set
2431 .as_ref()
2432 .and_then(|conflict_set| conflict_set.upgrade())
2433 else {
2434 return rx;
2435 };
2436
2437 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2438 if conflict_set.has_conflict {
2439 Some(conflict_set.snapshot())
2440 } else {
2441 None
2442 }
2443 });
2444
2445 if let Some(old_snapshot) = old_snapshot {
2446 self.conflict_updated_futures.push(tx);
2447 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2448 let (snapshot, changed_range) = cx
2449 .background_spawn(async move {
2450 let new_snapshot = ConflictSet::parse(&buffer);
2451 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2452 (new_snapshot, changed_range)
2453 })
2454 .await;
2455 this.update(cx, |this, cx| {
2456 if let Some(conflict_set) = &this.conflict_set {
2457 conflict_set
2458 .update(cx, |conflict_set, cx| {
2459 conflict_set.set_snapshot(snapshot, changed_range, cx);
2460 })
2461 .ok();
2462 }
2463 let futures = std::mem::take(&mut this.conflict_updated_futures);
2464 for tx in futures {
2465 tx.send(()).ok();
2466 }
2467 })
2468 }))
2469 }
2470
2471 rx
2472 }
2473
2474 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2475 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2476 }
2477
2478 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2479 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2480 }
2481
2482 fn handle_base_texts_updated(
2483 &mut self,
2484 buffer: text::BufferSnapshot,
2485 message: proto::UpdateDiffBases,
2486 cx: &mut Context<Self>,
2487 ) {
2488 use proto::update_diff_bases::Mode;
2489
2490 let Some(mode) = Mode::from_i32(message.mode) else {
2491 return;
2492 };
2493
2494 let diff_bases_change = match mode {
2495 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2496 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2497 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2498 Mode::IndexAndHead => DiffBasesChange::SetEach {
2499 index: message.staged_text,
2500 head: message.committed_text,
2501 },
2502 };
2503
2504 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2505 }
2506
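    /// If a diff recalculation is currently in progress, returns a future that
    /// resolves once it finishes; otherwise returns `None`.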
2507 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2508 if *self.recalculating_tx.borrow() {
2509 let mut rx = self.recalculating_tx.subscribe();
2510 return Some(async move {
2511 loop {
2512 let is_recalculating = rx.recv().await;
2513 if is_recalculating != Some(true) {
2514 break;
2515 }
2516 }
2517 });
2518 } else {
2519 None
2520 }
2521 }
2522
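    /// Applies a change to the cached index and/or HEAD base texts, normalizing
    /// line endings, and then kicks off a diff recalculation.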
2523 fn diff_bases_changed(
2524 &mut self,
2525 buffer: text::BufferSnapshot,
2526 diff_bases_change: Option<DiffBasesChange>,
2527 cx: &mut Context<Self>,
2528 ) {
2529 match diff_bases_change {
2530 Some(DiffBasesChange::SetIndex(index)) => {
2531 self.index_text = index.map(|mut index| {
2532 text::LineEnding::normalize(&mut index);
2533 Arc::new(index)
2534 });
2535 self.index_changed = true;
2536 }
2537 Some(DiffBasesChange::SetHead(head)) => {
2538 self.head_text = head.map(|mut head| {
2539 text::LineEnding::normalize(&mut head);
2540 Arc::new(head)
2541 });
2542 self.head_changed = true;
2543 }
2544 Some(DiffBasesChange::SetBoth(text)) => {
2545 let text = text.map(|mut text| {
2546 text::LineEnding::normalize(&mut text);
2547 Arc::new(text)
2548 });
2549 self.head_text = text.clone();
2550 self.index_text = text;
2551 self.head_changed = true;
2552 self.index_changed = true;
2553 }
2554 Some(DiffBasesChange::SetEach { index, head }) => {
2555 self.index_text = index.map(|mut index| {
2556 text::LineEnding::normalize(&mut index);
2557 Arc::new(index)
2558 });
2559 self.index_changed = true;
2560 self.head_text = head.map(|mut head| {
2561 text::LineEnding::normalize(&mut head);
2562 Arc::new(head)
2563 });
2564 self.head_changed = true;
2565 }
2566 None => {}
2567 }
2568
2569 self.recalculate_diffs(buffer, cx)
2570 }
2571
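    /// Recomputes the unstaged and uncommitted diffs against the current base
    /// texts, reusing the unstaged diff for the uncommitted one when the index
    /// matches HEAD. The attempt is abandoned if hunk staging operations have
    /// not yet quiesced (see the comment below).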
2572 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2573 *self.recalculating_tx.borrow_mut() = true;
2574
2575 let language = self.language.clone();
2576 let language_registry = self.language_registry.clone();
2577 let unstaged_diff = self.unstaged_diff();
2578 let uncommitted_diff = self.uncommitted_diff();
2579 let head = self.head_text.clone();
2580 let index = self.index_text.clone();
2581 let index_changed = self.index_changed;
2582 let head_changed = self.head_changed;
2583 let language_changed = self.language_changed;
2584 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2585 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2586 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2587 (None, None) => true,
2588 _ => false,
2589 };
2590 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2591 log::debug!(
2592 "start recalculating diffs for buffer {}",
2593 buffer.remote_id()
2594 );
2595
2596 let mut new_unstaged_diff = None;
2597 if let Some(unstaged_diff) = &unstaged_diff {
2598 new_unstaged_diff = Some(
2599 BufferDiff::update_diff(
2600 unstaged_diff.clone(),
2601 buffer.clone(),
2602 index,
2603 index_changed,
2604 language_changed,
2605 language.clone(),
2606 language_registry.clone(),
2607 cx,
2608 )
2609 .await?,
2610 );
2611 }
2612
2613 let mut new_uncommitted_diff = None;
2614 if let Some(uncommitted_diff) = &uncommitted_diff {
2615 new_uncommitted_diff = if index_matches_head {
2616 new_unstaged_diff.clone()
2617 } else {
2618 Some(
2619 BufferDiff::update_diff(
2620 uncommitted_diff.clone(),
2621 buffer.clone(),
2622 head,
2623 head_changed,
2624 language_changed,
2625 language.clone(),
2626 language_registry.clone(),
2627 cx,
2628 )
2629 .await?,
2630 )
2631 }
2632 }
2633
2634 let cancel = this.update(cx, |this, _| {
2635 // This checks whether all pending stage/unstage operations
2636 // have quiesced (i.e. both the corresponding write and the
2637 // read of that write have completed). If not, then we cancel
2638 // this recalculation attempt to avoid invalidating pending
2639 // state too quickly; another recalculation will come along
2640 // later and clear the pending state once the state of the index has settled.
2641 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2642 *this.recalculating_tx.borrow_mut() = false;
2643 true
2644 } else {
2645 false
2646 }
2647 })?;
2648 if cancel {
2649 log::debug!(
2650 concat!(
2651                        "aborting recalculating diffs for buffer {} ",
2652                        "due to subsequent hunk operations",
2653 ),
2654 buffer.remote_id()
2655 );
2656 return Ok(());
2657 }
2658
2659 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2660 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2661 {
2662 unstaged_diff.update(cx, |diff, cx| {
2663 if language_changed {
2664 diff.language_changed(cx);
2665 }
2666 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2667 })?
2668 } else {
2669 None
2670 };
2671
2672 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2673 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2674 {
2675 uncommitted_diff.update(cx, |diff, cx| {
2676 if language_changed {
2677 diff.language_changed(cx);
2678 }
2679 diff.set_snapshot_with_secondary(
2680 new_uncommitted_diff,
2681 &buffer,
2682 unstaged_changed_range,
2683 true,
2684 cx,
2685 );
2686 })?;
2687 }
2688
2689 log::debug!(
2690 "finished recalculating diffs for buffer {}",
2691 buffer.remote_id()
2692 );
2693
2694 if let Some(this) = this.upgrade() {
2695 this.update(cx, |this, _| {
2696 this.index_changed = false;
2697 this.head_changed = false;
2698 this.language_changed = false;
2699 *this.recalculating_tx.borrow_mut() = false;
2700 })?;
2701 }
2702
2703 Ok(())
2704 }));
2705 }
2706}
2707
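/// Builds an askpass delegate that forwards credential prompts from a local
/// git operation to the downstream client over RPC and relays the response
/// back to the caller.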
2708fn make_remote_delegate(
2709 this: Entity<GitStore>,
2710 project_id: u64,
2711 repository_id: RepositoryId,
2712 askpass_id: u64,
2713 cx: &mut AsyncApp,
2714) -> AskPassDelegate {
2715 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2716 this.update(cx, |this, cx| {
2717 let Some((client, _)) = this.downstream_client() else {
2718 return;
2719 };
2720 let response = client.request(proto::AskPassRequest {
2721 project_id,
2722 repository_id: repository_id.to_proto(),
2723 askpass_id,
2724 prompt,
2725 });
2726 cx.spawn(async move |_, _| {
2727 tx.send(response.await?.response).ok();
2728 anyhow::Ok(())
2729 })
2730 .detach_and_log_err(cx);
2731 })
2732 .log_err();
2733 })
2734}
2735
2736impl RepositoryId {
2737 pub fn to_proto(self) -> u64 {
2738 self.0
2739 }
2740
2741 pub fn from_proto(id: u64) -> Self {
2742 RepositoryId(id)
2743 }
2744}
2745
2746impl RepositorySnapshot {
2747 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2748 Self {
2749 id,
2750 statuses_by_path: Default::default(),
2751 work_directory_abs_path,
2752 branch: None,
2753 head_commit: None,
2754 scan_id: 0,
2755 merge: Default::default(),
2756 remote_origin_url: None,
2757 remote_upstream_url: None,
2758 }
2759 }
2760
2761 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2762 proto::UpdateRepository {
2763 branch_summary: self.branch.as_ref().map(branch_to_proto),
2764 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2765 updated_statuses: self
2766 .statuses_by_path
2767 .iter()
2768 .map(|entry| entry.to_proto())
2769 .collect(),
2770 removed_statuses: Default::default(),
2771 current_merge_conflicts: self
2772 .merge
2773 .conflicted_paths
2774 .iter()
2775 .map(|repo_path| repo_path.to_proto())
2776 .collect(),
2777 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2778 project_id,
2779 id: self.id.to_proto(),
2780 abs_path: self.work_directory_abs_path.to_proto(),
2781 entry_ids: vec![self.id.to_proto()],
2782 scan_id: self.scan_id,
2783 is_last_update: true,
2784 }
2785 }
2786
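    /// Computes an incremental `UpdateRepository` message by walking the old
    /// and new status trees in lockstep, collecting added, changed, and removed
    /// status entries along with the current branch and merge state.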
2787 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2788 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2789 let mut removed_statuses: Vec<String> = Vec::new();
2790
2791 let mut new_statuses = self.statuses_by_path.iter().peekable();
2792 let mut old_statuses = old.statuses_by_path.iter().peekable();
2793
2794 let mut current_new_entry = new_statuses.next();
2795 let mut current_old_entry = old_statuses.next();
2796 loop {
2797 match (current_new_entry, current_old_entry) {
2798 (Some(new_entry), Some(old_entry)) => {
2799 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2800 Ordering::Less => {
2801 updated_statuses.push(new_entry.to_proto());
2802 current_new_entry = new_statuses.next();
2803 }
2804 Ordering::Equal => {
2805 if new_entry.status != old_entry.status {
2806 updated_statuses.push(new_entry.to_proto());
2807 }
2808 current_old_entry = old_statuses.next();
2809 current_new_entry = new_statuses.next();
2810 }
2811 Ordering::Greater => {
2812 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2813 current_old_entry = old_statuses.next();
2814 }
2815 }
2816 }
2817 (None, Some(old_entry)) => {
2818 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2819 current_old_entry = old_statuses.next();
2820 }
2821 (Some(new_entry), None) => {
2822 updated_statuses.push(new_entry.to_proto());
2823 current_new_entry = new_statuses.next();
2824 }
2825 (None, None) => break,
2826 }
2827 }
2828
2829 proto::UpdateRepository {
2830 branch_summary: self.branch.as_ref().map(branch_to_proto),
2831 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2832 updated_statuses,
2833 removed_statuses,
2834 current_merge_conflicts: self
2835 .merge
2836 .conflicted_paths
2837 .iter()
2838 .map(|path| path.as_ref().to_proto())
2839 .collect(),
2840 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2841 project_id,
2842 id: self.id.to_proto(),
2843 abs_path: self.work_directory_abs_path.to_proto(),
2844 entry_ids: vec![],
2845 scan_id: self.scan_id,
2846 is_last_update: true,
2847 }
2848 }
2849
2850 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2851 self.statuses_by_path.iter().cloned()
2852 }
2853
2854 pub fn status_summary(&self) -> GitSummary {
2855 self.statuses_by_path.summary().item_summary
2856 }
2857
2858 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2859 self.statuses_by_path
2860 .get(&PathKey(path.0.clone()), &())
2861 .cloned()
2862 }
2863
2864 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2865 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path)
2866 }
2867
2868 #[inline]
2869 fn abs_path_to_repo_path_inner(
2870 work_directory_abs_path: &Path,
2871 abs_path: &Path,
2872                .map(|remote| proto::get_remotes_response::Remote {
2873                    name: remote.name.to_string(),
2874 .strip_prefix(&work_directory_abs_path)
2875 .map(RepoPath::from)
2876 .ok()
2877 }
2878
2879 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2880 self.merge.conflicted_paths.contains(repo_path)
2881 }
2882
2883 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2884 let had_conflict_on_last_merge_head_change =
2885 self.merge.conflicted_paths.contains(repo_path);
2886 let has_conflict_currently = self
2887 .status_for_path(repo_path)
2888 .map_or(false, |entry| entry.status.is_conflicted());
2889 had_conflict_on_last_merge_head_change || has_conflict_currently
2890 }
2891
2892 /// This is the name that will be displayed in the repository selector for this repository.
2893 pub fn display_name(&self) -> SharedString {
2894 self.work_directory_abs_path
2895 .file_name()
2896 .unwrap_or_default()
2897 .to_string_lossy()
2898 .to_string()
2899 .into()
2900 }
2901}
2902
2903impl MergeDetails {
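    /// Loads the current merge state (merge message, in-progress operation
    /// heads, and conflicted paths). Conflicted paths are only recomputed when
    /// the operation heads have changed, and a newly started operation is not
    /// recorded until `git status` actually reports conflicts.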
2904 async fn load(
2905 backend: &Arc<dyn GitRepository>,
2906 status: &SumTree<StatusEntry>,
2907 prev_snapshot: &RepositorySnapshot,
2908 ) -> Result<(MergeDetails, bool)> {
2909 log::debug!("load merge details");
2910 let message = backend.merge_message().await;
2911 let heads = backend
2912 .revparse_batch(vec![
2913 "MERGE_HEAD".into(),
2914 "CHERRY_PICK_HEAD".into(),
2915 "REBASE_HEAD".into(),
2916 "REVERT_HEAD".into(),
2917 "APPLY_HEAD".into(),
2918 ])
2919 .await
2920 .log_err()
2921 .unwrap_or_default()
2922 .into_iter()
2923 .map(|opt| opt.map(SharedString::from))
2924 .collect::<Vec<_>>();
2925 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2926 let conflicted_paths = if merge_heads_changed {
2927 let current_conflicted_paths = TreeSet::from_ordered_entries(
2928 status
2929 .iter()
2930 .filter(|entry| entry.status.is_conflicted())
2931 .map(|entry| entry.repo_path.clone()),
2932 );
2933
2934 // It can happen that we run a scan while a lengthy merge is in progress
2935 // that will eventually result in conflicts, but before those conflicts
2936 // are reported by `git status`. Since for the moment we only care about
2937 // the merge heads state for the purposes of tracking conflicts, don't update
2938 // this state until we see some conflicts.
2939 if heads.iter().any(Option::is_some)
2940 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
2941 && current_conflicted_paths.is_empty()
2942 {
2943 log::debug!("not updating merge heads because no conflicts found");
2944 return Ok((
2945 MergeDetails {
2946 message: message.map(SharedString::from),
2947 ..prev_snapshot.merge.clone()
2948 },
2949 false,
2950 ));
2951 }
2952
2953 current_conflicted_paths
2954 } else {
2955 prev_snapshot.merge.conflicted_paths.clone()
2956 };
2957 let details = MergeDetails {
2958 conflicted_paths,
2959 message: message.map(SharedString::from),
2960 heads,
2961 };
2962 Ok((details, merge_heads_changed))
2963 }
2964}
2965
2966impl Repository {
2967 pub fn snapshot(&self) -> RepositorySnapshot {
2968 self.snapshot.clone()
2969 }
2970
2971 fn local(
2972 id: RepositoryId,
2973 work_directory_abs_path: Arc<Path>,
2974 dot_git_abs_path: Arc<Path>,
2975 repository_dir_abs_path: Arc<Path>,
2976 common_dir_abs_path: Arc<Path>,
2977 project_environment: WeakEntity<ProjectEnvironment>,
2978 fs: Arc<dyn Fs>,
2979 git_store: WeakEntity<GitStore>,
2980 cx: &mut Context<Self>,
2981 ) -> Self {
2982 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2983 Repository {
2984 this: cx.weak_entity(),
2985 git_store,
2986 snapshot,
2987 commit_message_buffer: None,
2988 askpass_delegates: Default::default(),
2989 paths_needing_status_update: Default::default(),
2990 latest_askpass_id: 0,
2991 job_sender: Repository::spawn_local_git_worker(
2992 work_directory_abs_path,
2993 dot_git_abs_path,
2994 repository_dir_abs_path,
2995 common_dir_abs_path,
2996 project_environment,
2997 fs,
2998 cx,
2999 ),
3000 job_id: 0,
3001 active_jobs: Default::default(),
3002 }
3003 }
3004
3005 fn remote(
3006 id: RepositoryId,
3007 work_directory_abs_path: Arc<Path>,
3008 project_id: ProjectId,
3009 client: AnyProtoClient,
3010 git_store: WeakEntity<GitStore>,
3011 cx: &mut Context<Self>,
3012 ) -> Self {
3013 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
3014 Self {
3015 this: cx.weak_entity(),
3016 snapshot,
3017 commit_message_buffer: None,
3018 git_store,
3019 paths_needing_status_update: Default::default(),
3020 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3021 askpass_delegates: Default::default(),
3022 latest_askpass_id: 0,
3023 active_jobs: Default::default(),
3024 job_id: 0,
3025 }
3026 }
3027
3028 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3029 self.git_store.upgrade()
3030 }
3031
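    /// Re-reads the index and HEAD text for every open buffer belonging to this
    /// repository, forwards any changed base texts to the downstream client,
    /// and triggers diff recalculation for the affected buffers. Only meaningful
    /// for local repositories.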
3032 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3033 let this = cx.weak_entity();
3034 let git_store = self.git_store.clone();
3035 let _ = self.send_keyed_job(
3036 Some(GitJobKey::ReloadBufferDiffBases),
3037 None,
3038 |state, mut cx| async move {
3039 let RepositoryState::Local { backend, .. } = state else {
3040 log::error!("tried to recompute diffs for a non-local repository");
3041 return Ok(());
3042 };
3043
3044 let Some(this) = this.upgrade() else {
3045 return Ok(());
3046 };
3047
3048 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3049 git_store.update(cx, |git_store, cx| {
3050 git_store
3051 .diffs
3052 .iter()
3053 .filter_map(|(buffer_id, diff_state)| {
3054 let buffer_store = git_store.buffer_store.read(cx);
3055 let buffer = buffer_store.get(*buffer_id)?;
3056 let file = File::from_dyn(buffer.read(cx).file())?;
3057 let abs_path =
3058 file.worktree.read(cx).absolutize(&file.path).ok()?;
3059 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3060 log::debug!(
3061                                    "start reloading diff bases for repo path {}",
3062 repo_path.0.display()
3063 );
3064 diff_state.update(cx, |diff_state, _| {
3065 let has_unstaged_diff = diff_state
3066 .unstaged_diff
3067 .as_ref()
3068 .is_some_and(|diff| diff.is_upgradable());
3069 let has_uncommitted_diff = diff_state
3070 .uncommitted_diff
3071 .as_ref()
3072 .is_some_and(|set| set.is_upgradable());
3073
3074 Some((
3075 buffer,
3076 repo_path,
3077 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3078 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3079 ))
3080 })
3081 })
3082 .collect::<Vec<_>>()
3083 })
3084 })??;
3085
3086 let buffer_diff_base_changes = cx
3087 .background_spawn(async move {
3088 let mut changes = Vec::new();
3089 for (buffer, repo_path, current_index_text, current_head_text) in
3090 &repo_diff_state_updates
3091 {
3092 let index_text = if current_index_text.is_some() {
3093 backend.load_index_text(repo_path.clone()).await
3094 } else {
3095 None
3096 };
3097 let head_text = if current_head_text.is_some() {
3098 backend.load_committed_text(repo_path.clone()).await
3099 } else {
3100 None
3101 };
3102
3103 let change =
3104 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3105 (Some(current_index), Some(current_head)) => {
3106 let index_changed =
3107 index_text.as_ref() != current_index.as_deref();
3108 let head_changed =
3109 head_text.as_ref() != current_head.as_deref();
3110 if index_changed && head_changed {
3111 if index_text == head_text {
3112 Some(DiffBasesChange::SetBoth(head_text))
3113 } else {
3114 Some(DiffBasesChange::SetEach {
3115 index: index_text,
3116 head: head_text,
3117 })
3118 }
3119 } else if index_changed {
3120 Some(DiffBasesChange::SetIndex(index_text))
3121 } else if head_changed {
3122 Some(DiffBasesChange::SetHead(head_text))
3123 } else {
3124 None
3125 }
3126 }
3127 (Some(current_index), None) => {
3128 let index_changed =
3129 index_text.as_ref() != current_index.as_deref();
3130 index_changed
3131 .then_some(DiffBasesChange::SetIndex(index_text))
3132 }
3133 (None, Some(current_head)) => {
3134 let head_changed =
3135 head_text.as_ref() != current_head.as_deref();
3136 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3137 }
3138 (None, None) => None,
3139 };
3140
3141 changes.push((buffer.clone(), change))
3142 }
3143 changes
3144 })
3145 .await;
3146
3147 git_store.update(&mut cx, |git_store, cx| {
3148 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3149 let buffer_snapshot = buffer.read(cx).text_snapshot();
3150 let buffer_id = buffer_snapshot.remote_id();
3151 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3152 continue;
3153 };
3154
3155 let downstream_client = git_store.downstream_client();
3156 diff_state.update(cx, |diff_state, cx| {
3157 use proto::update_diff_bases::Mode;
3158
3159 if let Some((diff_bases_change, (client, project_id))) =
3160 diff_bases_change.clone().zip(downstream_client)
3161 {
3162 let (staged_text, committed_text, mode) = match diff_bases_change {
3163 DiffBasesChange::SetIndex(index) => {
3164 (index, None, Mode::IndexOnly)
3165 }
3166 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3167 DiffBasesChange::SetEach { index, head } => {
3168 (index, head, Mode::IndexAndHead)
3169 }
3170 DiffBasesChange::SetBoth(text) => {
3171 (None, text, Mode::IndexMatchesHead)
3172 }
3173 };
3174 client
3175 .send(proto::UpdateDiffBases {
3176 project_id: project_id.to_proto(),
3177 buffer_id: buffer_id.to_proto(),
3178 staged_text,
3179 committed_text,
3180 mode: mode as i32,
3181 })
3182 .log_err();
3183 }
3184
3185 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3186 });
3187 }
3188 })
3189 },
3190 );
3191 }
3192
3193 pub fn send_job<F, Fut, R>(
3194 &mut self,
3195 status: Option<SharedString>,
3196 job: F,
3197 ) -> oneshot::Receiver<R>
3198 where
3199 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3200 Fut: Future<Output = R> + 'static,
3201 R: Send + 'static,
3202 {
3203 self.send_keyed_job(None, status, job)
3204 }
3205
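    /// Enqueues a job on this repository's worker queue, optionally tagged with
    /// a `GitJobKey`. When a status message is provided, the job is recorded in
    /// `active_jobs` and observers are notified for the duration of its
    /// execution.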
3206 fn send_keyed_job<F, Fut, R>(
3207 &mut self,
3208 key: Option<GitJobKey>,
3209 status: Option<SharedString>,
3210 job: F,
3211 ) -> oneshot::Receiver<R>
3212 where
3213 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3214 Fut: Future<Output = R> + 'static,
3215 R: Send + 'static,
3216 {
3217 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3218 let job_id = post_inc(&mut self.job_id);
3219 let this = self.this.clone();
3220 self.job_sender
3221 .unbounded_send(GitJob {
3222 key,
3223 job: Box::new(move |state, cx: &mut AsyncApp| {
3224 let job = job(state, cx.clone());
3225 cx.spawn(async move |cx| {
3226 if let Some(s) = status.clone() {
3227 this.update(cx, |this, cx| {
3228 this.active_jobs.insert(
3229 job_id,
3230 JobInfo {
3231 start: Instant::now(),
3232 message: s.clone(),
3233 },
3234 );
3235
3236 cx.notify();
3237 })
3238 .ok();
3239 }
3240 let result = job.await;
3241
3242 this.update(cx, |this, cx| {
3243 this.active_jobs.remove(&job_id);
3244 cx.notify();
3245 })
3246 .ok();
3247
3248 result_tx.send(result).ok();
3249 })
3250 }),
3251 })
3252 .ok();
3253 result_rx
3254 }
3255
3256 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3257 let Some(git_store) = self.git_store.upgrade() else {
3258 return;
3259 };
3260 let entity = cx.entity();
3261 git_store.update(cx, |git_store, cx| {
3262 let Some((&id, _)) = git_store
3263 .repositories
3264 .iter()
3265 .find(|(_, handle)| *handle == &entity)
3266 else {
3267 return;
3268 };
3269 git_store.active_repo_id = Some(id);
3270 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3271 });
3272 }
3273
3274 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3275 self.snapshot.status()
3276 }
3277
3278 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3279 let git_store = self.git_store.upgrade()?;
3280 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3281 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
3282 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3283 Some(ProjectPath {
3284 worktree_id: worktree.read(cx).id(),
3285 path: relative_path.into(),
3286 })
3287 }
3288
3289 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3290 let git_store = self.git_store.upgrade()?;
3291 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3292 let abs_path = worktree_store.absolutize(path, cx)?;
3293 self.snapshot.abs_path_to_repo_path(&abs_path)
3294 }
3295
3296 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3297 other
3298 .read(cx)
3299 .snapshot
3300 .work_directory_abs_path
3301 .starts_with(&self.snapshot.work_directory_abs_path)
3302 }
3303
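    /// Returns the buffer used for composing commit messages, creating it on
    /// first use. For remote repositories the buffer is opened via RPC on the
    /// host and then awaited locally.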
3304 pub fn open_commit_buffer(
3305 &mut self,
3306 languages: Option<Arc<LanguageRegistry>>,
3307 buffer_store: Entity<BufferStore>,
3308 cx: &mut Context<Self>,
3309 ) -> Task<Result<Entity<Buffer>>> {
3310 let id = self.id;
3311 if let Some(buffer) = self.commit_message_buffer.clone() {
3312 return Task::ready(Ok(buffer));
3313 }
3314 let this = cx.weak_entity();
3315
3316 let rx = self.send_job(None, move |state, mut cx| async move {
3317 let Some(this) = this.upgrade() else {
3318 bail!("git store was dropped");
3319 };
3320 match state {
3321 RepositoryState::Local { .. } => {
3322 this.update(&mut cx, |_, cx| {
3323 Self::open_local_commit_buffer(languages, buffer_store, cx)
3324 })?
3325 .await
3326 }
3327 RepositoryState::Remote { project_id, client } => {
3328 let request = client.request(proto::OpenCommitMessageBuffer {
3329 project_id: project_id.0,
3330 repository_id: id.to_proto(),
3331 });
3332 let response = request.await.context("requesting to open commit buffer")?;
3333 let buffer_id = BufferId::new(response.buffer_id)?;
3334 let buffer = buffer_store
3335 .update(&mut cx, |buffer_store, cx| {
3336 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3337 })?
3338 .await?;
3339 if let Some(language_registry) = languages {
3340 let git_commit_language =
3341 language_registry.language_for_name("Git Commit").await?;
3342 buffer.update(&mut cx, |buffer, cx| {
3343 buffer.set_language(Some(git_commit_language), cx);
3344 })?;
3345 }
3346 this.update(&mut cx, |this, _| {
3347 this.commit_message_buffer = Some(buffer.clone());
3348 })?;
3349 Ok(buffer)
3350 }
3351 }
3352 });
3353
3354 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3355 }
3356
3357 fn open_local_commit_buffer(
3358 language_registry: Option<Arc<LanguageRegistry>>,
3359 buffer_store: Entity<BufferStore>,
3360 cx: &mut Context<Self>,
3361 ) -> Task<Result<Entity<Buffer>>> {
3362 cx.spawn(async move |repository, cx| {
3363 let buffer = buffer_store
3364 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
3365 .await?;
3366
3367 if let Some(language_registry) = language_registry {
3368 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3369 buffer.update(cx, |buffer, cx| {
3370 buffer.set_language(Some(git_commit_language), cx);
3371 })?;
3372 }
3373
3374 repository.update(cx, |repository, _| {
3375 repository.commit_message_buffer = Some(buffer.clone());
3376 })?;
3377 Ok(buffer)
3378 })
3379 }
3380
3381 pub fn checkout_files(
3382 &mut self,
3383 commit: &str,
3384 paths: Vec<RepoPath>,
3385 _cx: &mut App,
3386 ) -> oneshot::Receiver<Result<()>> {
3387 let commit = commit.to_string();
3388 let id = self.id;
3389
3390 self.send_job(
3391 Some(format!("git checkout {}", commit).into()),
3392 move |git_repo, _| async move {
3393 match git_repo {
3394 RepositoryState::Local {
3395 backend,
3396 environment,
3397 ..
3398 } => {
3399 backend
3400 .checkout_files(commit, paths, environment.clone())
3401 .await
3402 }
3403 RepositoryState::Remote { project_id, client } => {
3404 client
3405 .request(proto::GitCheckoutFiles {
3406 project_id: project_id.0,
3407 repository_id: id.to_proto(),
3408 commit,
3409 paths: paths
3410 .into_iter()
3411 .map(|p| p.to_string_lossy().to_string())
3412 .collect(),
3413 })
3414 .await?;
3415
3416 Ok(())
3417 }
3418 }
3419 },
3420 )
3421 }
3422
3423 pub fn reset(
3424 &mut self,
3425 commit: String,
3426 reset_mode: ResetMode,
3427 _cx: &mut App,
3428 ) -> oneshot::Receiver<Result<()>> {
3429 let commit = commit.to_string();
3430 let id = self.id;
3431
3432 self.send_job(None, move |git_repo, _| async move {
3433 match git_repo {
3434 RepositoryState::Local {
3435 backend,
3436 environment,
3437 ..
3438 } => backend.reset(commit, reset_mode, environment).await,
3439 RepositoryState::Remote { project_id, client } => {
3440 client
3441 .request(proto::GitReset {
3442 project_id: project_id.0,
3443 repository_id: id.to_proto(),
3444 commit,
3445 mode: match reset_mode {
3446 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3447 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3448 },
3449 })
3450 .await?;
3451
3452 Ok(())
3453 }
3454 }
3455 })
3456 }
3457
3458 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3459 let id = self.id;
3460 self.send_job(None, move |git_repo, _cx| async move {
3461 match git_repo {
3462 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3463 RepositoryState::Remote { project_id, client } => {
3464 let resp = client
3465 .request(proto::GitShow {
3466 project_id: project_id.0,
3467 repository_id: id.to_proto(),
3468 commit,
3469 })
3470 .await?;
3471
3472 Ok(CommitDetails {
3473 sha: resp.sha.into(),
3474 message: resp.message.into(),
3475 commit_timestamp: resp.commit_timestamp,
3476 author_email: resp.author_email.into(),
3477 author_name: resp.author_name.into(),
3478 })
3479 }
3480 }
3481 })
3482 }
3483
3484 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3485 let id = self.id;
3486 self.send_job(None, move |git_repo, cx| async move {
3487 match git_repo {
3488 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3489 RepositoryState::Remote {
3490 client, project_id, ..
3491 } => {
3492 let response = client
3493 .request(proto::LoadCommitDiff {
3494 project_id: project_id.0,
3495 repository_id: id.to_proto(),
3496 commit,
3497 })
3498 .await?;
3499 Ok(CommitDiff {
3500 files: response
3501 .files
3502 .into_iter()
3503 .map(|file| CommitFile {
3504 path: Path::new(&file.path).into(),
3505 old_text: file.old_text,
3506 new_text: file.new_text,
3507 })
3508 .collect(),
3509 })
3510 }
3511 }
3512 })
3513 }
3514
3515 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3516 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3517 }
3518
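    /// Stages the given paths, first saving any open buffers for those paths
    /// that still exist on disk so that the staged content matches what is in
    /// the editor.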
3519 pub fn stage_entries(
3520 &self,
3521 entries: Vec<RepoPath>,
3522 cx: &mut Context<Self>,
3523 ) -> Task<anyhow::Result<()>> {
3524 if entries.is_empty() {
3525 return Task::ready(Ok(()));
3526 }
3527 let id = self.id;
3528
3529 let mut save_futures = Vec::new();
3530 if let Some(buffer_store) = self.buffer_store(cx) {
3531 buffer_store.update(cx, |buffer_store, cx| {
3532 for path in &entries {
3533 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3534 continue;
3535 };
3536 if let Some(buffer) = buffer_store.get_by_path(&project_path) {
3537 if buffer
3538 .read(cx)
3539 .file()
3540 .map_or(false, |file| file.disk_state().exists())
3541 {
3542 save_futures.push(buffer_store.save_buffer(buffer, cx));
3543 }
3544 }
3545 }
3546 })
3547 }
3548
3549 cx.spawn(async move |this, cx| {
3550 for save_future in save_futures {
3551 save_future.await?;
3552 }
3553
3554 this.update(cx, |this, _| {
3555 this.send_job(None, move |git_repo, _cx| async move {
3556 match git_repo {
3557 RepositoryState::Local {
3558 backend,
3559 environment,
3560 ..
3561 } => backend.stage_paths(entries, environment.clone()).await,
3562 RepositoryState::Remote { project_id, client } => {
3563 client
3564 .request(proto::Stage {
3565 project_id: project_id.0,
3566 repository_id: id.to_proto(),
3567 paths: entries
3568 .into_iter()
3569 .map(|repo_path| repo_path.as_ref().to_proto())
3570 .collect(),
3571 })
3572 .await
3573 .context("sending stage request")?;
3574
3575 Ok(())
3576 }
3577 }
3578 })
3579 })?
3580 .await??;
3581
3582 Ok(())
3583 })
3584 }
3585
3586 pub fn unstage_entries(
3587 &self,
3588 entries: Vec<RepoPath>,
3589 cx: &mut Context<Self>,
3590 ) -> Task<anyhow::Result<()>> {
3591 if entries.is_empty() {
3592 return Task::ready(Ok(()));
3593 }
3594 let id = self.id;
3595
3596 let mut save_futures = Vec::new();
3597 if let Some(buffer_store) = self.buffer_store(cx) {
3598 buffer_store.update(cx, |buffer_store, cx| {
3599 for path in &entries {
3600 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3601 continue;
3602 };
3603 if let Some(buffer) = buffer_store.get_by_path(&project_path) {
3604 if buffer
3605 .read(cx)
3606 .file()
3607 .map_or(false, |file| file.disk_state().exists())
3608 {
3609 save_futures.push(buffer_store.save_buffer(buffer, cx));
3610 }
3611 }
3612 }
3613 })
3614 }
3615
3616 cx.spawn(async move |this, cx| {
3617 for save_future in save_futures {
3618 save_future.await?;
3619 }
3620
3621 this.update(cx, |this, _| {
3622 this.send_job(None, move |git_repo, _cx| async move {
3623 match git_repo {
3624 RepositoryState::Local {
3625 backend,
3626 environment,
3627 ..
3628 } => backend.unstage_paths(entries, environment).await,
3629 RepositoryState::Remote { project_id, client } => {
3630 client
3631 .request(proto::Unstage {
3632 project_id: project_id.0,
3633 repository_id: id.to_proto(),
3634 paths: entries
3635 .into_iter()
3636 .map(|repo_path| repo_path.as_ref().to_proto())
3637 .collect(),
3638 })
3639 .await
3640 .context("sending unstage request")?;
3641
3642 Ok(())
3643 }
3644 }
3645 })
3646 })?
3647 .await??;
3648
3649 Ok(())
3650 })
3651 }
3652
3653 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3654 let to_stage = self
3655 .cached_status()
3656 .filter(|entry| !entry.status.staging().is_fully_staged())
3657 .map(|entry| entry.repo_path.clone())
3658 .collect();
3659 self.stage_entries(to_stage, cx)
3660 }
3661
3662 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3663 let to_unstage = self
3664 .cached_status()
3665 .filter(|entry| entry.status.staging().has_staged())
3666 .map(|entry| entry.repo_path.clone())
3667 .collect();
3668 self.unstage_entries(to_unstage, cx)
3669 }
3670
3671 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3672 let to_stash = self
3673 .cached_status()
3674 .map(|entry| entry.repo_path.clone())
3675 .collect();
3676
3677 self.stash_entries(to_stash, cx)
3678 }
3679
3680 pub fn stash_entries(
3681 &mut self,
3682 entries: Vec<RepoPath>,
3683 cx: &mut Context<Self>,
3684 ) -> Task<anyhow::Result<()>> {
3685 let id = self.id;
3686
3687 cx.spawn(async move |this, cx| {
3688 this.update(cx, |this, _| {
3689 this.send_job(None, move |git_repo, _cx| async move {
3690 match git_repo {
3691 RepositoryState::Local {
3692 backend,
3693 environment,
3694 ..
3695 } => backend.stash_paths(entries, environment).await,
3696 RepositoryState::Remote { project_id, client } => {
3697 client
3698 .request(proto::Stash {
3699 project_id: project_id.0,
3700 repository_id: id.to_proto(),
3701 paths: entries
3702 .into_iter()
3703 .map(|repo_path| repo_path.as_ref().to_proto())
3704 .collect(),
3705 })
3706 .await
3707 .context("sending stash request")?;
3708 Ok(())
3709 }
3710 }
3711 })
3712 })?
3713 .await??;
3714 Ok(())
3715 })
3716 }
3717
3718 pub fn stash_pop(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3719 let id = self.id;
3720 cx.spawn(async move |this, cx| {
3721 this.update(cx, |this, _| {
3722 this.send_job(None, move |git_repo, _cx| async move {
3723 match git_repo {
3724 RepositoryState::Local {
3725 backend,
3726 environment,
3727 ..
3728 } => backend.stash_pop(environment).await,
3729 RepositoryState::Remote { project_id, client } => {
3730 client
3731 .request(proto::StashPop {
3732 project_id: project_id.0,
3733 repository_id: id.to_proto(),
3734 })
3735 .await
3736 .context("sending stash pop request")?;
3737 Ok(())
3738 }
3739 }
3740 })
3741 })?
3742 .await??;
3743 Ok(())
3744 })
3745 }
3746
3747 pub fn commit(
3748 &mut self,
3749 message: SharedString,
3750 name_and_email: Option<(SharedString, SharedString)>,
3751 options: CommitOptions,
3752 _cx: &mut App,
3753 ) -> oneshot::Receiver<Result<()>> {
3754 let id = self.id;
3755
3756 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3757 match git_repo {
3758 RepositoryState::Local {
3759 backend,
3760 environment,
3761 ..
3762 } => {
3763 backend
3764 .commit(message, name_and_email, options, environment)
3765 .await
3766 }
3767 RepositoryState::Remote { project_id, client } => {
3768 let (name, email) = name_and_email.unzip();
3769 client
3770 .request(proto::Commit {
3771 project_id: project_id.0,
3772 repository_id: id.to_proto(),
3773 message: String::from(message),
3774 name: name.map(String::from),
3775 email: email.map(String::from),
3776 options: Some(proto::commit::CommitOptions {
3777 amend: options.amend,
3778 signoff: options.signoff,
3779 }),
3780 })
3781 .await
3782 .context("sending commit request")?;
3783
3784 Ok(())
3785 }
3786 }
3787 })
3788 }
3789
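    /// Pushes the given branch to the given remote, either directly or via the
    /// host over RPC. After a successful local push, the cached branch info is
    /// refreshed and the updated snapshot is sent to any downstream clients.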
3790 pub fn fetch(
3791 &mut self,
3792 fetch_options: FetchOptions,
3793 askpass: AskPassDelegate,
3794 _cx: &mut App,
3795 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3796 let askpass_delegates = self.askpass_delegates.clone();
3797 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3798 let id = self.id;
3799
3800 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3801 match git_repo {
3802 RepositoryState::Local {
3803 backend,
3804 environment,
3805 ..
3806 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3807 RepositoryState::Remote { project_id, client } => {
3808 askpass_delegates.lock().insert(askpass_id, askpass);
3809 let _defer = util::defer(|| {
3810 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3811 debug_assert!(askpass_delegate.is_some());
3812 });
3813
3814 let response = client
3815 .request(proto::Fetch {
3816 project_id: project_id.0,
3817 repository_id: id.to_proto(),
3818 askpass_id,
3819 remote: fetch_options.to_proto(),
3820 })
3821 .await
3822 .context("sending fetch request")?;
3823
3824 Ok(RemoteCommandOutput {
3825 stdout: response.stdout,
3826 stderr: response.stderr,
3827 })
3828 }
3829 }
3830 })
3831 }
3832
3833 pub fn push(
3834 &mut self,
3835 branch: SharedString,
3836 remote: SharedString,
3837 options: Option<PushOptions>,
3838 askpass: AskPassDelegate,
3839 cx: &mut Context<Self>,
3840 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3841 let askpass_delegates = self.askpass_delegates.clone();
3842 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3843 let id = self.id;
3844
3845 let args = options
3846 .map(|option| match option {
3847 PushOptions::SetUpstream => " --set-upstream",
3848 PushOptions::Force => " --force-with-lease",
3849 })
3850 .unwrap_or("");
3851
3852 let updates_tx = self
3853 .git_store()
3854 .and_then(|git_store| match &git_store.read(cx).state {
3855 GitStoreState::Local { downstream, .. } => downstream
3856 .as_ref()
3857 .map(|downstream| downstream.updates_tx.clone()),
3858 _ => None,
3859 });
3860
3861 let this = cx.weak_entity();
3862 self.send_job(
3863             Some(format!("git push{} {} {}", args, remote, branch).into()),
3864 move |git_repo, mut cx| async move {
3865 match git_repo {
3866 RepositoryState::Local {
3867 backend,
3868 environment,
3869 ..
3870 } => {
3871 let result = backend
3872 .push(
3873 branch.to_string(),
3874 remote.to_string(),
3875 options,
3876 askpass,
3877 environment.clone(),
3878 cx.clone(),
3879 )
3880 .await;
3881 if result.is_ok() {
3882 let branches = backend.branches().await?;
3883 let branch = branches.into_iter().find(|branch| branch.is_head);
3884 log::info!("head branch after scan is {branch:?}");
3885 let snapshot = this.update(&mut cx, |this, cx| {
3886 this.snapshot.branch = branch;
3887 let snapshot = this.snapshot.clone();
3888 cx.emit(RepositoryEvent::Updated {
3889 full_scan: false,
3890 new_instance: false,
3891 });
3892 snapshot
3893 })?;
3894 if let Some(updates_tx) = updates_tx {
3895 updates_tx
3896 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3897 .ok();
3898 }
3899 }
3900 result
3901 }
3902 RepositoryState::Remote { project_id, client } => {
3903 askpass_delegates.lock().insert(askpass_id, askpass);
3904 let _defer = util::defer(|| {
3905 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3906 debug_assert!(askpass_delegate.is_some());
3907 });
3908 let response = client
3909 .request(proto::Push {
3910 project_id: project_id.0,
3911 repository_id: id.to_proto(),
3912 askpass_id,
3913 branch_name: branch.to_string(),
3914 remote_name: remote.to_string(),
3915 options: options.map(|options| match options {
3916 PushOptions::Force => proto::push::PushOptions::Force,
3917 PushOptions::SetUpstream => {
3918 proto::push::PushOptions::SetUpstream
3919 }
3920 }
3921 as i32),
3922 })
3923 .await
3924 .context("sending push request")?;
3925
3926 Ok(RemoteCommandOutput {
3927 stdout: response.stdout,
3928 stderr: response.stderr,
3929 })
3930 }
3931 }
3932 },
3933 )
3934 }
3935
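    /// Runs `git pull` for the given remote and branch, forwarding the
    /// operation over RPC when the repository lives on a remote peer.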
3936 pub fn pull(
3937 &mut self,
3938 branch: SharedString,
3939 remote: SharedString,
3940 askpass: AskPassDelegate,
3941 _cx: &mut App,
3942 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3943 let askpass_delegates = self.askpass_delegates.clone();
3944 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3945 let id = self.id;
3946
3947 self.send_job(
3948 Some(format!("git pull {} {}", remote, branch).into()),
3949 move |git_repo, cx| async move {
3950 match git_repo {
3951 RepositoryState::Local {
3952 backend,
3953 environment,
3954 ..
3955 } => {
3956 backend
3957 .pull(
3958 branch.to_string(),
3959 remote.to_string(),
3960 askpass,
3961 environment.clone(),
3962 cx,
3963 )
3964 .await
3965 }
3966 RepositoryState::Remote { project_id, client } => {
3967 askpass_delegates.lock().insert(askpass_id, askpass);
3968 let _defer = util::defer(|| {
3969 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3970 debug_assert!(askpass_delegate.is_some());
3971 });
3972 let response = client
3973 .request(proto::Pull {
3974 project_id: project_id.0,
3975 repository_id: id.to_proto(),
3976 askpass_id,
3977 branch_name: branch.to_string(),
3978 remote_name: remote.to_string(),
3979 })
3980 .await
3981 .context("sending pull request")?;
3982
3983 Ok(RemoteCommandOutput {
3984 stdout: response.stdout,
3985 stderr: response.stderr,
3986 })
3987 }
3988 }
3989 },
3990 )
3991 }
3992
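    /// Queues a job that writes `content` as the index text for `path`. The
    /// job is keyed by path, so a write that is still waiting in the queue is
    /// skipped when a newer write for the same path arrives. When a hunk
    /// staging operation count is supplied, it is recorded on the buffer's
    /// diff state after the write so that later diff recalculations can tell
    /// whether the index they read back already reflects that operation.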
3993 fn spawn_set_index_text_job(
3994 &mut self,
3995 path: RepoPath,
3996 content: Option<String>,
3997 hunk_staging_operation_count: Option<usize>,
3998 cx: &mut Context<Self>,
3999 ) -> oneshot::Receiver<anyhow::Result<()>> {
4000 let id = self.id;
4001 let this = cx.weak_entity();
4002 let git_store = self.git_store.clone();
4003 self.send_keyed_job(
4004 Some(GitJobKey::WriteIndex(path.clone())),
4005 None,
4006 move |git_repo, mut cx| async move {
4007 log::debug!("start updating index text for buffer {}", path.display());
4008 match git_repo {
4009 RepositoryState::Local {
4010 backend,
4011 environment,
4012 ..
4013 } => {
4014 backend
4015 .set_index_text(path.clone(), content, environment.clone())
4016 .await?;
4017 }
4018 RepositoryState::Remote { project_id, client } => {
4019 client
4020 .request(proto::SetIndexText {
4021 project_id: project_id.0,
4022 repository_id: id.to_proto(),
4023 path: path.as_ref().to_proto(),
4024 text: content,
4025 })
4026 .await?;
4027 }
4028 }
4029 log::debug!("finish updating index text for buffer {}", path.display());
4030
4031 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4032 let project_path = this
4033 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4034 .ok()
4035 .flatten();
4036 git_store.update(&mut cx, |git_store, cx| {
4037 let buffer_id = git_store
4038 .buffer_store
4039 .read(cx)
4040 .get_by_path(&project_path?)?
4041 .read(cx)
4042 .remote_id();
4043 let diff_state = git_store.diffs.get(&buffer_id)?;
4044 diff_state.update(cx, |diff_state, _| {
4045 diff_state.hunk_staging_operation_count_as_of_write =
4046 hunk_staging_operation_count;
4047 });
4048 Some(())
4049 })?;
4050 }
4051 Ok(())
4052 },
4053 )
4054 }
4055
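    /// Lists the repository's remotes. When `branch_name` is provided the
    /// backend may scope the result to that branch (for example, to its
    /// configured push remote).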
4056 pub fn get_remotes(
4057 &mut self,
4058 branch_name: Option<String>,
4059 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4060 let id = self.id;
4061 self.send_job(None, move |repo, _cx| async move {
4062 match repo {
4063 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4064 RepositoryState::Remote { project_id, client } => {
4065 let response = client
4066 .request(proto::GetRemotes {
4067 project_id: project_id.0,
4068 repository_id: id.to_proto(),
4069 branch_name,
4070 })
4071 .await?;
4072
4073 let remotes = response
4074 .remotes
4075 .into_iter()
4076                         .map(|remote| git::repository::Remote {
4077                             name: remote.name.into(),
4078                         })
4079 .collect();
4080
4081 Ok(remotes)
4082 }
4083 }
4084 })
4085 }
4086
4087 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4088 let id = self.id;
4089 self.send_job(None, move |repo, _| async move {
4090 match repo {
4091 RepositoryState::Local { backend, .. } => backend.branches().await,
4092 RepositoryState::Remote { project_id, client } => {
4093 let response = client
4094 .request(proto::GitGetBranches {
4095 project_id: project_id.0,
4096 repository_id: id.to_proto(),
4097 })
4098 .await?;
4099
4100 let branches = response
4101 .branches
4102 .into_iter()
4103 .map(|branch| proto_to_branch(&branch))
4104 .collect();
4105
4106 Ok(branches)
4107 }
4108 }
4109 })
4110 }
4111
4112 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4113 let id = self.id;
4114 self.send_job(None, move |repo, _| async move {
4115 match repo {
4116 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4117 RepositoryState::Remote { project_id, client } => {
4118 let response = client
4119 .request(proto::GetDefaultBranch {
4120 project_id: project_id.0,
4121 repository_id: id.to_proto(),
4122 })
4123 .await?;
4124
4125 anyhow::Ok(response.branch.map(SharedString::from))
4126 }
4127 }
4128 })
4129 }
4130
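    /// Produces a textual diff for the repository, either from HEAD to the
    /// index or from HEAD to the working tree depending on `diff_type`.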
4131 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4132 let id = self.id;
4133 self.send_job(None, move |repo, _cx| async move {
4134 match repo {
4135 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4136 RepositoryState::Remote { project_id, client } => {
4137 let response = client
4138 .request(proto::GitDiff {
4139 project_id: project_id.0,
4140 repository_id: id.to_proto(),
4141 diff_type: match diff_type {
4142 DiffType::HeadToIndex => {
4143 proto::git_diff::DiffType::HeadToIndex.into()
4144 }
4145 DiffType::HeadToWorktree => {
4146 proto::git_diff::DiffType::HeadToWorktree.into()
4147 }
4148 },
4149 })
4150 .await?;
4151
4152 Ok(response.diff)
4153 }
4154 }
4155 })
4156 }
4157
4158 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4159 let id = self.id;
4160 self.send_job(
4161 Some(format!("git switch -c {branch_name}").into()),
4162 move |repo, _cx| async move {
4163 match repo {
4164 RepositoryState::Local { backend, .. } => {
4165 backend.create_branch(branch_name).await
4166 }
4167 RepositoryState::Remote { project_id, client } => {
4168 client
4169 .request(proto::GitCreateBranch {
4170 project_id: project_id.0,
4171 repository_id: id.to_proto(),
4172 branch_name,
4173 })
4174 .await?;
4175
4176 Ok(())
4177 }
4178 }
4179 },
4180 )
4181 }
4182
4183 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4184 let id = self.id;
4185 self.send_job(
4186 Some(format!("git switch {branch_name}").into()),
4187 move |repo, _cx| async move {
4188 match repo {
4189 RepositoryState::Local { backend, .. } => {
4190 backend.change_branch(branch_name).await
4191 }
4192 RepositoryState::Remote { project_id, client } => {
4193 client
4194 .request(proto::GitChangeBranch {
4195 project_id: project_id.0,
4196 repository_id: id.to_proto(),
4197 branch_name,
4198 })
4199 .await?;
4200
4201 Ok(())
4202 }
4203 }
4204 },
4205 )
4206 }
4207
4208 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4209 let id = self.id;
4210 self.send_job(None, move |repo, _cx| async move {
4211 match repo {
4212 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4213 RepositoryState::Remote { project_id, client } => {
4214 let response = client
4215 .request(proto::CheckForPushedCommits {
4216 project_id: project_id.0,
4217 repository_id: id.to_proto(),
4218 })
4219 .await?;
4220
4221 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4222
4223 Ok(branches)
4224 }
4225 }
4226 })
4227 }
4228
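    /// Captures a checkpoint of the repository's current state. This is
    /// currently only implemented for local repositories; remote repositories
    /// return an error.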
4229 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4230 self.send_job(None, |repo, _cx| async move {
4231 match repo {
4232 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4233 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4234 }
4235 })
4236 }
4237
4238 pub fn restore_checkpoint(
4239 &mut self,
4240 checkpoint: GitRepositoryCheckpoint,
4241 ) -> oneshot::Receiver<Result<()>> {
4242 self.send_job(None, move |repo, _cx| async move {
4243 match repo {
4244 RepositoryState::Local { backend, .. } => {
4245 backend.restore_checkpoint(checkpoint).await
4246 }
4247 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4248 }
4249 })
4250 }
4251
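    /// Applies an `UpdateRepository` message received from the host,
    /// replacing the branch, head commit, merge state, and per-path statuses
    /// in the local snapshot, then emits an update event.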
4252 pub(crate) fn apply_remote_update(
4253 &mut self,
4254 update: proto::UpdateRepository,
4255 is_new: bool,
4256 cx: &mut Context<Self>,
4257 ) -> Result<()> {
4258 let conflicted_paths = TreeSet::from_ordered_entries(
4259 update
4260 .current_merge_conflicts
4261 .into_iter()
4262 .map(|path| RepoPath(Path::new(&path).into())),
4263 );
4264 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4265 self.snapshot.head_commit = update
4266 .head_commit_details
4267 .as_ref()
4268 .map(proto_to_commit_details);
4269
4270 self.snapshot.merge.conflicted_paths = conflicted_paths;
4271 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4272
4273 let edits = update
4274 .removed_statuses
4275 .into_iter()
4276 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
4277 .chain(
4278 update
4279 .updated_statuses
4280 .into_iter()
4281 .filter_map(|updated_status| {
4282 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4283 }),
4284 )
4285 .collect::<Vec<_>>();
4286 self.snapshot.statuses_by_path.edit(edits, &());
4287 if update.is_last_update {
4288 self.snapshot.scan_id = update.scan_id;
4289 }
4290 cx.emit(RepositoryEvent::Updated {
4291 full_scan: true,
4292 new_instance: is_new,
4293 });
4294 Ok(())
4295 }
4296
4297 pub fn compare_checkpoints(
4298 &mut self,
4299 left: GitRepositoryCheckpoint,
4300 right: GitRepositoryCheckpoint,
4301 ) -> oneshot::Receiver<Result<bool>> {
4302 self.send_job(None, move |repo, _cx| async move {
4303 match repo {
4304 RepositoryState::Local { backend, .. } => {
4305 backend.compare_checkpoints(left, right).await
4306 }
4307 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4308 }
4309 })
4310 }
4311
4312 pub fn diff_checkpoints(
4313 &mut self,
4314 base_checkpoint: GitRepositoryCheckpoint,
4315 target_checkpoint: GitRepositoryCheckpoint,
4316 ) -> oneshot::Receiver<Result<String>> {
4317 self.send_job(None, move |repo, _cx| async move {
4318 match repo {
4319 RepositoryState::Local { backend, .. } => {
4320 backend
4321 .diff_checkpoints(base_checkpoint, target_checkpoint)
4322 .await
4323 }
4324 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4325 }
4326 })
4327 }
4328
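    /// Schedules a full git status scan. The job is keyed, so a queued scan is
    /// dropped when another scan is queued behind it; the resulting snapshot
    /// replaces the current one and is forwarded downstream when a sender is
    /// provided.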
4329 fn schedule_scan(
4330 &mut self,
4331 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4332 cx: &mut Context<Self>,
4333 ) {
4334 let this = cx.weak_entity();
4335 let _ = self.send_keyed_job(
4336 Some(GitJobKey::ReloadGitState),
4337 None,
4338 |state, mut cx| async move {
4339 log::debug!("run scheduled git status scan");
4340
4341 let Some(this) = this.upgrade() else {
4342 return Ok(());
4343 };
4344 let RepositoryState::Local { backend, .. } = state else {
4345 bail!("not a local repository")
4346 };
4347 let (snapshot, events) = this
4348 .update(&mut cx, |this, _| {
4349 this.paths_needing_status_update.clear();
4350 compute_snapshot(
4351 this.id,
4352 this.work_directory_abs_path.clone(),
4353 this.snapshot.clone(),
4354 backend.clone(),
4355 )
4356 })?
4357 .await?;
4358 this.update(&mut cx, |this, cx| {
4359 this.snapshot = snapshot.clone();
4360 for event in events {
4361 cx.emit(event);
4362 }
4363 })?;
4364 if let Some(updates_tx) = updates_tx {
4365 updates_tx
4366 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4367 .ok();
4368 }
4369 Ok(())
4370 },
4371 );
4372 }
4373
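    /// Spawns the background worker that runs git jobs for a local repository.
    /// It resolves the project's directory environment, opens the repository
    /// backend, and then drains the job queue in order, skipping any keyed job
    /// that is superseded by a later job with the same key.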
4374 fn spawn_local_git_worker(
4375 work_directory_abs_path: Arc<Path>,
4376 dot_git_abs_path: Arc<Path>,
4377 _repository_dir_abs_path: Arc<Path>,
4378 _common_dir_abs_path: Arc<Path>,
4379 project_environment: WeakEntity<ProjectEnvironment>,
4380 fs: Arc<dyn Fs>,
4381 cx: &mut Context<Self>,
4382 ) -> mpsc::UnboundedSender<GitJob> {
4383 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4384
4385 cx.spawn(async move |_, cx| {
4386 let environment = project_environment
4387 .upgrade()
4388 .context("missing project environment")?
4389 .update(cx, |project_environment, cx| {
4390 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4391 })?
4392 .await
4393 .unwrap_or_else(|| {
4394 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4395 HashMap::default()
4396 });
4397 let backend = cx
4398 .background_spawn(async move {
4399 fs.open_repo(&dot_git_abs_path)
4400 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4401 })
4402 .await?;
4403
4404 if let Some(git_hosting_provider_registry) =
4405 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4406 {
4407 git_hosting_providers::register_additional_providers(
4408 git_hosting_provider_registry,
4409 backend.clone(),
4410 );
4411 }
4412
4413 let state = RepositoryState::Local {
4414 backend,
4415 environment: Arc::new(environment),
4416 };
4417 let mut jobs = VecDeque::new();
4418 loop {
4419 while let Ok(Some(next_job)) = job_rx.try_next() {
4420 jobs.push_back(next_job);
4421 }
4422
4423 if let Some(job) = jobs.pop_front() {
4424 if let Some(current_key) = &job.key {
4425 if jobs
4426 .iter()
4427 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4428 {
4429 continue;
4430 }
4431 }
4432 (job.job)(state.clone(), cx).await;
4433 } else if let Some(job) = job_rx.next().await {
4434 jobs.push_back(job);
4435 } else {
4436 break;
4437 }
4438 }
4439 anyhow::Ok(())
4440 })
4441 .detach_and_log_err(cx);
4442
4443 job_tx
4444 }
4445
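    /// Spawns the same job-processing loop for a repository owned by a remote
    /// peer; each job runs against a `RepositoryState::Remote` that forwards
    /// requests over RPC.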
4446 fn spawn_remote_git_worker(
4447 project_id: ProjectId,
4448 client: AnyProtoClient,
4449 cx: &mut Context<Self>,
4450 ) -> mpsc::UnboundedSender<GitJob> {
4451 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4452
4453 cx.spawn(async move |_, cx| {
4454 let state = RepositoryState::Remote { project_id, client };
4455 let mut jobs = VecDeque::new();
4456 loop {
4457 while let Ok(Some(next_job)) = job_rx.try_next() {
4458 jobs.push_back(next_job);
4459 }
4460
4461 if let Some(job) = jobs.pop_front() {
4462 if let Some(current_key) = &job.key {
4463 if jobs
4464 .iter()
4465 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4466 {
4467 continue;
4468 }
4469 }
4470 (job.job)(state.clone(), cx).await;
4471 } else if let Some(job) = job_rx.next().await {
4472 jobs.push_back(job);
4473 } else {
4474 break;
4475 }
4476 }
4477 anyhow::Ok(())
4478 })
4479 .detach_and_log_err(cx);
4480
4481 job_tx
4482 }
4483
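    /// Loads the index (staged) text for `repo_path`, used as the base text of
    /// an unstaged diff.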
4484 fn load_staged_text(
4485 &mut self,
4486 buffer_id: BufferId,
4487 repo_path: RepoPath,
4488 cx: &App,
4489 ) -> Task<Result<Option<String>>> {
4490 let rx = self.send_job(None, move |state, _| async move {
4491 match state {
4492 RepositoryState::Local { backend, .. } => {
4493 anyhow::Ok(backend.load_index_text(repo_path).await)
4494 }
4495 RepositoryState::Remote { project_id, client } => {
4496 let response = client
4497 .request(proto::OpenUnstagedDiff {
4498 project_id: project_id.to_proto(),
4499 buffer_id: buffer_id.to_proto(),
4500 })
4501 .await?;
4502 Ok(response.staged_text)
4503 }
4504 }
4505 });
4506 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4507 }
4508
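    /// Loads the HEAD and index texts for `repo_path`. When the two are
    /// identical a single `DiffBasesChange::SetBoth` is returned; otherwise
    /// both texts are returned via `DiffBasesChange::SetEach`.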
4509 fn load_committed_text(
4510 &mut self,
4511 buffer_id: BufferId,
4512 repo_path: RepoPath,
4513 cx: &App,
4514 ) -> Task<Result<DiffBasesChange>> {
4515 let rx = self.send_job(None, move |state, _| async move {
4516 match state {
4517 RepositoryState::Local { backend, .. } => {
4518 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4519 let staged_text = backend.load_index_text(repo_path).await;
4520 let diff_bases_change = if committed_text == staged_text {
4521 DiffBasesChange::SetBoth(committed_text)
4522 } else {
4523 DiffBasesChange::SetEach {
4524 index: staged_text,
4525 head: committed_text,
4526 }
4527 };
4528 anyhow::Ok(diff_bases_change)
4529 }
4530 RepositoryState::Remote { project_id, client } => {
4531 use proto::open_uncommitted_diff_response::Mode;
4532
4533 let response = client
4534 .request(proto::OpenUncommittedDiff {
4535 project_id: project_id.to_proto(),
4536 buffer_id: buffer_id.to_proto(),
4537 })
4538 .await?;
4539 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4540 let bases = match mode {
4541 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4542 Mode::IndexAndHead => DiffBasesChange::SetEach {
4543 head: response.committed_text,
4544 index: response.staged_text,
4545 },
4546 };
4547 Ok(bases)
4548 }
4549 }
4550 });
4551
4552 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4553 }
4554
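    /// Records that the given paths changed on disk and queues a keyed job to
    /// refresh their git statuses, applying only the entries that actually
    /// differ from the current snapshot and bumping the scan id when an edit
    /// is made.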
4555 fn paths_changed(
4556 &mut self,
4557 paths: Vec<RepoPath>,
4558 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4559 cx: &mut Context<Self>,
4560 ) {
4561 self.paths_needing_status_update.extend(paths);
4562
4563 let this = cx.weak_entity();
4564 let _ = self.send_keyed_job(
4565 Some(GitJobKey::RefreshStatuses),
4566 None,
4567 |state, mut cx| async move {
4568 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4569 (
4570 this.snapshot.clone(),
4571 mem::take(&mut this.paths_needing_status_update),
4572 )
4573 })?;
4574 let RepositoryState::Local { backend, .. } = state else {
4575 bail!("not a local repository")
4576 };
4577
4578 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4579 if paths.is_empty() {
4580 return Ok(());
4581 }
4582 let statuses = backend.status(&paths).await?;
4583
4584 let changed_path_statuses = cx
4585 .background_spawn(async move {
4586 let mut changed_path_statuses = Vec::new();
4587 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4588 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4589
4590 for (repo_path, status) in &*statuses.entries {
4591 changed_paths.remove(repo_path);
4592                         if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4593                             && cursor.item().is_some_and(|entry| entry.status == *status)
4594                         {
4595                             continue;
4596                         }
4597
4598 changed_path_statuses.push(Edit::Insert(StatusEntry {
4599 repo_path: repo_path.clone(),
4600 status: *status,
4601 }));
4602 }
4603 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4604 for path in changed_paths.into_iter() {
4605 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4606 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4607 }
4608 }
4609 changed_path_statuses
4610 })
4611 .await;
4612
4613 this.update(&mut cx, |this, cx| {
4614 if !changed_path_statuses.is_empty() {
4615 this.snapshot
4616 .statuses_by_path
4617 .edit(changed_path_statuses, &());
4618 this.snapshot.scan_id += 1;
4619 if let Some(updates_tx) = updates_tx {
4620 updates_tx
4621 .unbounded_send(DownstreamUpdate::UpdateRepository(
4622 this.snapshot.clone(),
4623 ))
4624 .ok();
4625 }
4626 }
4627 cx.emit(RepositoryEvent::Updated {
4628 full_scan: false,
4629 new_instance: false,
4630 });
4631 })
4632 },
4633 );
4634 }
4635
4636     /// Returns the currently running git command, if any, along with when it started.
4637 pub fn current_job(&self) -> Option<JobInfo> {
4638 self.active_jobs.values().next().cloned()
4639 }
4640
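    /// Enqueues a no-op job. The returned receiver fires once every job queued
    /// before it has been processed, which makes it possible to wait for
    /// in-flight git work to settle.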
4641 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4642 self.send_job(None, |_, _| async {})
4643 }
4644}
4645
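/// Builds a permalink for a file that lives inside a crate downloaded from the
/// Cargo registry, using the `.cargo_vcs_info.json` and `Cargo.toml` that cargo
/// packages alongside the sources to recover the upstream repository and
/// commit.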
4646fn get_permalink_in_rust_registry_src(
4647 provider_registry: Arc<GitHostingProviderRegistry>,
4648 path: PathBuf,
4649 selection: Range<u32>,
4650) -> Result<url::Url> {
4651 #[derive(Deserialize)]
4652 struct CargoVcsGit {
4653 sha1: String,
4654 }
4655
4656 #[derive(Deserialize)]
4657 struct CargoVcsInfo {
4658 git: CargoVcsGit,
4659 path_in_vcs: String,
4660 }
4661
4662 #[derive(Deserialize)]
4663 struct CargoPackage {
4664 repository: String,
4665 }
4666
4667 #[derive(Deserialize)]
4668 struct CargoToml {
4669 package: CargoPackage,
4670 }
4671
4672 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4673 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4674 Some((dir, json))
4675 }) else {
4676 bail!("No .cargo_vcs_info.json found in parent directories")
4677 };
4678 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4679 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4680 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4681 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4682 .context("parsing package.repository field of manifest")?;
4683 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4684 let permalink = provider.build_permalink(
4685 remote,
4686 BuildPermalinkParams {
4687 sha: &cargo_vcs_info.git.sha1,
4688 path: &path.to_string_lossy(),
4689 selection: Some(selection),
4690 },
4691 );
4692 Ok(permalink)
4693}
4694
4695fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4696 let Some(blame) = blame else {
4697 return proto::BlameBufferResponse {
4698 blame_response: None,
4699 };
4700 };
4701
4702 let entries = blame
4703 .entries
4704 .into_iter()
4705 .map(|entry| proto::BlameEntry {
4706 sha: entry.sha.as_bytes().into(),
4707 start_line: entry.range.start,
4708 end_line: entry.range.end,
4709 original_line_number: entry.original_line_number,
4710 author: entry.author,
4711 author_mail: entry.author_mail,
4712 author_time: entry.author_time,
4713 author_tz: entry.author_tz,
4714 committer: entry.committer_name,
4715 committer_mail: entry.committer_email,
4716 committer_time: entry.committer_time,
4717 committer_tz: entry.committer_tz,
4718 summary: entry.summary,
4719 previous: entry.previous,
4720 filename: entry.filename,
4721 })
4722 .collect::<Vec<_>>();
4723
4724 let messages = blame
4725 .messages
4726 .into_iter()
4727 .map(|(oid, message)| proto::CommitMessage {
4728 oid: oid.as_bytes().into(),
4729 message,
4730 })
4731 .collect::<Vec<_>>();
4732
4733 proto::BlameBufferResponse {
4734 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4735 entries,
4736 messages,
4737 remote_url: blame.remote_url,
4738 }),
4739 }
4740}
4741
4742fn deserialize_blame_buffer_response(
4743 response: proto::BlameBufferResponse,
4744) -> Option<git::blame::Blame> {
4745 let response = response.blame_response?;
4746 let entries = response
4747 .entries
4748 .into_iter()
4749 .filter_map(|entry| {
4750 Some(git::blame::BlameEntry {
4751 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4752 range: entry.start_line..entry.end_line,
4753 original_line_number: entry.original_line_number,
4754 committer_name: entry.committer,
4755 committer_time: entry.committer_time,
4756 committer_tz: entry.committer_tz,
4757 committer_email: entry.committer_mail,
4758 author: entry.author,
4759 author_mail: entry.author_mail,
4760 author_time: entry.author_time,
4761 author_tz: entry.author_tz,
4762 summary: entry.summary,
4763 previous: entry.previous,
4764 filename: entry.filename,
4765 })
4766 })
4767 .collect::<Vec<_>>();
4768
4769 let messages = response
4770 .messages
4771 .into_iter()
4772 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4773 .collect::<HashMap<_, _>>();
4774
4775 Some(Blame {
4776 entries,
4777 messages,
4778 remote_url: response.remote_url,
4779 })
4780}
4781
4782fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4783 proto::Branch {
4784 is_head: branch.is_head,
4785 ref_name: branch.ref_name.to_string(),
4786 unix_timestamp: branch
4787 .most_recent_commit
4788 .as_ref()
4789 .map(|commit| commit.commit_timestamp as u64),
4790 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4791 ref_name: upstream.ref_name.to_string(),
4792 tracking: upstream
4793 .tracking
4794 .status()
4795 .map(|upstream| proto::UpstreamTracking {
4796 ahead: upstream.ahead as u64,
4797 behind: upstream.behind as u64,
4798 }),
4799 }),
4800 most_recent_commit: branch
4801 .most_recent_commit
4802 .as_ref()
4803 .map(|commit| proto::CommitSummary {
4804 sha: commit.sha.to_string(),
4805 subject: commit.subject.to_string(),
4806 commit_timestamp: commit.commit_timestamp,
4807 }),
4808 }
4809}
4810
4811fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
4812 git::repository::Branch {
4813 is_head: proto.is_head,
4814 ref_name: proto.ref_name.clone().into(),
4815 upstream: proto
4816 .upstream
4817 .as_ref()
4818 .map(|upstream| git::repository::Upstream {
4819 ref_name: upstream.ref_name.to_string().into(),
4820 tracking: upstream
4821 .tracking
4822 .as_ref()
4823 .map(|tracking| {
4824 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
4825 ahead: tracking.ahead as u32,
4826 behind: tracking.behind as u32,
4827 })
4828 })
4829 .unwrap_or(git::repository::UpstreamTracking::Gone),
4830 }),
4831 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
4832 git::repository::CommitSummary {
4833 sha: commit.sha.to_string().into(),
4834 subject: commit.subject.to_string().into(),
4835 commit_timestamp: commit.commit_timestamp,
4836 has_parent: true,
4837 }
4838 }),
4839 }
4840}
4841
4842fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
4843 proto::GitCommitDetails {
4844 sha: commit.sha.to_string(),
4845 message: commit.message.to_string(),
4846 commit_timestamp: commit.commit_timestamp,
4847 author_email: commit.author_email.to_string(),
4848 author_name: commit.author_name.to_string(),
4849 }
4850}
4851
4852fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
4853 CommitDetails {
4854 sha: proto.sha.clone().into(),
4855 message: proto.message.clone().into(),
4856 commit_timestamp: proto.commit_timestamp,
4857 author_email: proto.author_email.clone().into(),
4858 author_name: proto.author_name.clone().into(),
4859 }
4860}
4861
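/// Recomputes a repository snapshot from scratch: branch, head commit, merge
/// state, remote URLs, and the full set of path statuses. Returns the new
/// snapshot together with the events that describe how it differs from
/// `prev_snapshot`.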
4862async fn compute_snapshot(
4863 id: RepositoryId,
4864 work_directory_abs_path: Arc<Path>,
4865 prev_snapshot: RepositorySnapshot,
4866 backend: Arc<dyn GitRepository>,
4867) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
4868 let mut events = Vec::new();
4869 let branches = backend.branches().await?;
4870 let branch = branches.into_iter().find(|branch| branch.is_head);
4871 let statuses = backend
4872 .status(std::slice::from_ref(&WORK_DIRECTORY_REPO_PATH))
4873 .await?;
4874 let statuses_by_path = SumTree::from_iter(
4875 statuses
4876 .entries
4877 .iter()
4878 .map(|(repo_path, status)| StatusEntry {
4879 repo_path: repo_path.clone(),
4880 status: *status,
4881 }),
4882 &(),
4883 );
4884 let (merge_details, merge_heads_changed) =
4885 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
4886 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
4887
4888 if merge_heads_changed
4889 || branch != prev_snapshot.branch
4890 || statuses_by_path != prev_snapshot.statuses_by_path
4891 {
4892 events.push(RepositoryEvent::Updated {
4893 full_scan: true,
4894 new_instance: false,
4895 });
4896 }
4897
4898 // Cache merge conflict paths so they don't change from staging/unstaging,
4899 // until the merge heads change (at commit time, etc.).
4900 if merge_heads_changed {
4901 events.push(RepositoryEvent::MergeHeadsChanged);
4902 }
4903
4904     // The head commit is useful when `branch` is `None`, e.g. in a detached HEAD state.
4905 let head_commit = match backend.head_sha().await {
4906 Some(head_sha) => backend.show(head_sha).await.log_err(),
4907 None => None,
4908 };
4909
4910 // Used by edit prediction data collection
4911 let remote_origin_url = backend.remote_url("origin");
4912 let remote_upstream_url = backend.remote_url("upstream");
4913
4914 let snapshot = RepositorySnapshot {
4915 id,
4916 statuses_by_path,
4917 work_directory_abs_path,
4918 scan_id: prev_snapshot.scan_id + 1,
4919 branch,
4920 head_commit,
4921 merge: merge_details,
4922 remote_origin_url,
4923 remote_upstream_url,
4924 };
4925
4926 Ok((snapshot, events))
4927}
4928
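/// Converts a protobuf file status into a `FileStatus`, falling back to the
/// legacy `simple_status` code when the structured variant is absent.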
4929fn status_from_proto(
4930 simple_status: i32,
4931 status: Option<proto::GitFileStatus>,
4932) -> anyhow::Result<FileStatus> {
4933 use proto::git_file_status::Variant;
4934
4935 let Some(variant) = status.and_then(|status| status.variant) else {
4936 let code = proto::GitStatus::from_i32(simple_status)
4937 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
4938 let result = match code {
4939 proto::GitStatus::Added => TrackedStatus {
4940 worktree_status: StatusCode::Added,
4941 index_status: StatusCode::Unmodified,
4942 }
4943 .into(),
4944 proto::GitStatus::Modified => TrackedStatus {
4945 worktree_status: StatusCode::Modified,
4946 index_status: StatusCode::Unmodified,
4947 }
4948 .into(),
4949 proto::GitStatus::Conflict => UnmergedStatus {
4950 first_head: UnmergedStatusCode::Updated,
4951 second_head: UnmergedStatusCode::Updated,
4952 }
4953 .into(),
4954 proto::GitStatus::Deleted => TrackedStatus {
4955 worktree_status: StatusCode::Deleted,
4956 index_status: StatusCode::Unmodified,
4957 }
4958 .into(),
4959 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
4960 };
4961 return Ok(result);
4962 };
4963
4964 let result = match variant {
4965 Variant::Untracked(_) => FileStatus::Untracked,
4966 Variant::Ignored(_) => FileStatus::Ignored,
4967 Variant::Unmerged(unmerged) => {
4968 let [first_head, second_head] =
4969 [unmerged.first_head, unmerged.second_head].map(|head| {
4970 let code = proto::GitStatus::from_i32(head)
4971 .with_context(|| format!("Invalid git status code: {head}"))?;
4972 let result = match code {
4973 proto::GitStatus::Added => UnmergedStatusCode::Added,
4974 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
4975 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
4976 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
4977 };
4978 Ok(result)
4979 });
4980 let [first_head, second_head] = [first_head?, second_head?];
4981 UnmergedStatus {
4982 first_head,
4983 second_head,
4984 }
4985 .into()
4986 }
4987 Variant::Tracked(tracked) => {
4988 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
4989 .map(|status| {
4990 let code = proto::GitStatus::from_i32(status)
4991 .with_context(|| format!("Invalid git status code: {status}"))?;
4992 let result = match code {
4993 proto::GitStatus::Modified => StatusCode::Modified,
4994 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
4995 proto::GitStatus::Added => StatusCode::Added,
4996 proto::GitStatus::Deleted => StatusCode::Deleted,
4997 proto::GitStatus::Renamed => StatusCode::Renamed,
4998 proto::GitStatus::Copied => StatusCode::Copied,
4999 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5000 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5001 };
5002 Ok(result)
5003 });
5004 let [index_status, worktree_status] = [index_status?, worktree_status?];
5005 TrackedStatus {
5006 index_status,
5007 worktree_status,
5008 }
5009 .into()
5010 }
5011 };
5012 Ok(result)
5013}
5014
5015fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5016 use proto::git_file_status::{Tracked, Unmerged, Variant};
5017
5018 let variant = match status {
5019 FileStatus::Untracked => Variant::Untracked(Default::default()),
5020 FileStatus::Ignored => Variant::Ignored(Default::default()),
5021 FileStatus::Unmerged(UnmergedStatus {
5022 first_head,
5023 second_head,
5024 }) => Variant::Unmerged(Unmerged {
5025 first_head: unmerged_status_to_proto(first_head),
5026 second_head: unmerged_status_to_proto(second_head),
5027 }),
5028 FileStatus::Tracked(TrackedStatus {
5029 index_status,
5030 worktree_status,
5031 }) => Variant::Tracked(Tracked {
5032 index_status: tracked_status_to_proto(index_status),
5033 worktree_status: tracked_status_to_proto(worktree_status),
5034 }),
5035 };
5036 proto::GitFileStatus {
5037 variant: Some(variant),
5038 }
5039}
5040
5041fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5042 match code {
5043 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5044 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5045 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5046 }
5047}
5048
5049fn tracked_status_to_proto(code: StatusCode) -> i32 {
5050 match code {
5051 StatusCode::Added => proto::GitStatus::Added as _,
5052 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5053 StatusCode::Modified => proto::GitStatus::Modified as _,
5054 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5055 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5056 StatusCode::Copied => proto::GitStatus::Copied as _,
5057 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5058 }
5059}