1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::AskPassDelegate;
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 status::{
32 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
33 },
34};
35use gpui::{
36 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
37 WeakEntity,
38};
39use language::{
40 Buffer, BufferEvent, Language, LanguageRegistry,
41 proto::{deserialize_version, serialize_version},
42};
43use parking_lot::Mutex;
44use postage::stream::Stream as _;
45use rpc::{
46 AnyProtoClient, TypedEnvelope,
47 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
48};
49use serde::Deserialize;
50use std::{
51 cmp::Ordering,
52 collections::{BTreeSet, VecDeque},
53 future::Future,
54 mem,
55 ops::Range,
56 path::{Path, PathBuf},
57 sync::{
58 Arc,
59 atomic::{self, AtomicU64},
60 },
61 time::Instant,
62};
63use sum_tree::{Edit, SumTree, TreeSet};
64use text::{Bias, BufferId};
65use util::{ResultExt, debug_panic, post_inc};
66use worktree::{
67 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
68 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
69};
70
/// Central store for all git state in a project: repository discovery,
/// per-buffer diffs, conflict tracking, and collab synchronization.
pub struct GitStore {
    /// Where authoritative git data lives: local, via SSH, or remote guest.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    /// All known repositories, keyed by store-assigned id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    /// The repository currently treated as active, if any.
    active_repo_id: Option<RepositoryId>,
    /// In-flight diff loads, shared so concurrent requests for the same
    /// (buffer, kind) pair await a single task instead of duplicating work.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Per-buffer git state (diffs, conflict sets), keyed by buffer id.
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    /// Diff entities shared with remote peers, per peer and buffer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
84
/// Diff entities that have been shared with a remote peer for one buffer.
#[derive(Default)]
struct SharedDiffs {
    // Diff against the index (staged) text, if shared.
    unstaged: Option<Entity<BufferDiff>>,
    // Diff against the committed (HEAD) text, if shared.
    uncommitted: Option<Entity<BufferDiff>>,
}
90
/// Git-related state tracked per open buffer: weak handles to its diffs and
/// conflict set, background recalculation tasks, and the base texts (HEAD and
/// index) those diffs are computed against.
struct BufferGitState {
    // Weak so that dropping the last strong handle releases the diff.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    // In-flight background work; replaced when a newer recalculation starts.
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders resolved when the next conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Observers can watch this to learn when recalculation starts/stops.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    // Cached base texts; `Arc` so recalculation tasks can share them cheaply.
    head_text: Option<Arc<String>>,
    index_text: Option<Arc<String>>,
    // Dirty flags consumed by the next diff recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
119
/// Describes which diff base texts changed for a buffer, and to what.
/// `None` payloads indicate the corresponding base text is absent
/// (e.g. the file does not exist in that revision).
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed to (possibly) different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same value.
    SetBoth(Option<String>),
}
130
/// Which base text a buffer diff is computed against.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Working copy vs. the index (staged text).
    Unstaged,
    /// Working copy vs. HEAD (committed text).
    Uncommitted,
}
136
/// Where the store's authoritative git data lives and how it is synchronized.
enum GitStoreState {
    /// Repositories are discovered and scanned on this machine.
    Local {
        /// Source of ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        /// Present while this project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    /// Git data comes from an SSH remote; this machine may itself forward
    /// updates to further downstream collaborators.
    Ssh {
        upstream_client: AnyProtoClient,
        upstream_project_id: ProjectId,
        /// Present while re-sharing updates downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
    /// Pure guest: all git data comes from the upstream collaborator.
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: ProjectId,
    },
}
154
/// A unit of work queued for the background task that relays repository
/// changes to downstream collaborators (see `LocalDownstreamState`).
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
159
/// Connection to downstream collaborators while a local project is shared.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    /// Queue feeding `_task`, which diffs snapshots and sends proto updates.
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    // Dropping this cancels the relay task when sharing stops.
    _task: Task<Result<()>>,
}
166
/// A point-in-time checkpoint across every repository in the store,
/// keyed by each repository's work-directory absolute path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
171
/// The git status of a single path within a repository.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
}
177
178impl StatusEntry {
179 fn to_proto(&self) -> proto::StatusEntry {
180 let simple_status = match self.status {
181 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
182 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
183 FileStatus::Tracked(TrackedStatus {
184 index_status,
185 worktree_status,
186 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
187 worktree_status
188 } else {
189 index_status
190 }),
191 };
192
193 proto::StatusEntry {
194 repo_path: self.repo_path.as_ref().to_proto(),
195 simple_status,
196 status: Some(status_to_proto(self.status)),
197 }
198 }
199}
200
201impl TryFrom<proto::StatusEntry> for StatusEntry {
202 type Error = anyhow::Error;
203
204 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
205 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
206 let status = status_from_proto(value.simple_status, value.status)?;
207 Ok(Self { repo_path, status })
208 }
209}
210
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    // Summarize by path (for ordering) plus an aggregated git status.
    fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.0.clone(),
            item_summary: self.status.summary(),
        }
    }
}
221
impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    // Entries are keyed (and deduplicated) by repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.0.clone())
    }
}
229
/// Store-assigned identifier for a repository (newtype over `u64`).
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
232
/// State of an in-progress merge in a repository.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    /// Paths currently in a conflicted (unmerged) state.
    pub conflicted_paths: TreeSet<RepoPath>,
    /// The prepared merge message, if one exists.
    pub message: Option<SharedString>,
    // NOTE(review): presumably the merge heads being combined; each entry may
    // be absent — confirm against the code that populates this.
    pub heads: Vec<Option<SharedString>>,
}
239
/// An immutable snapshot of a repository's observable state, cheap to clone
/// and suitable for diffing against a previous snapshot when building
/// incremental proto updates.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Git status for every path with a non-default status, ordered by path.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    /// Monotonic counter bumped on each scan of the repository.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
}
252
/// Identifier for a queued git job, unique within one `Repository`.
type JobId = u64;

/// Metadata describing a running git job, used for progress reporting.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    /// When the job began executing.
    pub start: Instant,
    /// Human-readable description of the job.
    pub message: SharedString,
}
260
/// A single git repository tracked by the [`GitStore`]. Dereferences to its
/// current [`RepositorySnapshot`].
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    /// Buffer backing the commit-message editor, lazily created.
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: BTreeSet<RepoPath>,
    /// Queue of serialized git jobs to run against this repository.
    job_sender: mpsc::UnboundedSender<GitJob>,
    /// Currently-running jobs, keyed by id, for progress display.
    active_jobs: HashMap<JobId, JobInfo>,
    /// Source of the next job id.
    job_id: JobId,
    /// Askpass delegates registered for in-flight remote operations.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
}
275
// Expose the snapshot's fields/methods directly on `Repository`.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
283
/// How a git job actually performs its work: against a local backend or by
/// forwarding requests to the upstream collaborator.
#[derive(Clone)]
pub enum RepositoryState {
    Local {
        backend: Arc<dyn GitRepository>,
        /// Environment variables passed to spawned git processes.
        environment: Arc<HashMap<String, String>>,
    },
    Remote {
        project_id: ProjectId,
        client: AnyProtoClient,
    },
}
295
/// Events emitted by a [`Repository`] when its snapshot changes.
#[derive(Clone, Debug)]
pub enum RepositoryEvent {
    // `full_scan`: whether the whole repository was rescanned;
    // `new_instance`: whether this entity was freshly created.
    Updated { full_scan: bool, new_instance: bool },
    /// The set of merge heads changed (e.g. a merge started or finished).
    MergeHeadsChanged,
}
301
/// Marker event emitted by a [`Repository`] when its active jobs change.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
304
/// Events emitted by the [`GitStore`] as repositories come and go or change.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    // NOTE(review): the trailing bool's meaning is not visible in this chunk —
    // confirm at the emit sites before documenting it.
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded(RepositoryId),
    RepositoryRemoved(RepositoryId),
    /// Writing to the git index failed; surfaced to the UI.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
315
// Event wiring: repositories emit snapshot and job updates; the store
// aggregates store-level events.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
319
/// A unit of git work queued on a repository's job channel.
pub struct GitJob {
    /// The work itself, given the repository state to run against.
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    /// Optional dedup key; see [`GitJobKey`].
    key: Option<GitJobKey>,
}
324
/// Keys identifying job categories so equivalent queued jobs can be
/// coalesced rather than run repeatedly.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    /// An index write for one specific path.
    WriteIndex(RepoPath),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
332
333impl GitStore {
334 pub fn local(
335 worktree_store: &Entity<WorktreeStore>,
336 buffer_store: Entity<BufferStore>,
337 environment: Entity<ProjectEnvironment>,
338 fs: Arc<dyn Fs>,
339 cx: &mut Context<Self>,
340 ) -> Self {
341 Self::new(
342 worktree_store.clone(),
343 buffer_store,
344 GitStoreState::Local {
345 next_repository_id: Arc::new(AtomicU64::new(1)),
346 downstream: None,
347 project_environment: environment,
348 fs,
349 },
350 cx,
351 )
352 }
353
354 pub fn remote(
355 worktree_store: &Entity<WorktreeStore>,
356 buffer_store: Entity<BufferStore>,
357 upstream_client: AnyProtoClient,
358 project_id: ProjectId,
359 cx: &mut Context<Self>,
360 ) -> Self {
361 Self::new(
362 worktree_store.clone(),
363 buffer_store,
364 GitStoreState::Remote {
365 upstream_client,
366 upstream_project_id: project_id,
367 },
368 cx,
369 )
370 }
371
372 pub fn ssh(
373 worktree_store: &Entity<WorktreeStore>,
374 buffer_store: Entity<BufferStore>,
375 upstream_client: AnyProtoClient,
376 cx: &mut Context<Self>,
377 ) -> Self {
378 Self::new(
379 worktree_store.clone(),
380 buffer_store,
381 GitStoreState::Ssh {
382 upstream_client,
383 upstream_project_id: ProjectId(SSH_PROJECT_ID),
384 downstream: None,
385 },
386 cx,
387 )
388 }
389
390 fn new(
391 worktree_store: Entity<WorktreeStore>,
392 buffer_store: Entity<BufferStore>,
393 state: GitStoreState,
394 cx: &mut Context<Self>,
395 ) -> Self {
396 let _subscriptions = vec![
397 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
398 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
399 ];
400
401 GitStore {
402 state,
403 buffer_store,
404 worktree_store,
405 repositories: HashMap::default(),
406 active_repo_id: None,
407 _subscriptions,
408 loading_diffs: HashMap::default(),
409 shared_diffs: HashMap::default(),
410 diffs: HashMap::default(),
411 }
412 }
413
    /// Registers all git-related RPC handlers on `client`, routing incoming
    /// requests/messages to the corresponding `handle_*` methods.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        // Fire-and-forget messages (no response expected) use message handlers.
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
    }
448
449 pub fn is_local(&self) -> bool {
450 matches!(self.state, GitStoreState::Local { .. })
451 }
452
    /// Begins sharing this store's repositories with downstream collaborators.
    ///
    /// For an SSH store, the current snapshots are sent immediately and the
    /// client is recorded for future updates. For a local store, a background
    /// task is spawned that receives snapshots on a channel, diffs each one
    /// against the last snapshot sent for that repository, and forwards
    /// (possibly split) proto updates to the client.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Ssh {
                downstream: downstream_client,
                ..
            } => {
                // Send a full initial update for every repository up front.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // `snapshots` tracks the last state sent per repository, so
                // subsequent updates can be sent as deltas.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the queue with the current snapshot of each repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        // Relay loop runs on the background executor; a send
                        // failure (`?`) ends the loop.
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send a delta.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send everything.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // Once the relay stops (channel closed or send error),
                        // drop the downstream connection state.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                // NOTE(review): message says "unshared" but this
                                // is `shared`'s cleanup path — likely copy-paste.
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
            GitStoreState::Remote { .. } => {
                // Guests never have downstream collaborators.
                debug_panic!("shared called on remote store");
            }
        }
    }
536
537 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
538 match &mut self.state {
539 GitStoreState::Local {
540 downstream: downstream_client,
541 ..
542 } => {
543 downstream_client.take();
544 }
545 GitStoreState::Ssh {
546 downstream: downstream_client,
547 ..
548 } => {
549 downstream_client.take();
550 }
551 GitStoreState::Remote { .. } => {
552 debug_panic!("unshared called on remote store");
553 }
554 }
555 self.shared_diffs.clear();
556 }
557
    /// Drops all diffs shared with `peer_id`, e.g. when that peer disconnects.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
561
562 pub fn active_repository(&self) -> Option<Entity<Repository>> {
563 self.active_repo_id
564 .as_ref()
565 .map(|id| self.repositories[&id].clone())
566 }
567
    /// Returns a diff of the buffer's contents against its index (staged)
    /// text, creating and caching it if necessary.
    ///
    /// If a live diff already exists, any in-progress recalculation is awaited
    /// before it is returned. Otherwise the staged text is loaded from the
    /// buffer's repository; concurrent callers share one loading task via
    /// `loading_diffs`.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        if let Some(diff_state) = self.diffs.get(&buffer_id) {
            if let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                // Existing diff: wait for any pending recalculation, then
                // hand it out.
                if let Some(task) =
                    diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
                {
                    return cx.background_executor().spawn(async move {
                        task.await;
                        Ok(unstaged_diff)
                    });
                }
                return Task::ready(Ok(unstaged_diff));
            }
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads per (buffer, kind) with a shared task.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        // The shared task's error is an `Arc`; rewrap it for this caller.
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
623
    /// Returns a diff of the buffer's contents against its committed (HEAD)
    /// text, creating and caching it if necessary.
    ///
    /// Mirrors [`Self::open_unstaged_diff`], but loads the committed text and
    /// keys the shared loading task by `DiffKind::Uncommitted`.
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(diff_state) = self.diffs.get(&buffer_id) {
            if let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                // Existing diff: wait for any pending recalculation first.
                if let Some(task) =
                    diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
                {
                    return cx.background_executor().spawn(async move {
                        task.await;
                        Ok(uncommitted_diff)
                    });
                }
                return Task::ready(Ok(uncommitted_diff));
            }
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads per (buffer, kind) with a shared task.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        // The shared task's error is an `Arc`; rewrap it for this caller.
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
675
    /// Completes a diff load: creates the [`BufferDiff`] entity, wires it into
    /// the buffer's [`BufferGitState`], applies the loaded base texts, and
    /// waits for the initial recalculation before returning.
    ///
    /// On error, the `loading_diffs` entry is removed so a later call can
    /// retry. An uncommitted diff is always given an unstaged diff as its
    /// secondary, creating an empty one if none exists yet.
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Clear the cached loading task so the failure isn't sticky.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff needs an unstaged diff as its
                        // secondary; reuse a live one or create an empty one.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                }

                // Apply the freshly loaded base texts and kick off the first
                // recalculation; `rx` resolves when it completes.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
745
746 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
747 let diff_state = self.diffs.get(&buffer_id)?;
748 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
749 }
750
751 pub fn get_uncommitted_diff(
752 &self,
753 buffer_id: BufferId,
754 cx: &App,
755 ) -> Option<Entity<BufferDiff>> {
756 let diff_state = self.diffs.get(&buffer_id)?;
757 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
758 }
759
    /// Returns the conflict set for `buffer`, creating one if necessary and
    /// (re)parsing the buffer's conflict markers in the background.
    ///
    /// A newly created set is seeded with whether the buffer's path is
    /// currently unmerged in its repository, and the store subscribes to it so
    /// conflict changes are rebroadcast as `GitStoreEvent::ConflictsUpdated`.
    pub fn open_conflict_set(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Entity<ConflictSet> {
        log::debug!("open conflict set");
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(git_state) = self.diffs.get(&buffer_id) {
            if let Some(conflict_set) = git_state
                .read(cx)
                .conflict_set
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                // Existing live set: refresh its markers and return it.
                let conflict_set = conflict_set.clone();
                let buffer_snapshot = buffer.read(cx).text_snapshot();

                git_state.update(cx, |state, cx| {
                    let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
                });

                return conflict_set;
            }
        }

        // Seed the new set with the repository's view of this path.
        let is_unmerged = self
            .repository_and_path_for_buffer_id(buffer_id, cx)
            .map_or(false, |(repo, path)| {
                repo.read(cx).snapshot.has_conflict(&path)
            });
        let git_store = cx.weak_entity();
        let buffer_git_state = self
            .diffs
            .entry(buffer_id)
            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
        let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));

        // Rebroadcast conflict-set changes as a store-level event.
        self._subscriptions
            .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
                cx.emit(GitStoreEvent::ConflictsUpdated);
            }));

        buffer_git_state.update(cx, |state, cx| {
            state.conflict_set = Some(conflict_set.downgrade());
            let buffer_snapshot = buffer.read(cx).text_snapshot();
            let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
        });

        conflict_set
    }
811
812 pub fn project_path_git_status(
813 &self,
814 project_path: &ProjectPath,
815 cx: &App,
816 ) -> Option<FileStatus> {
817 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
818 Some(repo.read(cx).status_for_path(&repo_path)?.status)
819 }
820
    /// Takes a checkpoint of every repository in the store concurrently,
    /// keyed by work-directory path; fails if any single checkpoint fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // The closure's `?` flattens the outer (task/channel) error
                // into the same `Result` as the checkpoint itself.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            // `zip` relies on the two vectors being pushed in lockstep above.
            Ok(GitStoreCheckpoint {
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
841
    /// Restores each repository to its state in `checkpoint`, matching
    /// repositories by work-directory path. Checkpoints for repositories that
    /// no longer exist are silently skipped; any restore failure fails the
    /// whole task.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // `?` flattens the outer task error into the restore result.
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
867
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// Returns `Ok(false)` immediately if `left` covers a repository `right`
    /// does not. Repositories missing from the store are skipped; otherwise
    /// each pair is compared concurrently and all must match.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    // `?` flattens the outer task error into the comparison.
                    tasks.push(async move { compare.await? });
                }
            } else {
                // `left` has a repository `right` lacks: not equal.
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
906
    /// Blames a buffer.
    ///
    /// Uses `version` (or the buffer's current version) to pick the content to
    /// blame. Locally this runs the backend's blame; remotely it forwards a
    /// `BlameBuffer` request upstream. Fails if the buffer is not in a known
    /// git repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut App,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot the content now so the job can run off the main thread.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version).clone(),
            None => buffer.as_rope().clone(),
        };
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, _| async move {
                match state {
                    RepositoryState::Local { backend, .. } => backend
                        .blame(repo_path.clone(), content)
                        .await
                        .with_context(|| format!("Failed to blame {:?}", repo_path.0))
                        .map(Some),
                    RepositoryState::Remote { project_id, client } => {
                        let response = client
                            .request(proto::BlameBuffer {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                version: serialize_version(&version),
                            })
                            .await?;
                        Ok(deserialize_blame_buffer_response(response))
                    }
                }
            })
        });

        // Flatten the job-channel error into the blame result.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
951
    /// Builds a web permalink to `selection` (a line range) in `buffer`.
    ///
    /// Locally: resolves the branch's upstream remote (defaulting to
    /// "origin"), parses its URL into a hosting provider, and builds a
    /// permalink at the current HEAD SHA. Remotely: forwards a
    /// `GetPermalinkToLine` request upstream. If the buffer is not in a git
    /// repository, falls back to building a permalink for Rust sources found
    /// in the Cargo registry.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust".into())
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
                return Task::ready(Err(anyhow!("no permalink available")));
            };
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });

            // TODO remote case
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the branch's upstream remote; fall back to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local { backend, .. } => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global)?;

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        let path = repo_path.to_str().with_context(|| {
                            format!("converting repo path {repo_path:?} to string")
                        })?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams {
                                sha: &sha,
                                path,
                                selection: Some(selection),
                            },
                        ))
                    }
                    RepositoryState::Remote { project_id, client } => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        // Flatten the job-channel error into the permalink result.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1047
1048 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1049 match &self.state {
1050 GitStoreState::Local {
1051 downstream: downstream_client,
1052 ..
1053 } => downstream_client
1054 .as_ref()
1055 .map(|state| (state.client.clone(), state.project_id)),
1056 GitStoreState::Ssh {
1057 downstream: downstream_client,
1058 ..
1059 } => downstream_client.clone(),
1060 GitStoreState::Remote { .. } => None,
1061 }
1062 }
1063
1064 fn upstream_client(&self) -> Option<AnyProtoClient> {
1065 match &self.state {
1066 GitStoreState::Local { .. } => None,
1067 GitStoreState::Ssh {
1068 upstream_client, ..
1069 }
1070 | GitStoreState::Remote {
1071 upstream_client, ..
1072 } => Some(upstream_client.clone()),
1073 }
1074 }
1075
    /// Reacts to worktree-store events on a *local* store (no-op otherwise):
    /// entry updates are routed to the affected repositories as path changes,
    /// and git-repository updates trigger repository add/update/remove plus a
    /// refresh of buffer diff bases.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        // Only a local store scans repositories itself.
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Resolve which repositories the updated paths belong to,
                    // then notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            })
                            .ok();
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible (e.g. single-file) worktrees don't get repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            _ => {}
        }
    }
1143
    /// Handles an event emitted by one of our repository entities.
    ///
    /// Re-checks conflict state for every open diff whose buffer belongs to
    /// the updated repository, then re-emits the event at the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
            {
                if buffer_repo == repo {
                    diff.update(cx, |diff, cx| {
                        if let Some(conflict_set) = &diff.conflict_set {
                            // Sync the conflict flag with the repository's
                            // latest status for this path.
                            let conflict_status_changed =
                                conflict_set.update(cx, |conflict_set, cx| {
                                    let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                    conflict_set.set_has_conflict(has_conflict, cx)
                                })?;
                            if conflict_status_changed {
                                let buffer_store = self.buffer_store.read(cx);
                                if let Some(buffer) = buffer_store.get(*buffer_id) {
                                    // Conflict markers may have appeared or
                                    // disappeared; rescan the buffer text.
                                    let _ = diff.reparse_conflict_markers(
                                        buffer.read(cx).text_snapshot(),
                                        cx,
                                    );
                                }
                            }
                        }
                        anyhow::Ok(())
                    })
                    .ok();
                }
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1186
    /// Forwards a repository's job-queue change as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1190
    /// Updates our list of repositories and schedules git scans in response
    /// to a notification from a worktree.
    fn update_repositories_from_worktree(
        &mut self,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Try to match the update to a repository we already track, by
            // either its old or its new work-directory path.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // Repository moved or changed in place: record the new
                    // location and rescan it.
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // No new path means the repository went away.
                    removed_ids.push(*id);
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // A brand-new repository: allocate an id from the shared
                // counter and create a local Repository entity for it.
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        repository_dir_abs_path.clone(),
                        common_dir_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        git_store,
                        cx,
                    );
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                cx.emit(GitStoreEvent::RepositoryAdded(id));
                // The first repository discovered becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            // Tell downstream collaborators the repository is gone.
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1272
    /// Keeps per-buffer diff state in sync with buffer lifecycle events.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Watch each new buffer so a language change can update its
                // diff state.
                cx.subscribe(&buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                // A peer closed the buffer: drop only that peer's shared diff.
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // The buffer is gone entirely: drop local and shared state.
                self.diffs.remove(&buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }

            _ => {}
        }
    }
1308
    /// Recomputes diffs and conflict markers for the given buffers.
    ///
    /// Buffers without diff state are skipped. Returns a future that resolves
    /// once every scheduled recalculation has completed.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                // Conflict markers are reparsed alongside the diff itself.
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1334
    /// Responds to hunks being staged or unstaged in a diff by writing the
    /// new index text back to the owning repository's index.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Count this operation so later index updates can be matched
                // to the staging request that triggered them.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.display());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // On failure, roll back the pending hunk state and
                        // surface the error to listeners.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1374
    /// Reloads buffer diff bases for every repository touched by a batch of
    /// worktree git updates. Local projects only (see the `debug_assert!`).
    fn local_worktree_git_repos_changed(
        &mut self,
        worktree: Entity<Worktree>,
        changed_repos: &UpdatedGitRepositoriesSet,
        cx: &mut Context<Self>,
    ) {
        log::debug!("local worktree repos changed");
        debug_assert!(worktree.read(cx).is_local());

        for repository in self.repositories.values() {
            repository.update(cx, |repository, cx| {
                let repo_abs_path = &repository.work_directory_abs_path;
                // A repository is affected when its work directory matches
                // either the old or the new path of any update in the batch.
                if changed_repos.iter().any(|update| {
                    update.old_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
                        || update.new_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
                }) {
                    repository.reload_buffer_diff_bases(cx);
                }
            });
        }
    }
1396
    /// All repositories tracked by this store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1400
1401 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1402 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1403 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1404 Some(status.status)
1405 }
1406
1407 pub fn repository_and_path_for_buffer_id(
1408 &self,
1409 buffer_id: BufferId,
1410 cx: &App,
1411 ) -> Option<(Entity<Repository>, RepoPath)> {
1412 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1413 let project_path = buffer.read(cx).project_path(cx)?;
1414 self.repository_and_path_for_project_path(&project_path, cx)
1415 }
1416
1417 pub fn repository_and_path_for_project_path(
1418 &self,
1419 path: &ProjectPath,
1420 cx: &App,
1421 ) -> Option<(Entity<Repository>, RepoPath)> {
1422 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1423 self.repositories
1424 .values()
1425 .filter_map(|repo| {
1426 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1427 Some((repo.clone(), repo_path))
1428 })
1429 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1430 }
1431
    /// Initializes a new git repository at `path`.
    ///
    /// Local projects run `git init` through the filesystem abstraction; SSH
    /// and remote projects forward the request to the upstream host.
    pub fn git_init(
        &self,
        path: Arc<Path>,
        fallback_branch_name: String,
        cx: &App,
    ) -> Task<Result<()>> {
        match &self.state {
            GitStoreState::Local { fs, .. } => {
                let fs = fs.clone();
                cx.background_executor()
                    .spawn(async move { fs.git_init(&path, fallback_branch_name) })
            }
            GitStoreState::Ssh {
                upstream_client,
                upstream_project_id: project_id,
                ..
            }
            | GitStoreState::Remote {
                upstream_client,
                upstream_project_id: project_id,
                ..
            } => {
                let client = upstream_client.clone();
                let project_id = *project_id;
                cx.background_executor().spawn(async move {
                    client
                        .request(proto::GitInit {
                            project_id: project_id.0,
                            abs_path: path.to_string_lossy().to_string(),
                            fallback_branch_name,
                        })
                        .await?;
                    Ok(())
                })
            }
        }
    }
1469
1470 pub fn git_clone(
1471 &self,
1472 repo: String,
1473 path: impl Into<Arc<std::path::Path>>,
1474 cx: &App,
1475 ) -> Task<Result<()>> {
1476 let path = path.into();
1477 match &self.state {
1478 GitStoreState::Local { fs, .. } => {
1479 let fs = fs.clone();
1480 cx.background_executor()
1481 .spawn(async move { fs.git_clone(&repo, &path).await })
1482 }
1483 GitStoreState::Ssh {
1484 upstream_client,
1485 upstream_project_id,
1486 ..
1487 } => {
1488 let request = upstream_client.request(proto::GitClone {
1489 project_id: upstream_project_id.0,
1490 abs_path: path.to_string_lossy().to_string(),
1491 remote_repo: repo,
1492 });
1493
1494 cx.background_spawn(async move {
1495 let result = request.await?;
1496
1497 match result.success {
1498 true => Ok(()),
1499 false => Err(anyhow!("Git Clone failed")),
1500 }
1501 })
1502 }
1503 GitStoreState::Remote { .. } => {
1504 Task::ready(Err(anyhow!("Git Clone isn't supported for remote users")))
1505 }
1506 }
1507 }
1508
1509 async fn handle_update_repository(
1510 this: Entity<Self>,
1511 envelope: TypedEnvelope<proto::UpdateRepository>,
1512 mut cx: AsyncApp,
1513 ) -> Result<()> {
1514 this.update(&mut cx, |this, cx| {
1515 let mut update = envelope.payload;
1516
1517 let id = RepositoryId::from_proto(update.id);
1518 let client = this
1519 .upstream_client()
1520 .context("no upstream client")?
1521 .clone();
1522
1523 let mut is_new = false;
1524 let repo = this.repositories.entry(id).or_insert_with(|| {
1525 is_new = true;
1526 let git_store = cx.weak_entity();
1527 cx.new(|cx| {
1528 Repository::remote(
1529 id,
1530 Path::new(&update.abs_path).into(),
1531 ProjectId(update.project_id),
1532 client,
1533 git_store,
1534 cx,
1535 )
1536 })
1537 });
1538 if is_new {
1539 this._subscriptions
1540 .push(cx.subscribe(&repo, Self::on_repository_event))
1541 }
1542
1543 repo.update(cx, {
1544 let update = update.clone();
1545 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1546 })?;
1547
1548 this.active_repo_id.get_or_insert_with(|| {
1549 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1550 id
1551 });
1552
1553 if let Some((client, project_id)) = this.downstream_client() {
1554 update.project_id = project_id.to_proto();
1555 client.send(update).log_err();
1556 }
1557 Ok(())
1558 })?
1559 }
1560
    /// Handles a `RemoveRepository` message from upstream: drops the
    /// repository, re-broadcasts the removal downstream, and clears the
    /// active-repository selection if it pointed at the removed repo.
    async fn handle_remove_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;
            let id = RepositoryId::from_proto(update.id);
            this.repositories.remove(&id);
            if let Some((client, project_id)) = this.downstream_client() {
                // Re-address the message to our project before forwarding.
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            if this.active_repo_id == Some(id) {
                this.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            cx.emit(GitStoreEvent::RepositoryRemoved(id));
        })
    }
1581
1582 async fn handle_git_init(
1583 this: Entity<Self>,
1584 envelope: TypedEnvelope<proto::GitInit>,
1585 cx: AsyncApp,
1586 ) -> Result<proto::Ack> {
1587 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1588 let name = envelope.payload.fallback_branch_name;
1589 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1590 .await?;
1591
1592 Ok(proto::Ack {})
1593 }
1594
1595 async fn handle_git_clone(
1596 this: Entity<Self>,
1597 envelope: TypedEnvelope<proto::GitClone>,
1598 cx: AsyncApp,
1599 ) -> Result<proto::GitCloneResponse> {
1600 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1601 let repo_name = envelope.payload.remote_repo;
1602 let result = cx
1603 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1604 .await;
1605
1606 Ok(proto::GitCloneResponse {
1607 success: result.is_ok(),
1608 })
1609 }
1610
1611 async fn handle_fetch(
1612 this: Entity<Self>,
1613 envelope: TypedEnvelope<proto::Fetch>,
1614 mut cx: AsyncApp,
1615 ) -> Result<proto::RemoteMessageResponse> {
1616 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1617 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1618 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1619 let askpass_id = envelope.payload.askpass_id;
1620
1621 let askpass = make_remote_delegate(
1622 this,
1623 envelope.payload.project_id,
1624 repository_id,
1625 askpass_id,
1626 &mut cx,
1627 );
1628
1629 let remote_output = repository_handle
1630 .update(&mut cx, |repository_handle, cx| {
1631 repository_handle.fetch(fetch_options, askpass, cx)
1632 })?
1633 .await??;
1634
1635 Ok(proto::RemoteMessageResponse {
1636 stdout: remote_output.stdout,
1637 stderr: remote_output.stderr,
1638 })
1639 }
1640
1641 async fn handle_push(
1642 this: Entity<Self>,
1643 envelope: TypedEnvelope<proto::Push>,
1644 mut cx: AsyncApp,
1645 ) -> Result<proto::RemoteMessageResponse> {
1646 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1647 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1648
1649 let askpass_id = envelope.payload.askpass_id;
1650 let askpass = make_remote_delegate(
1651 this,
1652 envelope.payload.project_id,
1653 repository_id,
1654 askpass_id,
1655 &mut cx,
1656 );
1657
1658 let options = envelope
1659 .payload
1660 .options
1661 .as_ref()
1662 .map(|_| match envelope.payload.options() {
1663 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1664 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1665 });
1666
1667 let branch_name = envelope.payload.branch_name.into();
1668 let remote_name = envelope.payload.remote_name.into();
1669
1670 let remote_output = repository_handle
1671 .update(&mut cx, |repository_handle, cx| {
1672 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1673 })?
1674 .await??;
1675 Ok(proto::RemoteMessageResponse {
1676 stdout: remote_output.stdout,
1677 stderr: remote_output.stderr,
1678 })
1679 }
1680
1681 async fn handle_pull(
1682 this: Entity<Self>,
1683 envelope: TypedEnvelope<proto::Pull>,
1684 mut cx: AsyncApp,
1685 ) -> Result<proto::RemoteMessageResponse> {
1686 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1687 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1688 let askpass_id = envelope.payload.askpass_id;
1689 let askpass = make_remote_delegate(
1690 this,
1691 envelope.payload.project_id,
1692 repository_id,
1693 askpass_id,
1694 &mut cx,
1695 );
1696
1697 let branch_name = envelope.payload.branch_name.into();
1698 let remote_name = envelope.payload.remote_name.into();
1699
1700 let remote_message = repository_handle
1701 .update(&mut cx, |repository_handle, cx| {
1702 repository_handle.pull(branch_name, remote_name, askpass, cx)
1703 })?
1704 .await??;
1705
1706 Ok(proto::RemoteMessageResponse {
1707 stdout: remote_message.stdout,
1708 stderr: remote_message.stderr,
1709 })
1710 }
1711
1712 async fn handle_stage(
1713 this: Entity<Self>,
1714 envelope: TypedEnvelope<proto::Stage>,
1715 mut cx: AsyncApp,
1716 ) -> Result<proto::Ack> {
1717 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1718 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1719
1720 let entries = envelope
1721 .payload
1722 .paths
1723 .into_iter()
1724 .map(PathBuf::from)
1725 .map(RepoPath::new)
1726 .collect();
1727
1728 repository_handle
1729 .update(&mut cx, |repository_handle, cx| {
1730 repository_handle.stage_entries(entries, cx)
1731 })?
1732 .await?;
1733 Ok(proto::Ack {})
1734 }
1735
1736 async fn handle_unstage(
1737 this: Entity<Self>,
1738 envelope: TypedEnvelope<proto::Unstage>,
1739 mut cx: AsyncApp,
1740 ) -> Result<proto::Ack> {
1741 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1742 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1743
1744 let entries = envelope
1745 .payload
1746 .paths
1747 .into_iter()
1748 .map(PathBuf::from)
1749 .map(RepoPath::new)
1750 .collect();
1751
1752 repository_handle
1753 .update(&mut cx, |repository_handle, cx| {
1754 repository_handle.unstage_entries(entries, cx)
1755 })?
1756 .await?;
1757
1758 Ok(proto::Ack {})
1759 }
1760
1761 async fn handle_stash(
1762 this: Entity<Self>,
1763 envelope: TypedEnvelope<proto::Stash>,
1764 mut cx: AsyncApp,
1765 ) -> Result<proto::Ack> {
1766 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1767 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1768
1769 let entries = envelope
1770 .payload
1771 .paths
1772 .into_iter()
1773 .map(PathBuf::from)
1774 .map(RepoPath::new)
1775 .collect();
1776
1777 repository_handle
1778 .update(&mut cx, |repository_handle, cx| {
1779 repository_handle.stash_entries(entries, cx)
1780 })?
1781 .await?;
1782
1783 Ok(proto::Ack {})
1784 }
1785
1786 async fn handle_stash_pop(
1787 this: Entity<Self>,
1788 envelope: TypedEnvelope<proto::StashPop>,
1789 mut cx: AsyncApp,
1790 ) -> Result<proto::Ack> {
1791 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1792 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1793
1794 repository_handle
1795 .update(&mut cx, |repository_handle, cx| {
1796 repository_handle.stash_pop(cx)
1797 })?
1798 .await?;
1799
1800 Ok(proto::Ack {})
1801 }
1802
1803 async fn handle_set_index_text(
1804 this: Entity<Self>,
1805 envelope: TypedEnvelope<proto::SetIndexText>,
1806 mut cx: AsyncApp,
1807 ) -> Result<proto::Ack> {
1808 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1809 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1810 let repo_path = RepoPath::from_str(&envelope.payload.path);
1811
1812 repository_handle
1813 .update(&mut cx, |repository_handle, cx| {
1814 repository_handle.spawn_set_index_text_job(
1815 repo_path,
1816 envelope.payload.text,
1817 None,
1818 cx,
1819 )
1820 })?
1821 .await??;
1822 Ok(proto::Ack {})
1823 }
1824
1825 async fn handle_commit(
1826 this: Entity<Self>,
1827 envelope: TypedEnvelope<proto::Commit>,
1828 mut cx: AsyncApp,
1829 ) -> Result<proto::Ack> {
1830 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1831 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1832
1833 let message = SharedString::from(envelope.payload.message);
1834 let name = envelope.payload.name.map(SharedString::from);
1835 let email = envelope.payload.email.map(SharedString::from);
1836 let options = envelope.payload.options.unwrap_or_default();
1837
1838 repository_handle
1839 .update(&mut cx, |repository_handle, cx| {
1840 repository_handle.commit(
1841 message,
1842 name.zip(email),
1843 CommitOptions {
1844 amend: options.amend,
1845 signoff: options.signoff,
1846 },
1847 cx,
1848 )
1849 })?
1850 .await??;
1851 Ok(proto::Ack {})
1852 }
1853
1854 async fn handle_get_remotes(
1855 this: Entity<Self>,
1856 envelope: TypedEnvelope<proto::GetRemotes>,
1857 mut cx: AsyncApp,
1858 ) -> Result<proto::GetRemotesResponse> {
1859 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1860 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1861
1862 let branch_name = envelope.payload.branch_name;
1863
1864 let remotes = repository_handle
1865 .update(&mut cx, |repository_handle, _| {
1866 repository_handle.get_remotes(branch_name)
1867 })?
1868 .await??;
1869
1870 Ok(proto::GetRemotesResponse {
1871 remotes: remotes
1872 .into_iter()
1873 .map(|remotes| proto::get_remotes_response::Remote {
1874 name: remotes.name.to_string(),
1875 })
1876 .collect::<Vec<_>>(),
1877 })
1878 }
1879
1880 async fn handle_get_branches(
1881 this: Entity<Self>,
1882 envelope: TypedEnvelope<proto::GitGetBranches>,
1883 mut cx: AsyncApp,
1884 ) -> Result<proto::GitBranchesResponse> {
1885 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1886 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1887
1888 let branches = repository_handle
1889 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1890 .await??;
1891
1892 Ok(proto::GitBranchesResponse {
1893 branches: branches
1894 .into_iter()
1895 .map(|branch| branch_to_proto(&branch))
1896 .collect::<Vec<_>>(),
1897 })
1898 }
1899 async fn handle_get_default_branch(
1900 this: Entity<Self>,
1901 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1902 mut cx: AsyncApp,
1903 ) -> Result<proto::GetDefaultBranchResponse> {
1904 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1905 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1906
1907 let branch = repository_handle
1908 .update(&mut cx, |repository_handle, _| {
1909 repository_handle.default_branch()
1910 })?
1911 .await??
1912 .map(Into::into);
1913
1914 Ok(proto::GetDefaultBranchResponse { branch })
1915 }
1916 async fn handle_create_branch(
1917 this: Entity<Self>,
1918 envelope: TypedEnvelope<proto::GitCreateBranch>,
1919 mut cx: AsyncApp,
1920 ) -> Result<proto::Ack> {
1921 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1922 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1923 let branch_name = envelope.payload.branch_name;
1924
1925 repository_handle
1926 .update(&mut cx, |repository_handle, _| {
1927 repository_handle.create_branch(branch_name)
1928 })?
1929 .await??;
1930
1931 Ok(proto::Ack {})
1932 }
1933
1934 async fn handle_change_branch(
1935 this: Entity<Self>,
1936 envelope: TypedEnvelope<proto::GitChangeBranch>,
1937 mut cx: AsyncApp,
1938 ) -> Result<proto::Ack> {
1939 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1940 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1941 let branch_name = envelope.payload.branch_name;
1942
1943 repository_handle
1944 .update(&mut cx, |repository_handle, _| {
1945 repository_handle.change_branch(branch_name)
1946 })?
1947 .await??;
1948
1949 Ok(proto::Ack {})
1950 }
1951
1952 async fn handle_rename_branch(
1953 this: Entity<Self>,
1954 envelope: TypedEnvelope<proto::GitRenameBranch>,
1955 mut cx: AsyncApp,
1956 ) -> Result<proto::Ack> {
1957 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1958 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1959 let branch = envelope.payload.branch;
1960 let new_name = envelope.payload.new_name;
1961
1962 repository_handle
1963 .update(&mut cx, |repository_handle, _| {
1964 repository_handle.rename_branch(branch, new_name)
1965 })?
1966 .await??;
1967
1968 Ok(proto::Ack {})
1969 }
1970
1971 async fn handle_show(
1972 this: Entity<Self>,
1973 envelope: TypedEnvelope<proto::GitShow>,
1974 mut cx: AsyncApp,
1975 ) -> Result<proto::GitCommitDetails> {
1976 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1977 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1978
1979 let commit = repository_handle
1980 .update(&mut cx, |repository_handle, _| {
1981 repository_handle.show(envelope.payload.commit)
1982 })?
1983 .await??;
1984 Ok(proto::GitCommitDetails {
1985 sha: commit.sha.into(),
1986 message: commit.message.into(),
1987 commit_timestamp: commit.commit_timestamp,
1988 author_email: commit.author_email.into(),
1989 author_name: commit.author_name.into(),
1990 })
1991 }
1992
1993 async fn handle_load_commit_diff(
1994 this: Entity<Self>,
1995 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1996 mut cx: AsyncApp,
1997 ) -> Result<proto::LoadCommitDiffResponse> {
1998 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1999 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2000
2001 let commit_diff = repository_handle
2002 .update(&mut cx, |repository_handle, _| {
2003 repository_handle.load_commit_diff(envelope.payload.commit)
2004 })?
2005 .await??;
2006 Ok(proto::LoadCommitDiffResponse {
2007 files: commit_diff
2008 .files
2009 .into_iter()
2010 .map(|file| proto::CommitFile {
2011 path: file.path.to_string(),
2012 old_text: file.old_text,
2013 new_text: file.new_text,
2014 })
2015 .collect(),
2016 })
2017 }
2018
2019 async fn handle_reset(
2020 this: Entity<Self>,
2021 envelope: TypedEnvelope<proto::GitReset>,
2022 mut cx: AsyncApp,
2023 ) -> Result<proto::Ack> {
2024 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2025 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2026
2027 let mode = match envelope.payload.mode() {
2028 git_reset::ResetMode::Soft => ResetMode::Soft,
2029 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2030 };
2031
2032 repository_handle
2033 .update(&mut cx, |repository_handle, cx| {
2034 repository_handle.reset(envelope.payload.commit, mode, cx)
2035 })?
2036 .await??;
2037 Ok(proto::Ack {})
2038 }
2039
2040 async fn handle_checkout_files(
2041 this: Entity<Self>,
2042 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2043 mut cx: AsyncApp,
2044 ) -> Result<proto::Ack> {
2045 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2046 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2047 let paths = envelope
2048 .payload
2049 .paths
2050 .iter()
2051 .map(|s| RepoPath::from_str(s))
2052 .collect();
2053
2054 repository_handle
2055 .update(&mut cx, |repository_handle, cx| {
2056 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2057 })?
2058 .await??;
2059 Ok(proto::Ack {})
2060 }
2061
    /// Opens the commit-message buffer for a repository and shares it with
    /// the requesting peer.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })?
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                // Replicate the buffer to the peer that asked for it; prefer
                // the original sender when the message was forwarded.
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2092
    /// Relays an askpass (credential) prompt to the delegate registered for
    /// the given askpass session and returns the user's response.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
        // Take the delegate out of the map so the lock isn't held across the
        // (potentially long-running) password prompt.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass.ask_password(envelope.payload.prompt).await?;

        // Put the delegate back so later prompts in this session still work.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        Ok(proto::AskPassResponse { response })
    }
2115
2116 async fn handle_check_for_pushed_commits(
2117 this: Entity<Self>,
2118 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2119 mut cx: AsyncApp,
2120 ) -> Result<proto::CheckForPushedCommitsResponse> {
2121 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2122 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2123
2124 let branches = repository_handle
2125 .update(&mut cx, |repository_handle, _| {
2126 repository_handle.check_for_pushed_commits()
2127 })?
2128 .await??;
2129 Ok(proto::CheckForPushedCommitsResponse {
2130 pushed_to: branches
2131 .into_iter()
2132 .map(|commit| commit.to_string())
2133 .collect(),
2134 })
2135 }
2136
2137 async fn handle_git_diff(
2138 this: Entity<Self>,
2139 envelope: TypedEnvelope<proto::GitDiff>,
2140 mut cx: AsyncApp,
2141 ) -> Result<proto::GitDiffResponse> {
2142 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2143 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2144 let diff_type = match envelope.payload.diff_type() {
2145 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2146 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2147 };
2148
2149 let mut diff = repository_handle
2150 .update(&mut cx, |repository_handle, cx| {
2151 repository_handle.diff(diff_type, cx)
2152 })?
2153 .await??;
2154 const ONE_MB: usize = 1_000_000;
2155 if diff.len() > ONE_MB {
2156 diff = diff.chars().take(ONE_MB).collect()
2157 }
2158
2159 Ok(proto::GitDiffResponse { diff })
2160 }
2161
2162 async fn handle_open_unstaged_diff(
2163 this: Entity<Self>,
2164 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2165 mut cx: AsyncApp,
2166 ) -> Result<proto::OpenUnstagedDiffResponse> {
2167 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2168 let diff = this
2169 .update(&mut cx, |this, cx| {
2170 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2171 Some(this.open_unstaged_diff(buffer, cx))
2172 })?
2173 .context("missing buffer")?
2174 .await?;
2175 this.update(&mut cx, |this, _| {
2176 let shared_diffs = this
2177 .shared_diffs
2178 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2179 .or_default();
2180 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2181 })?;
2182 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2183 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2184 }
2185
    /// Remote handler for `OpenUncommittedDiff`: opens the uncommitted diff
    /// for a buffer, retains it for the requesting peer, and replies with the
    /// relevant base texts (committed and/or staged) plus a mode describing
    /// how to interpret them.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })?
            .context("missing buffer")?
            .await?;
        // Retain the diff on behalf of the requesting peer so it stays alive
        // while they use it.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        })?;
        diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary (unstaged) diff's base text is the index content,
            // when that base exists.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text())
            });

            // Pick a mode that avoids sending redundant text: equal remote
            // ids mean the index base is the very same snapshot as the
            // committed base, so only the committed text is sent
            // (`IndexMatchesHead`); otherwise both sides are sent as
            // available (`IndexAndHead`).
            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text();
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        })
    }
2246
2247 async fn handle_update_diff_bases(
2248 this: Entity<Self>,
2249 request: TypedEnvelope<proto::UpdateDiffBases>,
2250 mut cx: AsyncApp,
2251 ) -> Result<()> {
2252 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2253 this.update(&mut cx, |this, cx| {
2254 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2255 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2256 let buffer = buffer.read(cx).text_snapshot();
2257 diff_state.update(cx, |diff_state, cx| {
2258 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2259 })
2260 }
2261 }
2262 })
2263 }
2264
2265 async fn handle_blame_buffer(
2266 this: Entity<Self>,
2267 envelope: TypedEnvelope<proto::BlameBuffer>,
2268 mut cx: AsyncApp,
2269 ) -> Result<proto::BlameBufferResponse> {
2270 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2271 let version = deserialize_version(&envelope.payload.version);
2272 let buffer = this.read_with(&cx, |this, cx| {
2273 this.buffer_store.read(cx).get_existing(buffer_id)
2274 })??;
2275 buffer
2276 .update(&mut cx, |buffer, _| {
2277 buffer.wait_for_version(version.clone())
2278 })?
2279 .await?;
2280 let blame = this
2281 .update(&mut cx, |this, cx| {
2282 this.blame_buffer(&buffer, Some(version), cx)
2283 })?
2284 .await?;
2285 Ok(serialize_blame_buffer_response(blame))
2286 }
2287
2288 async fn handle_get_permalink_to_line(
2289 this: Entity<Self>,
2290 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2291 mut cx: AsyncApp,
2292 ) -> Result<proto::GetPermalinkToLineResponse> {
2293 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2294 // let version = deserialize_version(&envelope.payload.version);
2295 let selection = {
2296 let proto_selection = envelope
2297 .payload
2298 .selection
2299 .context("no selection to get permalink for defined")?;
2300 proto_selection.start as u32..proto_selection.end as u32
2301 };
2302 let buffer = this.read_with(&cx, |this, cx| {
2303 this.buffer_store.read(cx).get_existing(buffer_id)
2304 })??;
2305 let permalink = this
2306 .update(&mut cx, |this, cx| {
2307 this.get_permalink_to_line(&buffer, selection, cx)
2308 })?
2309 .await?;
2310 Ok(proto::GetPermalinkToLineResponse {
2311 permalink: permalink.to_string(),
2312 })
2313 }
2314
2315 fn repository_for_request(
2316 this: &Entity<Self>,
2317 id: RepositoryId,
2318 cx: &mut AsyncApp,
2319 ) -> Result<Entity<Repository>> {
2320 this.read_with(cx, |this, _| {
2321 this.repositories
2322 .get(&id)
2323 .context("missing repository handle")
2324 .cloned()
2325 })?
2326 }
2327
2328 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2329 self.repositories
2330 .iter()
2331 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2332 .collect()
2333 }
2334
    /// Maps a batch of updated worktree entries to the repositories that
    /// contain them, returning repo-relative paths grouped per repository.
    ///
    /// Each path is attributed to its innermost containing repository, so an
    /// entry inside a nested repo is not also reported to the outer repo.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<Path>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Work with absolute paths so entries can be compared against the
        // repositories' absolute work-directory paths.
        let entries = entries
            .into_iter()
            .filter_map(|path| worktree.absolutize(&path).ok())
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            // Sort repositories by work directory; iterating in reverse then
            // visits nested (more specific) repositories first.
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = vec![];
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) =
                            RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, &path)
                    {
                        // Remember the entry's index so duplicates can be
                        // filtered out below.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    Some((repo, paths))
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
2408}
2409
2410impl BufferGitState {
    /// Creates an empty per-buffer git state; diffs, base texts, and conflict
    /// tracking are populated later as they are loaded.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            // Holds `true` while a diff recalculation is in flight; see
            // `wait_for_recalculation`.
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }
2431
2432 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2433 self.language = buffer.read(cx).language().cloned();
2434 self.language_changed = true;
2435 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2436 }
2437
    /// Re-scans the buffer for git conflict markers on a background task and
    /// updates the associated `ConflictSet`.
    ///
    /// The returned receiver fires once the conflict set has been updated.
    /// When there is no live conflict set, or it is not currently marked as
    /// conflicted, no reparse is scheduled and the sender is dropped, so
    /// awaiting the receiver completes immediately (with a cancellation).
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            // Assigning here replaces (and thereby cancels) any
            // previously-scheduled reparse task; only the newest snapshot
            // needs parsing.
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Notify every waiter that accumulated while this reparse
                    // was pending.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
2489
2490 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2491 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2492 }
2493
2494 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2495 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2496 }
2497
2498 fn handle_base_texts_updated(
2499 &mut self,
2500 buffer: text::BufferSnapshot,
2501 message: proto::UpdateDiffBases,
2502 cx: &mut Context<Self>,
2503 ) {
2504 use proto::update_diff_bases::Mode;
2505
2506 let Some(mode) = Mode::from_i32(message.mode) else {
2507 return;
2508 };
2509
2510 let diff_bases_change = match mode {
2511 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2512 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2513 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2514 Mode::IndexAndHead => DiffBasesChange::SetEach {
2515 index: message.staged_text,
2516 head: message.committed_text,
2517 },
2518 };
2519
2520 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2521 }
2522
2523 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2524 if *self.recalculating_tx.borrow() {
2525 let mut rx = self.recalculating_tx.subscribe();
2526 return Some(async move {
2527 loop {
2528 let is_recalculating = rx.recv().await;
2529 if is_recalculating != Some(true) {
2530 break;
2531 }
2532 }
2533 });
2534 } else {
2535 None
2536 }
2537 }
2538
2539 fn diff_bases_changed(
2540 &mut self,
2541 buffer: text::BufferSnapshot,
2542 diff_bases_change: Option<DiffBasesChange>,
2543 cx: &mut Context<Self>,
2544 ) {
2545 match diff_bases_change {
2546 Some(DiffBasesChange::SetIndex(index)) => {
2547 self.index_text = index.map(|mut index| {
2548 text::LineEnding::normalize(&mut index);
2549 Arc::new(index)
2550 });
2551 self.index_changed = true;
2552 }
2553 Some(DiffBasesChange::SetHead(head)) => {
2554 self.head_text = head.map(|mut head| {
2555 text::LineEnding::normalize(&mut head);
2556 Arc::new(head)
2557 });
2558 self.head_changed = true;
2559 }
2560 Some(DiffBasesChange::SetBoth(text)) => {
2561 let text = text.map(|mut text| {
2562 text::LineEnding::normalize(&mut text);
2563 Arc::new(text)
2564 });
2565 self.head_text = text.clone();
2566 self.index_text = text;
2567 self.head_changed = true;
2568 self.index_changed = true;
2569 }
2570 Some(DiffBasesChange::SetEach { index, head }) => {
2571 self.index_text = index.map(|mut index| {
2572 text::LineEnding::normalize(&mut index);
2573 Arc::new(index)
2574 });
2575 self.index_changed = true;
2576 self.head_text = head.map(|mut head| {
2577 text::LineEnding::normalize(&mut head);
2578 Arc::new(head)
2579 });
2580 self.head_changed = true;
2581 }
2582 None => {}
2583 }
2584
2585 self.recalculate_diffs(buffer, cx)
2586 }
2587
2588 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2589 *self.recalculating_tx.borrow_mut() = true;
2590
2591 let language = self.language.clone();
2592 let language_registry = self.language_registry.clone();
2593 let unstaged_diff = self.unstaged_diff();
2594 let uncommitted_diff = self.uncommitted_diff();
2595 let head = self.head_text.clone();
2596 let index = self.index_text.clone();
2597 let index_changed = self.index_changed;
2598 let head_changed = self.head_changed;
2599 let language_changed = self.language_changed;
2600 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2601 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2602 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2603 (None, None) => true,
2604 _ => false,
2605 };
2606 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2607 log::debug!(
2608 "start recalculating diffs for buffer {}",
2609 buffer.remote_id()
2610 );
2611
2612 let mut new_unstaged_diff = None;
2613 if let Some(unstaged_diff) = &unstaged_diff {
2614 new_unstaged_diff = Some(
2615 BufferDiff::update_diff(
2616 unstaged_diff.clone(),
2617 buffer.clone(),
2618 index,
2619 index_changed,
2620 language_changed,
2621 language.clone(),
2622 language_registry.clone(),
2623 cx,
2624 )
2625 .await?,
2626 );
2627 }
2628
2629 let mut new_uncommitted_diff = None;
2630 if let Some(uncommitted_diff) = &uncommitted_diff {
2631 new_uncommitted_diff = if index_matches_head {
2632 new_unstaged_diff.clone()
2633 } else {
2634 Some(
2635 BufferDiff::update_diff(
2636 uncommitted_diff.clone(),
2637 buffer.clone(),
2638 head,
2639 head_changed,
2640 language_changed,
2641 language.clone(),
2642 language_registry.clone(),
2643 cx,
2644 )
2645 .await?,
2646 )
2647 }
2648 }
2649
2650 let cancel = this.update(cx, |this, _| {
2651 // This checks whether all pending stage/unstage operations
2652 // have quiesced (i.e. both the corresponding write and the
2653 // read of that write have completed). If not, then we cancel
2654 // this recalculation attempt to avoid invalidating pending
2655 // state too quickly; another recalculation will come along
2656 // later and clear the pending state once the state of the index has settled.
2657 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2658 *this.recalculating_tx.borrow_mut() = false;
2659 true
2660 } else {
2661 false
2662 }
2663 })?;
2664 if cancel {
2665 log::debug!(
2666 concat!(
2667 "aborting recalculating diffs for buffer {}",
2668 "due to subsequent hunk operations",
2669 ),
2670 buffer.remote_id()
2671 );
2672 return Ok(());
2673 }
2674
2675 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2676 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2677 {
2678 unstaged_diff.update(cx, |diff, cx| {
2679 if language_changed {
2680 diff.language_changed(cx);
2681 }
2682 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2683 })?
2684 } else {
2685 None
2686 };
2687
2688 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2689 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2690 {
2691 uncommitted_diff.update(cx, |diff, cx| {
2692 if language_changed {
2693 diff.language_changed(cx);
2694 }
2695 diff.set_snapshot_with_secondary(
2696 new_uncommitted_diff,
2697 &buffer,
2698 unstaged_changed_range,
2699 true,
2700 cx,
2701 );
2702 })?;
2703 }
2704
2705 log::debug!(
2706 "finished recalculating diffs for buffer {}",
2707 buffer.remote_id()
2708 );
2709
2710 if let Some(this) = this.upgrade() {
2711 this.update(cx, |this, _| {
2712 this.index_changed = false;
2713 this.head_changed = false;
2714 this.language_changed = false;
2715 *this.recalculating_tx.borrow_mut() = false;
2716 })?;
2717 }
2718
2719 Ok(())
2720 }));
2721 }
2722}
2723
/// Builds an `AskPassDelegate` that forwards git password prompts to the
/// downstream client as `AskPassRequest`s and relays the client's response
/// back through the delegate's `tx`.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // Without a downstream client there is nobody to ask, so the
            // prompt is dropped (and `tx` with it).
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            // Await the response off the main thread and hand it back to the
            // waiting askpass session.
            cx.spawn(async move |_, _| {
                tx.send(response.await?.response).ok();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        })
        .log_err();
    })
}
2751
2752impl RepositoryId {
2753 pub fn to_proto(self) -> u64 {
2754 self.0
2755 }
2756
2757 pub fn from_proto(id: u64) -> Self {
2758 RepositoryId(id)
2759 }
2760}
2761
2762impl RepositorySnapshot {
    /// Creates a snapshot for a newly-discovered repository, before any scan
    /// has populated statuses, branch, head commit, or merge state.
    fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
        Self {
            id,
            statuses_by_path: Default::default(),
            work_directory_abs_path,
            branch: None,
            head_commit: None,
            // scan_id 0 marks a snapshot that has not been scanned yet.
            scan_id: 0,
            merge: Default::default(),
            remote_origin_url: None,
            remote_upstream_url: None,
        }
    }
2776
2777 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2778 proto::UpdateRepository {
2779 branch_summary: self.branch.as_ref().map(branch_to_proto),
2780 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2781 updated_statuses: self
2782 .statuses_by_path
2783 .iter()
2784 .map(|entry| entry.to_proto())
2785 .collect(),
2786 removed_statuses: Default::default(),
2787 current_merge_conflicts: self
2788 .merge
2789 .conflicted_paths
2790 .iter()
2791 .map(|repo_path| repo_path.to_proto())
2792 .collect(),
2793 project_id,
2794 id: self.id.to_proto(),
2795 abs_path: self.work_directory_abs_path.to_proto(),
2796 entry_ids: vec![self.id.to_proto()],
2797 scan_id: self.scan_id,
2798 is_last_update: true,
2799 }
2800 }
2801
    /// Produces an incremental `UpdateRepository` message by diffing this
    /// snapshot's statuses against `old`'s.
    ///
    /// Both status lists come from path-ordered sum-trees, so a single
    /// merge-style walk classifies every entry as added/changed
    /// (`updated_statuses`) or disappeared (`removed_statuses`).
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Present only in the new snapshot: added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Present in both: report only if the status changed.
                            if new_entry.status != old_entry.status {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Present only in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .conflicted_paths
                .iter()
                .map(|path| path.as_ref().to_proto())
                .collect(),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_proto(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
        }
    }
2863
    /// Iterates over all of the repository's status entries, in path order.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }
2867
    /// Returns the aggregated git status summary over all entries.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }
2871
    /// Looks up the status entry for a single repo-relative path.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.0.clone()), &())
            .cloned()
    }
2877
    /// Converts an absolute path into a path relative to this repository's
    /// work directory, or `None` when the path lies outside it.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path)
    }
2881
2882 #[inline]
2883 fn abs_path_to_repo_path_inner(
2884 work_directory_abs_path: &Path,
2885 abs_path: &Path,
2886 ) -> Option<RepoPath> {
2887 abs_path
2888 .strip_prefix(&work_directory_abs_path)
2889 .map(RepoPath::from)
2890 .ok()
2891 }
2892
2893 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2894 self.merge.conflicted_paths.contains(&repo_path)
2895 }
2896
2897 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2898 let had_conflict_on_last_merge_head_change =
2899 self.merge.conflicted_paths.contains(&repo_path);
2900 let has_conflict_currently = self
2901 .status_for_path(&repo_path)
2902 .map_or(false, |entry| entry.status.is_conflicted());
2903 had_conflict_on_last_merge_head_change || has_conflict_currently
2904 }
2905
2906 /// This is the name that will be displayed in the repository selector for this repository.
2907 pub fn display_name(&self) -> SharedString {
2908 self.work_directory_abs_path
2909 .file_name()
2910 .unwrap_or_default()
2911 .to_string_lossy()
2912 .to_string()
2913 .into()
2914 }
2915}
2916
2917impl MergeDetails {
    /// Reads the repository's merge state: the merge message, the heads of
    /// any in-progress operation (merge, cherry-pick, rebase, revert, apply),
    /// and the set of conflicted paths.
    ///
    /// Returns the details together with a flag reporting whether the
    /// operation heads changed relative to `prev_snapshot`.
    async fn load(
        backend: &Arc<dyn GitRepository>,
        status: &SumTree<StatusEntry>,
        prev_snapshot: &RepositorySnapshot,
    ) -> Result<(MergeDetails, bool)> {
        log::debug!("load merge details");
        let message = backend.merge_message().await;
        let heads = backend
            .revparse_batch(vec![
                "MERGE_HEAD".into(),
                "CHERRY_PICK_HEAD".into(),
                "REBASE_HEAD".into(),
                "REVERT_HEAD".into(),
                "APPLY_HEAD".into(),
            ])
            .await
            .log_err()
            .unwrap_or_default()
            .into_iter()
            .map(|opt| opt.map(SharedString::from))
            .collect::<Vec<_>>();
        let merge_heads_changed = heads != prev_snapshot.merge.heads;
        // Conflicted paths are only re-derived when the merge heads change;
        // otherwise the previous set is carried forward unchanged.
        let conflicted_paths = if merge_heads_changed {
            let current_conflicted_paths = TreeSet::from_ordered_entries(
                status
                    .iter()
                    .filter(|entry| entry.status.is_conflicted())
                    .map(|entry| entry.repo_path.clone()),
            );

            // It can happen that we run a scan while a lengthy merge is in progress
            // that will eventually result in conflicts, but before those conflicts
            // are reported by `git status`. Since for the moment we only care about
            // the merge heads state for the purposes of tracking conflicts, don't update
            // this state until we see some conflicts.
            if heads.iter().any(Option::is_some)
                && !prev_snapshot.merge.heads.iter().any(Option::is_some)
                && current_conflicted_paths.is_empty()
            {
                log::debug!("not updating merge heads because no conflicts found");
                return Ok((
                    MergeDetails {
                        message: message.map(SharedString::from),
                        ..prev_snapshot.merge.clone()
                    },
                    false,
                ));
            }

            current_conflicted_paths
        } else {
            prev_snapshot.merge.conflicted_paths.clone()
        };
        let details = MergeDetails {
            conflicted_paths,
            message: message.map(SharedString::from),
            heads,
        };
        Ok((details, merge_heads_changed))
    }
2978}
2979
2980impl Repository {
    /// Returns a clone of the repository's current snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }
2984
2985 fn local(
2986 id: RepositoryId,
2987 work_directory_abs_path: Arc<Path>,
2988 dot_git_abs_path: Arc<Path>,
2989 repository_dir_abs_path: Arc<Path>,
2990 common_dir_abs_path: Arc<Path>,
2991 project_environment: WeakEntity<ProjectEnvironment>,
2992 fs: Arc<dyn Fs>,
2993 git_store: WeakEntity<GitStore>,
2994 cx: &mut Context<Self>,
2995 ) -> Self {
2996 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2997 Repository {
2998 this: cx.weak_entity(),
2999 git_store,
3000 snapshot,
3001 commit_message_buffer: None,
3002 askpass_delegates: Default::default(),
3003 paths_needing_status_update: Default::default(),
3004 latest_askpass_id: 0,
3005 job_sender: Repository::spawn_local_git_worker(
3006 work_directory_abs_path,
3007 dot_git_abs_path,
3008 repository_dir_abs_path,
3009 common_dir_abs_path,
3010 project_environment,
3011 fs,
3012 cx,
3013 ),
3014 job_id: 0,
3015 active_jobs: Default::default(),
3016 }
3017 }
3018
3019 fn remote(
3020 id: RepositoryId,
3021 work_directory_abs_path: Arc<Path>,
3022 project_id: ProjectId,
3023 client: AnyProtoClient,
3024 git_store: WeakEntity<GitStore>,
3025 cx: &mut Context<Self>,
3026 ) -> Self {
3027 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
3028 Self {
3029 this: cx.weak_entity(),
3030 snapshot,
3031 commit_message_buffer: None,
3032 git_store,
3033 paths_needing_status_update: Default::default(),
3034 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3035 askpass_delegates: Default::default(),
3036 latest_askpass_id: 0,
3037 active_jobs: Default::default(),
3038 job_id: 0,
3039 }
3040 }
3041
    /// Upgrades the weak handle to the owning `GitStore`, if it still exists.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
3045
    /// Reloads the index and HEAD base texts for every open buffer belonging
    /// to this repository, then applies each resulting change locally and
    /// forwards it to the downstream client, if any.
    ///
    /// Runs as a keyed job (`GitJobKey::ReloadBufferDiffBases`) so repeated
    /// requests collapse into one reload. Only meaningful for local
    /// repositories; remote ones log an error and return.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local { backend, .. } = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Phase 1 (main thread): collect, for each open buffer in
                // this repository, its repo path plus the currently-cached
                // index/HEAD texts — only for diffs that are still upgradable.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path =
                                    file.worktree.read(cx).absolutize(&file.path).ok()?;
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.0.display()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })??;

                // Phase 2 (background): load fresh base texts from git and
                // compare them against the cached ones, producing the minimal
                // `DiffBasesChange` for each buffer (or `None` if unchanged).
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            // Only report the sides that actually changed;
                            // identical new index and HEAD collapse into
                            // `SetBoth`.
                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_ref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_ref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_ref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_ref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Phase 3 (main thread): forward each change to the
                // downstream client, if one is connected, then apply it to
                // the local diff state.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
3206
    /// Enqueues `job` on this repository's serialized git job queue.
    ///
    /// `status` is an optional human-readable label (e.g. "git fetch") that is
    /// tracked in `active_jobs` while the job runs. The returned receiver
    /// resolves with the job's result.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        // Delegates to `send_keyed_job` with no `GitJobKey`.
        self.send_keyed_job(None, status, job)
    }
3219
    /// Enqueues `job` on this repository's git job queue, optionally tagged
    /// with a `GitJobKey`.
    ///
    /// While the job runs, a `JobInfo` entry (start time plus the `status`
    /// message, when one was given) is kept in `active_jobs` so observers can
    /// surface progress; it is removed once the job finishes. The job's result
    /// is delivered through the returned oneshot receiver.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the job as active (with its status message)
                        // for the duration of its execution.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The caller may have dropped the receiver; ignore.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
3269
3270 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3271 let Some(git_store) = self.git_store.upgrade() else {
3272 return;
3273 };
3274 let entity = cx.entity();
3275 git_store.update(cx, |git_store, cx| {
3276 let Some((&id, _)) = git_store
3277 .repositories
3278 .iter()
3279 .find(|(_, handle)| *handle == &entity)
3280 else {
3281 return;
3282 };
3283 git_store.active_repo_id = Some(id);
3284 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3285 });
3286 }
3287
    /// Iterates the file status entries recorded in the current snapshot;
    /// does not consult git itself.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
3291
3292 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3293 let git_store = self.git_store.upgrade()?;
3294 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3295 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
3296 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3297 Some(ProjectPath {
3298 worktree_id: worktree.read(cx).id(),
3299 path: relative_path.into(),
3300 })
3301 }
3302
3303 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3304 let git_store = self.git_store.upgrade()?;
3305 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3306 let abs_path = worktree_store.absolutize(path, cx)?;
3307 self.snapshot.abs_path_to_repo_path(&abs_path)
3308 }
3309
3310 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3311 other
3312 .read(cx)
3313 .snapshot
3314 .work_directory_abs_path
3315 .starts_with(&self.snapshot.work_directory_abs_path)
3316 }
3317
    /// Returns the buffer used to compose commit messages for this repository,
    /// creating and caching it on first use.
    ///
    /// For local repositories the buffer is created via the buffer store; for
    /// remote repositories it is opened on the host over RPC and awaited here.
    /// In both cases it is tagged with the "Git Commit" language when a
    /// registry is provided, and stored in `commit_message_buffer` for reuse.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Reuse the cached buffer if we've already opened one.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local { .. } => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })?
                    .await
                }
                RepositoryState::Remote { project_id, client } => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    // Wait for the host's buffer to be replicated locally.
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })?
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        })?;
                    }
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    })?;
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
3370
3371 fn open_local_commit_buffer(
3372 language_registry: Option<Arc<LanguageRegistry>>,
3373 buffer_store: Entity<BufferStore>,
3374 cx: &mut Context<Self>,
3375 ) -> Task<Result<Entity<Buffer>>> {
3376 cx.spawn(async move |repository, cx| {
3377 let buffer = buffer_store
3378 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
3379 .await?;
3380
3381 if let Some(language_registry) = language_registry {
3382 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3383 buffer.update(cx, |buffer, cx| {
3384 buffer.set_language(Some(git_commit_language), cx);
3385 })?;
3386 }
3387
3388 repository.update(cx, |repository, _| {
3389 repository.commit_message_buffer = Some(buffer.clone());
3390 })?;
3391 Ok(buffer)
3392 })
3393 }
3394
3395 pub fn checkout_files(
3396 &mut self,
3397 commit: &str,
3398 paths: Vec<RepoPath>,
3399 _cx: &mut App,
3400 ) -> oneshot::Receiver<Result<()>> {
3401 let commit = commit.to_string();
3402 let id = self.id;
3403
3404 self.send_job(
3405 Some(format!("git checkout {}", commit).into()),
3406 move |git_repo, _| async move {
3407 match git_repo {
3408 RepositoryState::Local {
3409 backend,
3410 environment,
3411 ..
3412 } => {
3413 backend
3414 .checkout_files(commit, paths, environment.clone())
3415 .await
3416 }
3417 RepositoryState::Remote { project_id, client } => {
3418 client
3419 .request(proto::GitCheckoutFiles {
3420 project_id: project_id.0,
3421 repository_id: id.to_proto(),
3422 commit,
3423 paths: paths
3424 .into_iter()
3425 .map(|p| p.to_string_lossy().to_string())
3426 .collect(),
3427 })
3428 .await?;
3429
3430 Ok(())
3431 }
3432 }
3433 },
3434 )
3435 }
3436
3437 pub fn reset(
3438 &mut self,
3439 commit: String,
3440 reset_mode: ResetMode,
3441 _cx: &mut App,
3442 ) -> oneshot::Receiver<Result<()>> {
3443 let commit = commit.to_string();
3444 let id = self.id;
3445
3446 self.send_job(None, move |git_repo, _| async move {
3447 match git_repo {
3448 RepositoryState::Local {
3449 backend,
3450 environment,
3451 ..
3452 } => backend.reset(commit, reset_mode, environment).await,
3453 RepositoryState::Remote { project_id, client } => {
3454 client
3455 .request(proto::GitReset {
3456 project_id: project_id.0,
3457 repository_id: id.to_proto(),
3458 commit,
3459 mode: match reset_mode {
3460 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3461 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3462 },
3463 })
3464 .await?;
3465
3466 Ok(())
3467 }
3468 }
3469 })
3470 }
3471
3472 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3473 let id = self.id;
3474 self.send_job(None, move |git_repo, _cx| async move {
3475 match git_repo {
3476 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3477 RepositoryState::Remote { project_id, client } => {
3478 let resp = client
3479 .request(proto::GitShow {
3480 project_id: project_id.0,
3481 repository_id: id.to_proto(),
3482 commit,
3483 })
3484 .await?;
3485
3486 Ok(CommitDetails {
3487 sha: resp.sha.into(),
3488 message: resp.message.into(),
3489 commit_timestamp: resp.commit_timestamp,
3490 author_email: resp.author_email.into(),
3491 author_name: resp.author_name.into(),
3492 })
3493 }
3494 }
3495 })
3496 }
3497
3498 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3499 let id = self.id;
3500 self.send_job(None, move |git_repo, cx| async move {
3501 match git_repo {
3502 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3503 RepositoryState::Remote {
3504 client, project_id, ..
3505 } => {
3506 let response = client
3507 .request(proto::LoadCommitDiff {
3508 project_id: project_id.0,
3509 repository_id: id.to_proto(),
3510 commit,
3511 })
3512 .await?;
3513 Ok(CommitDiff {
3514 files: response
3515 .files
3516 .into_iter()
3517 .map(|file| CommitFile {
3518 path: Path::new(&file.path).into(),
3519 old_text: file.old_text,
3520 new_text: file.new_text,
3521 })
3522 .collect(),
3523 })
3524 }
3525 }
3526 })
3527 }
3528
3529 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3530 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3531 }
3532
3533 pub fn stage_entries(
3534 &self,
3535 entries: Vec<RepoPath>,
3536 cx: &mut Context<Self>,
3537 ) -> Task<anyhow::Result<()>> {
3538 if entries.is_empty() {
3539 return Task::ready(Ok(()));
3540 }
3541 let id = self.id;
3542
3543 let mut save_futures = Vec::new();
3544 if let Some(buffer_store) = self.buffer_store(cx) {
3545 buffer_store.update(cx, |buffer_store, cx| {
3546 for path in &entries {
3547 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3548 continue;
3549 };
3550 if let Some(buffer) = buffer_store.get_by_path(&project_path) {
3551 if buffer
3552 .read(cx)
3553 .file()
3554 .map_or(false, |file| file.disk_state().exists())
3555 {
3556 save_futures.push(buffer_store.save_buffer(buffer, cx));
3557 }
3558 }
3559 }
3560 })
3561 }
3562
3563 cx.spawn(async move |this, cx| {
3564 for save_future in save_futures {
3565 save_future.await?;
3566 }
3567
3568 this.update(cx, |this, _| {
3569 this.send_job(None, move |git_repo, _cx| async move {
3570 match git_repo {
3571 RepositoryState::Local {
3572 backend,
3573 environment,
3574 ..
3575 } => backend.stage_paths(entries, environment.clone()).await,
3576 RepositoryState::Remote { project_id, client } => {
3577 client
3578 .request(proto::Stage {
3579 project_id: project_id.0,
3580 repository_id: id.to_proto(),
3581 paths: entries
3582 .into_iter()
3583 .map(|repo_path| repo_path.as_ref().to_proto())
3584 .collect(),
3585 })
3586 .await
3587 .context("sending stage request")?;
3588
3589 Ok(())
3590 }
3591 }
3592 })
3593 })?
3594 .await??;
3595
3596 Ok(())
3597 })
3598 }
3599
3600 pub fn unstage_entries(
3601 &self,
3602 entries: Vec<RepoPath>,
3603 cx: &mut Context<Self>,
3604 ) -> Task<anyhow::Result<()>> {
3605 if entries.is_empty() {
3606 return Task::ready(Ok(()));
3607 }
3608 let id = self.id;
3609
3610 let mut save_futures = Vec::new();
3611 if let Some(buffer_store) = self.buffer_store(cx) {
3612 buffer_store.update(cx, |buffer_store, cx| {
3613 for path in &entries {
3614 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3615 continue;
3616 };
3617 if let Some(buffer) = buffer_store.get_by_path(&project_path) {
3618 if buffer
3619 .read(cx)
3620 .file()
3621 .map_or(false, |file| file.disk_state().exists())
3622 {
3623 save_futures.push(buffer_store.save_buffer(buffer, cx));
3624 }
3625 }
3626 }
3627 })
3628 }
3629
3630 cx.spawn(async move |this, cx| {
3631 for save_future in save_futures {
3632 save_future.await?;
3633 }
3634
3635 this.update(cx, |this, _| {
3636 this.send_job(None, move |git_repo, _cx| async move {
3637 match git_repo {
3638 RepositoryState::Local {
3639 backend,
3640 environment,
3641 ..
3642 } => backend.unstage_paths(entries, environment).await,
3643 RepositoryState::Remote { project_id, client } => {
3644 client
3645 .request(proto::Unstage {
3646 project_id: project_id.0,
3647 repository_id: id.to_proto(),
3648 paths: entries
3649 .into_iter()
3650 .map(|repo_path| repo_path.as_ref().to_proto())
3651 .collect(),
3652 })
3653 .await
3654 .context("sending unstage request")?;
3655
3656 Ok(())
3657 }
3658 }
3659 })
3660 })?
3661 .await??;
3662
3663 Ok(())
3664 })
3665 }
3666
3667 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3668 let to_stage = self
3669 .cached_status()
3670 .filter(|entry| !entry.status.staging().is_fully_staged())
3671 .map(|entry| entry.repo_path.clone())
3672 .collect();
3673 self.stage_entries(to_stage, cx)
3674 }
3675
3676 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3677 let to_unstage = self
3678 .cached_status()
3679 .filter(|entry| entry.status.staging().has_staged())
3680 .map(|entry| entry.repo_path.clone())
3681 .collect();
3682 self.unstage_entries(to_unstage, cx)
3683 }
3684
3685 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3686 let to_stash = self
3687 .cached_status()
3688 .map(|entry| entry.repo_path.clone())
3689 .collect();
3690
3691 self.stash_entries(to_stash, cx)
3692 }
3693
3694 pub fn stash_entries(
3695 &mut self,
3696 entries: Vec<RepoPath>,
3697 cx: &mut Context<Self>,
3698 ) -> Task<anyhow::Result<()>> {
3699 let id = self.id;
3700
3701 cx.spawn(async move |this, cx| {
3702 this.update(cx, |this, _| {
3703 this.send_job(None, move |git_repo, _cx| async move {
3704 match git_repo {
3705 RepositoryState::Local {
3706 backend,
3707 environment,
3708 ..
3709 } => backend.stash_paths(entries, environment).await,
3710 RepositoryState::Remote { project_id, client } => {
3711 client
3712 .request(proto::Stash {
3713 project_id: project_id.0,
3714 repository_id: id.to_proto(),
3715 paths: entries
3716 .into_iter()
3717 .map(|repo_path| repo_path.as_ref().to_proto())
3718 .collect(),
3719 })
3720 .await
3721 .context("sending stash request")?;
3722 Ok(())
3723 }
3724 }
3725 })
3726 })?
3727 .await??;
3728 Ok(())
3729 })
3730 }
3731
3732 pub fn stash_pop(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3733 let id = self.id;
3734 cx.spawn(async move |this, cx| {
3735 this.update(cx, |this, _| {
3736 this.send_job(None, move |git_repo, _cx| async move {
3737 match git_repo {
3738 RepositoryState::Local {
3739 backend,
3740 environment,
3741 ..
3742 } => backend.stash_pop(environment).await,
3743 RepositoryState::Remote { project_id, client } => {
3744 client
3745 .request(proto::StashPop {
3746 project_id: project_id.0,
3747 repository_id: id.to_proto(),
3748 })
3749 .await
3750 .context("sending stash pop request")?;
3751 Ok(())
3752 }
3753 }
3754 })
3755 })?
3756 .await??;
3757 Ok(())
3758 })
3759 }
3760
3761 pub fn commit(
3762 &mut self,
3763 message: SharedString,
3764 name_and_email: Option<(SharedString, SharedString)>,
3765 options: CommitOptions,
3766 _cx: &mut App,
3767 ) -> oneshot::Receiver<Result<()>> {
3768 let id = self.id;
3769
3770 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3771 match git_repo {
3772 RepositoryState::Local {
3773 backend,
3774 environment,
3775 ..
3776 } => {
3777 backend
3778 .commit(message, name_and_email, options, environment)
3779 .await
3780 }
3781 RepositoryState::Remote { project_id, client } => {
3782 let (name, email) = name_and_email.unzip();
3783 client
3784 .request(proto::Commit {
3785 project_id: project_id.0,
3786 repository_id: id.to_proto(),
3787 message: String::from(message),
3788 name: name.map(String::from),
3789 email: email.map(String::from),
3790 options: Some(proto::commit::CommitOptions {
3791 amend: options.amend,
3792 signoff: options.signoff,
3793 }),
3794 })
3795 .await
3796 .context("sending commit request")?;
3797
3798 Ok(())
3799 }
3800 }
3801 })
3802 }
3803
3804 pub fn fetch(
3805 &mut self,
3806 fetch_options: FetchOptions,
3807 askpass: AskPassDelegate,
3808 _cx: &mut App,
3809 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3810 let askpass_delegates = self.askpass_delegates.clone();
3811 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3812 let id = self.id;
3813
3814 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3815 match git_repo {
3816 RepositoryState::Local {
3817 backend,
3818 environment,
3819 ..
3820 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3821 RepositoryState::Remote { project_id, client } => {
3822 askpass_delegates.lock().insert(askpass_id, askpass);
3823 let _defer = util::defer(|| {
3824 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3825 debug_assert!(askpass_delegate.is_some());
3826 });
3827
3828 let response = client
3829 .request(proto::Fetch {
3830 project_id: project_id.0,
3831 repository_id: id.to_proto(),
3832 askpass_id,
3833 remote: fetch_options.to_proto(),
3834 })
3835 .await
3836 .context("sending fetch request")?;
3837
3838 Ok(RemoteCommandOutput {
3839 stdout: response.stdout,
3840 stderr: response.stderr,
3841 })
3842 }
3843 }
3844 })
3845 }
3846
3847 pub fn push(
3848 &mut self,
3849 branch: SharedString,
3850 remote: SharedString,
3851 options: Option<PushOptions>,
3852 askpass: AskPassDelegate,
3853 cx: &mut Context<Self>,
3854 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3855 let askpass_delegates = self.askpass_delegates.clone();
3856 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3857 let id = self.id;
3858
3859 let args = options
3860 .map(|option| match option {
3861 PushOptions::SetUpstream => " --set-upstream",
3862 PushOptions::Force => " --force-with-lease",
3863 })
3864 .unwrap_or("");
3865
3866 let updates_tx = self
3867 .git_store()
3868 .and_then(|git_store| match &git_store.read(cx).state {
3869 GitStoreState::Local { downstream, .. } => downstream
3870 .as_ref()
3871 .map(|downstream| downstream.updates_tx.clone()),
3872 _ => None,
3873 });
3874
3875 let this = cx.weak_entity();
3876 self.send_job(
3877 Some(format!("git push {} {} {}", args, branch, remote).into()),
3878 move |git_repo, mut cx| async move {
3879 match git_repo {
3880 RepositoryState::Local {
3881 backend,
3882 environment,
3883 ..
3884 } => {
3885 let result = backend
3886 .push(
3887 branch.to_string(),
3888 remote.to_string(),
3889 options,
3890 askpass,
3891 environment.clone(),
3892 cx.clone(),
3893 )
3894 .await;
3895 if result.is_ok() {
3896 let branches = backend.branches().await?;
3897 let branch = branches.into_iter().find(|branch| branch.is_head);
3898 log::info!("head branch after scan is {branch:?}");
3899 let snapshot = this.update(&mut cx, |this, cx| {
3900 this.snapshot.branch = branch;
3901 let snapshot = this.snapshot.clone();
3902 cx.emit(RepositoryEvent::Updated {
3903 full_scan: false,
3904 new_instance: false,
3905 });
3906 snapshot
3907 })?;
3908 if let Some(updates_tx) = updates_tx {
3909 updates_tx
3910 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3911 .ok();
3912 }
3913 }
3914 result
3915 }
3916 RepositoryState::Remote { project_id, client } => {
3917 askpass_delegates.lock().insert(askpass_id, askpass);
3918 let _defer = util::defer(|| {
3919 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3920 debug_assert!(askpass_delegate.is_some());
3921 });
3922 let response = client
3923 .request(proto::Push {
3924 project_id: project_id.0,
3925 repository_id: id.to_proto(),
3926 askpass_id,
3927 branch_name: branch.to_string(),
3928 remote_name: remote.to_string(),
3929 options: options.map(|options| match options {
3930 PushOptions::Force => proto::push::PushOptions::Force,
3931 PushOptions::SetUpstream => {
3932 proto::push::PushOptions::SetUpstream
3933 }
3934 }
3935 as i32),
3936 })
3937 .await
3938 .context("sending push request")?;
3939
3940 Ok(RemoteCommandOutput {
3941 stdout: response.stdout,
3942 stderr: response.stderr,
3943 })
3944 }
3945 }
3946 },
3947 )
3948 }
3949
3950 pub fn pull(
3951 &mut self,
3952 branch: SharedString,
3953 remote: SharedString,
3954 askpass: AskPassDelegate,
3955 _cx: &mut App,
3956 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3957 let askpass_delegates = self.askpass_delegates.clone();
3958 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3959 let id = self.id;
3960
3961 self.send_job(
3962 Some(format!("git pull {} {}", remote, branch).into()),
3963 move |git_repo, cx| async move {
3964 match git_repo {
3965 RepositoryState::Local {
3966 backend,
3967 environment,
3968 ..
3969 } => {
3970 backend
3971 .pull(
3972 branch.to_string(),
3973 remote.to_string(),
3974 askpass,
3975 environment.clone(),
3976 cx,
3977 )
3978 .await
3979 }
3980 RepositoryState::Remote { project_id, client } => {
3981 askpass_delegates.lock().insert(askpass_id, askpass);
3982 let _defer = util::defer(|| {
3983 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3984 debug_assert!(askpass_delegate.is_some());
3985 });
3986 let response = client
3987 .request(proto::Pull {
3988 project_id: project_id.0,
3989 repository_id: id.to_proto(),
3990 askpass_id,
3991 branch_name: branch.to_string(),
3992 remote_name: remote.to_string(),
3993 })
3994 .await
3995 .context("sending pull request")?;
3996
3997 Ok(RemoteCommandOutput {
3998 stdout: response.stdout,
3999 stderr: response.stderr,
4000 })
4001 }
4002 }
4003 },
4004 )
4005 }
4006
    /// Writes `content` into the git index for `path` (or clears the entry
    /// when `content` is `None`).
    ///
    /// Jobs are keyed by `GitJobKey::WriteIndex(path)` so writes to the same
    /// path are serialized in the job queue. When
    /// `hunk_staging_operation_count` is provided, it is recorded on the
    /// buffer's diff state after the write completes, so subsequent diff
    /// updates can tell which hunk-staging operations this index write
    /// already reflects.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(path.clone())),
            None,
            move |git_repo, mut cx| async move {
                log::debug!("start updating index text for buffer {}", path.display());
                match git_repo {
                    RepositoryState::Local {
                        backend,
                        environment,
                        ..
                    } => {
                        backend
                            .set_index_text(path.clone(), content, environment.clone())
                            .await?;
                    }
                    RepositoryState::Remote { project_id, client } => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.as_ref().to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!("finish updating index text for buffer {}", path.display());

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    // Find the open buffer for this path (if any) and stamp
                    // its diff state with the operation count this write
                    // corresponds to.
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store.update(&mut cx, |git_store, cx| {
                        let buffer_id = git_store
                            .buffer_store
                            .read(cx)
                            .get_by_path(&project_path?)?
                            .read(cx)
                            .remote_id();
                        let diff_state = git_store.diffs.get(&buffer_id)?;
                        diff_state.update(cx, |diff_state, _| {
                            diff_state.hunk_staging_operation_count_as_of_write =
                                hunk_staging_operation_count;
                        });
                        Some(())
                    })?;
                }
                Ok(())
            },
        )
    }
4069
4070 pub fn get_remotes(
4071 &mut self,
4072 branch_name: Option<String>,
4073 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4074 let id = self.id;
4075 self.send_job(None, move |repo, _cx| async move {
4076 match repo {
4077 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4078 RepositoryState::Remote { project_id, client } => {
4079 let response = client
4080 .request(proto::GetRemotes {
4081 project_id: project_id.0,
4082 repository_id: id.to_proto(),
4083 branch_name,
4084 })
4085 .await?;
4086
4087 let remotes = response
4088 .remotes
4089 .into_iter()
4090 .map(|remotes| git::repository::Remote {
4091 name: remotes.name.into(),
4092 })
4093 .collect();
4094
4095 Ok(remotes)
4096 }
4097 }
4098 })
4099 }
4100
4101 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4102 let id = self.id;
4103 self.send_job(None, move |repo, _| async move {
4104 match repo {
4105 RepositoryState::Local { backend, .. } => backend.branches().await,
4106 RepositoryState::Remote { project_id, client } => {
4107 let response = client
4108 .request(proto::GitGetBranches {
4109 project_id: project_id.0,
4110 repository_id: id.to_proto(),
4111 })
4112 .await?;
4113
4114 let branches = response
4115 .branches
4116 .into_iter()
4117 .map(|branch| proto_to_branch(&branch))
4118 .collect();
4119
4120 Ok(branches)
4121 }
4122 }
4123 })
4124 }
4125
4126 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4127 let id = self.id;
4128 self.send_job(None, move |repo, _| async move {
4129 match repo {
4130 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4131 RepositoryState::Remote { project_id, client } => {
4132 let response = client
4133 .request(proto::GetDefaultBranch {
4134 project_id: project_id.0,
4135 repository_id: id.to_proto(),
4136 })
4137 .await?;
4138
4139 anyhow::Ok(response.branch.map(SharedString::from))
4140 }
4141 }
4142 })
4143 }
4144
4145 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4146 let id = self.id;
4147 self.send_job(None, move |repo, _cx| async move {
4148 match repo {
4149 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4150 RepositoryState::Remote { project_id, client } => {
4151 let response = client
4152 .request(proto::GitDiff {
4153 project_id: project_id.0,
4154 repository_id: id.to_proto(),
4155 diff_type: match diff_type {
4156 DiffType::HeadToIndex => {
4157 proto::git_diff::DiffType::HeadToIndex.into()
4158 }
4159 DiffType::HeadToWorktree => {
4160 proto::git_diff::DiffType::HeadToWorktree.into()
4161 }
4162 },
4163 })
4164 .await?;
4165
4166 Ok(response.diff)
4167 }
4168 }
4169 })
4170 }
4171
4172 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4173 let id = self.id;
4174 self.send_job(
4175 Some(format!("git switch -c {branch_name}").into()),
4176 move |repo, _cx| async move {
4177 match repo {
4178 RepositoryState::Local { backend, .. } => {
4179 backend.create_branch(branch_name).await
4180 }
4181 RepositoryState::Remote { project_id, client } => {
4182 client
4183 .request(proto::GitCreateBranch {
4184 project_id: project_id.0,
4185 repository_id: id.to_proto(),
4186 branch_name,
4187 })
4188 .await?;
4189
4190 Ok(())
4191 }
4192 }
4193 },
4194 )
4195 }
4196
4197 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4198 let id = self.id;
4199 self.send_job(
4200 Some(format!("git switch {branch_name}").into()),
4201 move |repo, _cx| async move {
4202 match repo {
4203 RepositoryState::Local { backend, .. } => {
4204 backend.change_branch(branch_name).await
4205 }
4206 RepositoryState::Remote { project_id, client } => {
4207 client
4208 .request(proto::GitChangeBranch {
4209 project_id: project_id.0,
4210 repository_id: id.to_proto(),
4211 branch_name,
4212 })
4213 .await?;
4214
4215 Ok(())
4216 }
4217 }
4218 },
4219 )
4220 }
4221
4222 pub fn rename_branch(
4223 &mut self,
4224 branch: String,
4225 new_name: String,
4226 ) -> oneshot::Receiver<Result<()>> {
4227 let id = self.id;
4228 self.send_job(
4229 Some(format!("git branch -m {branch} {new_name}").into()),
4230 move |repo, _cx| async move {
4231 match repo {
4232 RepositoryState::Local { backend, .. } => {
4233 backend.rename_branch(branch, new_name).await
4234 }
4235 RepositoryState::Remote { project_id, client } => {
4236 client
4237 .request(proto::GitRenameBranch {
4238 project_id: project_id.0,
4239 repository_id: id.to_proto(),
4240 branch,
4241 new_name,
4242 })
4243 .await?;
4244
4245 Ok(())
4246 }
4247 }
4248 },
4249 )
4250 }
4251
4252 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4253 let id = self.id;
4254 self.send_job(None, move |repo, _cx| async move {
4255 match repo {
4256 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4257 RepositoryState::Remote { project_id, client } => {
4258 let response = client
4259 .request(proto::CheckForPushedCommits {
4260 project_id: project_id.0,
4261 repository_id: id.to_proto(),
4262 })
4263 .await?;
4264
4265 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4266
4267 Ok(branches)
4268 }
4269 }
4270 })
4271 }
4272
4273 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4274 self.send_job(None, |repo, _cx| async move {
4275 match repo {
4276 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4277 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4278 }
4279 })
4280 }
4281
4282 pub fn restore_checkpoint(
4283 &mut self,
4284 checkpoint: GitRepositoryCheckpoint,
4285 ) -> oneshot::Receiver<Result<()>> {
4286 self.send_job(None, move |repo, _cx| async move {
4287 match repo {
4288 RepositoryState::Local { backend, .. } => {
4289 backend.restore_checkpoint(checkpoint).await
4290 }
4291 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4292 }
4293 })
4294 }
4295
    /// Applies an `UpdateRepository` message from the remote host to this
    /// repository's snapshot.
    ///
    /// Branch, head commit, and merge-conflict state are replaced wholesale;
    /// file statuses are applied as incremental edits (removals followed by
    /// upserts). `scan_id` only advances on the final message of an update
    /// batch. Emits `RepositoryEvent::Updated` with `new_instance` = `is_new`.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        is_new: bool,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        let conflicted_paths = TreeSet::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .map(|path| RepoPath(Path::new(&path).into())),
        );
        self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
        self.snapshot.head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);

        self.snapshot.merge.conflicted_paths = conflicted_paths;

        // Statuses that dropped out of the update are removed; any entry that
        // fails to deserialize is logged and skipped rather than aborting.
        let edits = update
            .removed_statuses
            .into_iter()
            .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        self.snapshot.statuses_by_path.edit(edits, &());
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        cx.emit(RepositoryEvent::Updated {
            full_scan: true,
            new_instance: is_new,
        });
        Ok(())
    }
4339
4340 pub fn compare_checkpoints(
4341 &mut self,
4342 left: GitRepositoryCheckpoint,
4343 right: GitRepositoryCheckpoint,
4344 ) -> oneshot::Receiver<Result<bool>> {
4345 self.send_job(None, move |repo, _cx| async move {
4346 match repo {
4347 RepositoryState::Local { backend, .. } => {
4348 backend.compare_checkpoints(left, right).await
4349 }
4350 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4351 }
4352 })
4353 }
4354
4355 pub fn diff_checkpoints(
4356 &mut self,
4357 base_checkpoint: GitRepositoryCheckpoint,
4358 target_checkpoint: GitRepositoryCheckpoint,
4359 ) -> oneshot::Receiver<Result<String>> {
4360 self.send_job(None, move |repo, _cx| async move {
4361 match repo {
4362 RepositoryState::Local { backend, .. } => {
4363 backend
4364 .diff_checkpoints(base_checkpoint, target_checkpoint)
4365 .await
4366 }
4367 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4368 }
4369 })
4370 }
4371
    /// Schedules a full git status rescan on the job queue.
    ///
    /// Keyed by `GitJobKey::ReloadGitState` so pending scans can be grouped
    /// under one key. The job recomputes a snapshot from the local backend,
    /// installs it, emits the resulting events, and — when `updates_tx` is
    /// provided — forwards the snapshot to downstream collaborators. Fails
    /// for remote repositories.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // If the repository entity is gone there is nothing to scan.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local { backend, .. } = state else {
                    bail!("not a local repository")
                };
                let (snapshot, events) = this
                    .read_with(&mut cx, |this, _| {
                        compute_snapshot(
                            this.id,
                            this.work_directory_abs_path.clone(),
                            this.snapshot.clone(),
                            backend.clone(),
                        )
                    })?
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.snapshot = snapshot.clone();
                    for event in events {
                        cx.emit(event);
                    }
                })?;
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
4415
    /// Spawns the background worker loop that executes git jobs against a
    /// local repository, returning the sender used to enqueue jobs.
    ///
    /// The worker resolves the directory environment once, opens the
    /// repository backend, then drains jobs forever. Keyed jobs coalesce: a
    /// dequeued job is skipped when a later job with the same key is already
    /// waiting in the queue.
    fn spawn_local_git_worker(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        _repository_dir_abs_path: Arc<Path>,
        _common_dir_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Resolve the environment for the working directory so spawned git
            // commands see it; fall back to an empty environment on failure.
            let environment = project_environment
                .upgrade()
                .context("missing project environment")?
                .update(cx, |project_environment, cx| {
                    project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
                })?
                .await
                .unwrap_or_else(|| {
                    log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                    HashMap::default()
                });
            // Opening the repository touches the filesystem, so do it off the
            // foreground executor.
            let backend = cx
                .background_spawn(async move {
                    fs.open_repo(&dot_git_abs_path)
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                })
                .await?;

            // Register extra hosting providers for this repository — presumably
            // derived from its configuration/remotes (see `git_hosting_providers`).
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    backend.clone(),
                );
            }

            let state = RepositoryState::Local {
                backend,
                environment: Arc::new(environment),
            };
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so the keyed-job check
                // below can see the entire backlog.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    if let Some(current_key) = &job.key {
                        // A newer job with the same key supersedes this one.
                        if jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                        {
                            continue;
                        }
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: park until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
4487
    /// Spawns the background worker loop that executes git jobs against a
    /// remote (RPC-backed) repository, returning the sender used to enqueue
    /// jobs.
    ///
    /// Uses the same drain-and-coalesce strategy as the local worker: keyed
    /// jobs are skipped when a later job with the same key is already queued.
    fn spawn_remote_git_worker(
        project_id: ProjectId,
        client: AnyProtoClient,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote { project_id, client };
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so the keyed-job check
                // below can see the entire backlog.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    if let Some(current_key) = &job.key {
                        // A newer job with the same key supersedes this one.
                        if jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                        {
                            continue;
                        }
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: park until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
4525
4526 fn load_staged_text(
4527 &mut self,
4528 buffer_id: BufferId,
4529 repo_path: RepoPath,
4530 cx: &App,
4531 ) -> Task<Result<Option<String>>> {
4532 let rx = self.send_job(None, move |state, _| async move {
4533 match state {
4534 RepositoryState::Local { backend, .. } => {
4535 anyhow::Ok(backend.load_index_text(repo_path).await)
4536 }
4537 RepositoryState::Remote { project_id, client } => {
4538 let response = client
4539 .request(proto::OpenUnstagedDiff {
4540 project_id: project_id.to_proto(),
4541 buffer_id: buffer_id.to_proto(),
4542 })
4543 .await?;
4544 Ok(response.staged_text)
4545 }
4546 }
4547 });
4548 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4549 }
4550
4551 fn load_committed_text(
4552 &mut self,
4553 buffer_id: BufferId,
4554 repo_path: RepoPath,
4555 cx: &App,
4556 ) -> Task<Result<DiffBasesChange>> {
4557 let rx = self.send_job(None, move |state, _| async move {
4558 match state {
4559 RepositoryState::Local { backend, .. } => {
4560 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4561 let staged_text = backend.load_index_text(repo_path).await;
4562 let diff_bases_change = if committed_text == staged_text {
4563 DiffBasesChange::SetBoth(committed_text)
4564 } else {
4565 DiffBasesChange::SetEach {
4566 index: staged_text,
4567 head: committed_text,
4568 }
4569 };
4570 anyhow::Ok(diff_bases_change)
4571 }
4572 RepositoryState::Remote { project_id, client } => {
4573 use proto::open_uncommitted_diff_response::Mode;
4574
4575 let response = client
4576 .request(proto::OpenUncommittedDiff {
4577 project_id: project_id.to_proto(),
4578 buffer_id: buffer_id.to_proto(),
4579 })
4580 .await?;
4581 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4582 let bases = match mode {
4583 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4584 Mode::IndexAndHead => DiffBasesChange::SetEach {
4585 head: response.committed_text,
4586 index: response.staged_text,
4587 },
4588 };
4589 Ok(bases)
4590 }
4591 }
4592 });
4593
4594 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4595 }
4596
    /// Refreshes git statuses for the given paths.
    ///
    /// Paths accumulate in `paths_needing_status_update`, and the refresh job
    /// is keyed (`GitJobKey::RefreshStatuses`) so the worker coalesces rapid
    /// successive calls into one batched status query. Only valid for local
    /// repositories.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        self.paths_needing_status_update.extend(paths);

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of the accumulated paths; later calls start
                // accumulating a fresh batch.
                let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local { backend, .. } = state else {
                    bail!("not a local repository")
                };

                let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
                let statuses = backend.status(&paths).await?;

                // Diff the fresh statuses against the previous snapshot on a
                // background thread, producing a minimal set of tree edits.
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());

                        for (repo_path, status) in &*statuses.entries {
                            // Mark this path as accounted for by git's output.
                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left) {
                                // Entry exists with the same status: no edit needed.
                                if cursor.item().is_some_and(|entry| entry.status == *status) {
                                    continue;
                                }
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                            }));
                        }
                        // Paths we asked about that git reported nothing for no
                        // longer have a status; drop any stale entries for them.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
                            }
                        }
                        changed_path_statuses
                    })
                    .await;

                this.update(&mut cx, |this, cx| {
                    if !changed_path_statuses.is_empty() {
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, &());
                        this.snapshot.scan_id += 1;
                        // Best-effort downstream propagation of the new snapshot.
                        if let Some(updates_tx) = updates_tx {
                            updates_tx
                                .unbounded_send(DownstreamUpdate::UpdateRepository(
                                    this.snapshot.clone(),
                                ))
                                .ok();
                        }
                    }
                    cx.emit(RepositoryEvent::Updated {
                        full_scan: false,
                        new_instance: false,
                    });
                })
            },
        );
    }
4674
4675 /// currently running git command and when it started
4676 pub fn current_job(&self) -> Option<JobInfo> {
4677 self.active_jobs.values().next().cloned()
4678 }
4679
4680 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4681 self.send_job(None, |_, _| async {})
4682 }
4683}
4684
4685fn get_permalink_in_rust_registry_src(
4686 provider_registry: Arc<GitHostingProviderRegistry>,
4687 path: PathBuf,
4688 selection: Range<u32>,
4689) -> Result<url::Url> {
4690 #[derive(Deserialize)]
4691 struct CargoVcsGit {
4692 sha1: String,
4693 }
4694
4695 #[derive(Deserialize)]
4696 struct CargoVcsInfo {
4697 git: CargoVcsGit,
4698 path_in_vcs: String,
4699 }
4700
4701 #[derive(Deserialize)]
4702 struct CargoPackage {
4703 repository: String,
4704 }
4705
4706 #[derive(Deserialize)]
4707 struct CargoToml {
4708 package: CargoPackage,
4709 }
4710
4711 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4712 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4713 Some((dir, json))
4714 }) else {
4715 bail!("No .cargo_vcs_info.json found in parent directories")
4716 };
4717 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4718 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4719 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4720 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4721 .context("parsing package.repository field of manifest")?;
4722 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4723 let permalink = provider.build_permalink(
4724 remote,
4725 BuildPermalinkParams {
4726 sha: &cargo_vcs_info.git.sha1,
4727 path: &path.to_string_lossy(),
4728 selection: Some(selection),
4729 },
4730 );
4731 Ok(permalink)
4732}
4733
4734fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4735 let Some(blame) = blame else {
4736 return proto::BlameBufferResponse {
4737 blame_response: None,
4738 };
4739 };
4740
4741 let entries = blame
4742 .entries
4743 .into_iter()
4744 .map(|entry| proto::BlameEntry {
4745 sha: entry.sha.as_bytes().into(),
4746 start_line: entry.range.start,
4747 end_line: entry.range.end,
4748 original_line_number: entry.original_line_number,
4749 author: entry.author,
4750 author_mail: entry.author_mail,
4751 author_time: entry.author_time,
4752 author_tz: entry.author_tz,
4753 committer: entry.committer_name,
4754 committer_mail: entry.committer_email,
4755 committer_time: entry.committer_time,
4756 committer_tz: entry.committer_tz,
4757 summary: entry.summary,
4758 previous: entry.previous,
4759 filename: entry.filename,
4760 })
4761 .collect::<Vec<_>>();
4762
4763 let messages = blame
4764 .messages
4765 .into_iter()
4766 .map(|(oid, message)| proto::CommitMessage {
4767 oid: oid.as_bytes().into(),
4768 message,
4769 })
4770 .collect::<Vec<_>>();
4771
4772 proto::BlameBufferResponse {
4773 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4774 entries,
4775 messages,
4776 remote_url: blame.remote_url,
4777 }),
4778 }
4779}
4780
4781fn deserialize_blame_buffer_response(
4782 response: proto::BlameBufferResponse,
4783) -> Option<git::blame::Blame> {
4784 let response = response.blame_response?;
4785 let entries = response
4786 .entries
4787 .into_iter()
4788 .filter_map(|entry| {
4789 Some(git::blame::BlameEntry {
4790 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4791 range: entry.start_line..entry.end_line,
4792 original_line_number: entry.original_line_number,
4793 committer_name: entry.committer,
4794 committer_time: entry.committer_time,
4795 committer_tz: entry.committer_tz,
4796 committer_email: entry.committer_mail,
4797 author: entry.author,
4798 author_mail: entry.author_mail,
4799 author_time: entry.author_time,
4800 author_tz: entry.author_tz,
4801 summary: entry.summary,
4802 previous: entry.previous,
4803 filename: entry.filename,
4804 })
4805 })
4806 .collect::<Vec<_>>();
4807
4808 let messages = response
4809 .messages
4810 .into_iter()
4811 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4812 .collect::<HashMap<_, _>>();
4813
4814 Some(Blame {
4815 entries,
4816 messages,
4817 remote_url: response.remote_url,
4818 })
4819}
4820
4821fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4822 proto::Branch {
4823 is_head: branch.is_head,
4824 ref_name: branch.ref_name.to_string(),
4825 unix_timestamp: branch
4826 .most_recent_commit
4827 .as_ref()
4828 .map(|commit| commit.commit_timestamp as u64),
4829 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4830 ref_name: upstream.ref_name.to_string(),
4831 tracking: upstream
4832 .tracking
4833 .status()
4834 .map(|upstream| proto::UpstreamTracking {
4835 ahead: upstream.ahead as u64,
4836 behind: upstream.behind as u64,
4837 }),
4838 }),
4839 most_recent_commit: branch
4840 .most_recent_commit
4841 .as_ref()
4842 .map(|commit| proto::CommitSummary {
4843 sha: commit.sha.to_string(),
4844 subject: commit.subject.to_string(),
4845 commit_timestamp: commit.commit_timestamp,
4846 }),
4847 }
4848}
4849
4850fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
4851 git::repository::Branch {
4852 is_head: proto.is_head,
4853 ref_name: proto.ref_name.clone().into(),
4854 upstream: proto
4855 .upstream
4856 .as_ref()
4857 .map(|upstream| git::repository::Upstream {
4858 ref_name: upstream.ref_name.to_string().into(),
4859 tracking: upstream
4860 .tracking
4861 .as_ref()
4862 .map(|tracking| {
4863 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
4864 ahead: tracking.ahead as u32,
4865 behind: tracking.behind as u32,
4866 })
4867 })
4868 .unwrap_or(git::repository::UpstreamTracking::Gone),
4869 }),
4870 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
4871 git::repository::CommitSummary {
4872 sha: commit.sha.to_string().into(),
4873 subject: commit.subject.to_string().into(),
4874 commit_timestamp: commit.commit_timestamp,
4875 has_parent: true,
4876 }
4877 }),
4878 }
4879}
4880
4881fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
4882 proto::GitCommitDetails {
4883 sha: commit.sha.to_string(),
4884 message: commit.message.to_string(),
4885 commit_timestamp: commit.commit_timestamp,
4886 author_email: commit.author_email.to_string(),
4887 author_name: commit.author_name.to_string(),
4888 }
4889}
4890
4891fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
4892 CommitDetails {
4893 sha: proto.sha.clone().into(),
4894 message: proto.message.clone().into(),
4895 commit_timestamp: proto.commit_timestamp,
4896 author_email: proto.author_email.clone().into(),
4897 author_name: proto.author_name.clone().into(),
4898 }
4899}
4900
/// Queries the git backend and builds a fresh `RepositorySnapshot`, along
/// with the events implied by its differences from `prev_snapshot`.
///
/// Returns the snapshot (with `scan_id` bumped past the previous one) and the
/// events the caller should emit after installing it.
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    // Query statuses for the entire work directory and index them by path.
    let statuses = backend
        .status(std::slice::from_ref(&WORK_DIRECTORY_REPO_PATH))
        .await?;
    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        &(),
    );
    let (merge_details, merge_heads_changed) =
        MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
    log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");

    // Only emit an update when something observable differs from the
    // previous snapshot.
    if merge_heads_changed
        || branch != prev_snapshot.branch
        || statuses_by_path != prev_snapshot.statuses_by_path
    {
        events.push(RepositoryEvent::Updated {
            full_scan: true,
            new_instance: false,
        });
    }

    // Cache merge conflict paths so they don't change from staging/unstaging,
    // until the merge heads change (at commit time, etc.).
    if merge_heads_changed {
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    // Useful when branch is None in detached head state
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    // Used by edit prediction data collection
    let remote_origin_url = backend.remote_url("origin");
    let remote_upstream_url = backend.remote_url("upstream");

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
    };

    Ok((snapshot, events))
}
4967
4968fn status_from_proto(
4969 simple_status: i32,
4970 status: Option<proto::GitFileStatus>,
4971) -> anyhow::Result<FileStatus> {
4972 use proto::git_file_status::Variant;
4973
4974 let Some(variant) = status.and_then(|status| status.variant) else {
4975 let code = proto::GitStatus::from_i32(simple_status)
4976 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
4977 let result = match code {
4978 proto::GitStatus::Added => TrackedStatus {
4979 worktree_status: StatusCode::Added,
4980 index_status: StatusCode::Unmodified,
4981 }
4982 .into(),
4983 proto::GitStatus::Modified => TrackedStatus {
4984 worktree_status: StatusCode::Modified,
4985 index_status: StatusCode::Unmodified,
4986 }
4987 .into(),
4988 proto::GitStatus::Conflict => UnmergedStatus {
4989 first_head: UnmergedStatusCode::Updated,
4990 second_head: UnmergedStatusCode::Updated,
4991 }
4992 .into(),
4993 proto::GitStatus::Deleted => TrackedStatus {
4994 worktree_status: StatusCode::Deleted,
4995 index_status: StatusCode::Unmodified,
4996 }
4997 .into(),
4998 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
4999 };
5000 return Ok(result);
5001 };
5002
5003 let result = match variant {
5004 Variant::Untracked(_) => FileStatus::Untracked,
5005 Variant::Ignored(_) => FileStatus::Ignored,
5006 Variant::Unmerged(unmerged) => {
5007 let [first_head, second_head] =
5008 [unmerged.first_head, unmerged.second_head].map(|head| {
5009 let code = proto::GitStatus::from_i32(head)
5010 .with_context(|| format!("Invalid git status code: {head}"))?;
5011 let result = match code {
5012 proto::GitStatus::Added => UnmergedStatusCode::Added,
5013 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5014 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5015 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5016 };
5017 Ok(result)
5018 });
5019 let [first_head, second_head] = [first_head?, second_head?];
5020 UnmergedStatus {
5021 first_head,
5022 second_head,
5023 }
5024 .into()
5025 }
5026 Variant::Tracked(tracked) => {
5027 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5028 .map(|status| {
5029 let code = proto::GitStatus::from_i32(status)
5030 .with_context(|| format!("Invalid git status code: {status}"))?;
5031 let result = match code {
5032 proto::GitStatus::Modified => StatusCode::Modified,
5033 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5034 proto::GitStatus::Added => StatusCode::Added,
5035 proto::GitStatus::Deleted => StatusCode::Deleted,
5036 proto::GitStatus::Renamed => StatusCode::Renamed,
5037 proto::GitStatus::Copied => StatusCode::Copied,
5038 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5039 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5040 };
5041 Ok(result)
5042 });
5043 let [index_status, worktree_status] = [index_status?, worktree_status?];
5044 TrackedStatus {
5045 index_status,
5046 worktree_status,
5047 }
5048 .into()
5049 }
5050 };
5051 Ok(result)
5052}
5053
5054fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5055 use proto::git_file_status::{Tracked, Unmerged, Variant};
5056
5057 let variant = match status {
5058 FileStatus::Untracked => Variant::Untracked(Default::default()),
5059 FileStatus::Ignored => Variant::Ignored(Default::default()),
5060 FileStatus::Unmerged(UnmergedStatus {
5061 first_head,
5062 second_head,
5063 }) => Variant::Unmerged(Unmerged {
5064 first_head: unmerged_status_to_proto(first_head),
5065 second_head: unmerged_status_to_proto(second_head),
5066 }),
5067 FileStatus::Tracked(TrackedStatus {
5068 index_status,
5069 worktree_status,
5070 }) => Variant::Tracked(Tracked {
5071 index_status: tracked_status_to_proto(index_status),
5072 worktree_status: tracked_status_to_proto(worktree_status),
5073 }),
5074 };
5075 proto::GitFileStatus {
5076 variant: Some(variant),
5077 }
5078}
5079
5080fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5081 match code {
5082 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5083 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5084 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5085 }
5086}
5087
5088fn tracked_status_to_proto(code: StatusCode) -> i32 {
5089 match code {
5090 StatusCode::Added => proto::GitStatus::Added as _,
5091 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5092 StatusCode::Modified => proto::GitStatus::Modified as _,
5093 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5094 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5095 StatusCode::Copied => proto::GitStatus::Copied as _,
5096 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5097 }
5098}