mod conflict_set;
pub mod git_traversal;

use crate::{
    ProjectEnvironment, ProjectItem, ProjectPath,
    buffer_store::{BufferStore, BufferStoreEvent},
    worktree_store::{WorktreeStore, WorktreeStoreEvent},
};
use anyhow::{Context as _, Result, anyhow, bail};
use askpass::AskPassDelegate;
use buffer_diff::{BufferDiff, BufferDiffEvent};
use client::ProjectId;
use collections::HashMap;
pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
use fs::Fs;
use futures::{
    FutureExt, StreamExt,
    channel::{mpsc, oneshot},
    future::{self, Shared},
    stream::FuturesOrdered,
};
use git::{
    BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
    blame::Blame,
    parse_git_remote_url,
    repository::{
        Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
        GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
        ResetMode, UpstreamTrackingStatus,
    },
    status::{
        FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
    },
};
use gpui::{
    App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
    WeakEntity,
};
use language::{
    Buffer, BufferEvent, Language, LanguageRegistry,
    proto::{deserialize_version, serialize_version},
};
use parking_lot::Mutex;
use postage::stream::Stream as _;
use rpc::{
    AnyProtoClient, TypedEnvelope,
    proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
};
use serde::Deserialize;
use std::{
    cmp::Ordering,
    collections::{BTreeSet, VecDeque},
    future::Future,
    mem,
    ops::Range,
    path::{Path, PathBuf},
    sync::{
        Arc,
        atomic::{self, AtomicU64},
    },
    time::Instant,
};
use sum_tree::{Edit, SumTree, TreeSet};
use text::{Bias, BufferId};
use util::{ResultExt, debug_panic, post_inc};
use worktree::{
    File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
    UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
};

pub struct GitStore {
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    active_repo_id: Option<RepositoryId>,
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}

#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}

struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,
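    // For example, if `hunk_staging_operation_count` is 3 while
    // `hunk_staging_operation_count_as_of_write` is still 2, then index text read
    // from the repository may predate the latest staging operation and cannot yet
    // be assumed to be up to date.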

    head_text: Option<Arc<String>>,
    index_text: Option<Arc<String>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}

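/// Describes which of a buffer's diff base texts (the committed HEAD text and/or
/// the index text) changed, and what they changed to.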
#[derive(Clone, Debug)]
enum DiffBasesChange {
    SetIndex(Option<String>),
    SetHead(Option<String>),
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    SetBoth(Option<String>),
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    Unstaged,
    Uncommitted,
}

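/// How this `GitStore` accesses and shares git state: directly against the local
/// filesystem (`Local`), or through an upstream client (`Ssh`, `Remote`). The
/// `Local` and `Ssh` variants can additionally hold a downstream connection for
/// forwarding state when the project is shared.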
enum GitStoreState {
    Local {
        next_repository_id: Arc<AtomicU64>,
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Ssh {
        upstream_client: AnyProtoClient,
        upstream_project_id: ProjectId,
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: ProjectId,
    },
}

enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}

#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
}

impl StatusEntry {
    fn to_proto(&self) -> proto::StatusEntry {
        let simple_status = match self.status {
            FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
            FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
            FileStatus::Tracked(TrackedStatus {
                index_status,
                worktree_status,
            }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
                worktree_status
            } else {
                index_status
            }),
        };

        proto::StatusEntry {
            repo_path: self.repo_path.as_ref().to_proto(),
            simple_status,
            status: Some(status_to_proto(self.status)),
        }
    }
}

impl TryFrom<proto::StatusEntry> for StatusEntry {
    type Error = anyhow::Error;

    fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
        let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
        let status = status_from_proto(value.simple_status, value.status)?;
        Ok(Self { repo_path, status })
    }
}

impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.0.clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.0.clone())
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub conflicted_paths: TreeSet<RepoPath>,
    pub message: Option<SharedString>,
    pub heads: Vec<Option<SharedString>>,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
}

type JobId = u64;

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: BTreeSet<RepoPath>,
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    job_id: JobId,
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
}

impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}

#[derive(Clone)]
pub enum RepositoryState {
    Local {
        backend: Arc<dyn GitRepository>,
        environment: Arc<HashMap<String, String>>,
    },
    Remote {
        project_id: ProjectId,
        client: AnyProtoClient,
    },
}

#[derive(Clone, Debug)]
pub enum RepositoryEvent {
    Updated { full_scan: bool, new_instance: bool },
    MergeHeadsChanged,
}

#[derive(Clone, Debug)]
pub struct JobsUpdated;

#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded(RepositoryId),
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    key: Option<GitJobKey>,
}

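/// Keys for queued git jobs. Jobs that share a key (for example, repeated index
/// writes for the same `RepoPath`) operate on the same piece of repository state.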
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(RepoPath),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}

impl GitStore {
    pub fn local(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Local {
                next_repository_id: Arc::new(AtomicU64::new(1)),
                downstream: None,
                project_environment: environment,
                fs,
            },
            cx,
        )
    }

    pub fn remote(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        upstream_client: AnyProtoClient,
        project_id: ProjectId,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Remote {
                upstream_client,
                upstream_project_id: project_id,
            },
            cx,
        )
    }

    pub fn ssh(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        upstream_client: AnyProtoClient,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Ssh {
                upstream_client,
                upstream_project_id: ProjectId(SSH_PROJECT_ID),
                downstream: None,
            },
            cx,
        )
    }

    fn new(
        worktree_store: Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        state: GitStoreState,
        cx: &mut Context<Self>,
    ) -> Self {
        let _subscriptions = vec![
            cx.subscribe(&worktree_store, Self::on_worktree_store_event),
            cx.subscribe(&buffer_store, Self::on_buffer_store_event),
        ];

        GitStore {
            state,
            buffer_store,
            worktree_store,
            repositories: HashMap::default(),
            active_repo_id: None,
            _subscriptions,
            loading_diffs: HashMap::default(),
            shared_diffs: HashMap::default(),
            diffs: HashMap::default(),
        }
    }

    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
    }

    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }

    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Ssh {
                downstream: downstream_client,
                ..
            } => {
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
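                // Forward repository state to the downstream (shared) client from a
                // background task: send a full initial update for each repository, then
                // incremental updates diffed against the last snapshot that was sent.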
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
            GitStoreState::Remote { .. } => {
                debug_panic!("shared called on remote store");
            }
        }
    }

    pub fn unshared(&mut self, _cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                downstream_client.take();
            }
            GitStoreState::Ssh {
                downstream: downstream_client,
                ..
            } => {
                downstream_client.take();
            }
            GitStoreState::Remote { .. } => {
                debug_panic!("unshared called on remote store");
            }
        }
        self.shared_diffs.clear();
    }

    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }

    pub fn active_repository(&self) -> Option<Entity<Repository>> {
        self.active_repo_id
            .as_ref()
            .map(|id| self.repositories[id].clone())
    }

    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        if let Some(diff_state) = self.diffs.get(&buffer_id) {
            if let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                if let Some(task) =
                    diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
                {
                    return cx.background_executor().spawn(async move {
                        task.await;
                        Ok(unstaged_diff)
                    });
                }
                return Task::ready(Ok(unstaged_diff));
            }
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }

    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(diff_state) = self.diffs.get(&buffer_id) {
            if let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                if let Some(task) =
                    diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
                {
                    return cx.background_executor().spawn(async move {
                        task.await;
                        Ok(uncommitted_diff)
                    });
                }
                return Task::ready(Ok(uncommitted_diff));
            }
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }

    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                }

                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }

    pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
        let diff_state = self.diffs.get(&buffer_id)?;
        diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
    }

    pub fn get_uncommitted_diff(
        &self,
        buffer_id: BufferId,
        cx: &App,
    ) -> Option<Entity<BufferDiff>> {
        let diff_state = self.diffs.get(&buffer_id)?;
        diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
    }

    pub fn open_conflict_set(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Entity<ConflictSet> {
        log::debug!("open conflict set");
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(git_state) = self.diffs.get(&buffer_id) {
            if let Some(conflict_set) = git_state
                .read(cx)
                .conflict_set
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                let conflict_set = conflict_set.clone();
                let buffer_snapshot = buffer.read(cx).text_snapshot();

                git_state.update(cx, |state, cx| {
                    let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
                });

                return conflict_set;
            }
        }

        let is_unmerged = self
            .repository_and_path_for_buffer_id(buffer_id, cx)
            .map_or(false, |(repo, path)| {
                repo.read(cx).snapshot.has_conflict(&path)
            });
        let git_store = cx.weak_entity();
        let buffer_git_state = self
            .diffs
            .entry(buffer_id)
            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
        let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));

        self._subscriptions
            .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
                cx.emit(GitStoreEvent::ConflictsUpdated);
            }));

        buffer_git_state.update(cx, |state, cx| {
            state.conflict_set = Some(conflict_set.downgrade());
            let buffer_snapshot = buffer.read(cx).text_snapshot();
            let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
        });

        conflict_set
    }

    pub fn project_path_git_status(
        &self,
        project_path: &ProjectPath,
        cx: &App,
    ) -> Option<FileStatus> {
        let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
        Some(repo.read(cx).status_for_path(&repo_path)?.status)
    }

    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
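        // Kick off a checkpoint of each repository's work directory, then await them
        // all on the background executor, keyed by work directory path.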
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }

    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }

    /// Compares two checkpoints, returning true if they are equal.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    tasks.push(async move { compare.await? });
                }
            } else {
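                // The two checkpoints cover different sets of repositories, so they
                // cannot be equal.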
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }

    /// Blames a buffer.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut App,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        let content = match &version {
            Some(version) => buffer.rope_for_version(version).clone(),
            None => buffer.as_rope().clone(),
        };
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, _| async move {
                match state {
                    RepositoryState::Local { backend, .. } => backend
                        .blame(repo_path.clone(), content)
                        .await
                        .with_context(|| format!("Failed to blame {:?}", repo_path.0))
                        .map(Some),
                    RepositoryState::Remote { project_id, client } => {
                        let response = client
                            .request(proto::BlameBuffer {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                version: serialize_version(&version),
                            })
                            .await?;
                        Ok(deserialize_blame_buffer_response(response))
                    }
                }
            })
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust".into())
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
                return Task::ready(Err(anyhow!("no permalink available")));
            };
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });

            // TODO remote case
        };

        let buffer_id = buffer.read(cx).remote_id();
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local { backend, .. } => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global)?;

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        let path = repo_path.to_str().with_context(|| {
                            format!("converting repo path {repo_path:?} to string")
                        })?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams {
                                sha: &sha,
                                path,
                                selection: Some(selection),
                            },
                        ))
                    }
                    RepositoryState::Remote { project_id, client } => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

    fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
        match &self.state {
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => downstream_client
                .as_ref()
                .map(|state| (state.client.clone(), state.project_id)),
            GitStoreState::Ssh {
                downstream: downstream_client,
                ..
            } => downstream_client.clone(),
            GitStoreState::Remote { .. } => None,
        }
    }

    fn upstream_client(&self) -> Option<AnyProtoClient> {
        match &self.state {
            GitStoreState::Local { .. } => None,
            GitStoreState::Ssh {
                upstream_client, ..
            }
            | GitStoreState::Remote {
                upstream_client, ..
            } => Some(upstream_client.clone()),
        }
    }

    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            })
                            .ok();
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            _ => {}
        }
    }

    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
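        // For every buffer we track git state for, refresh its conflict tracking if
        // the buffer belongs to the repository that just changed.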
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
            {
                if buffer_repo == repo {
                    diff.update(cx, |diff, cx| {
                        if let Some(conflict_set) = &diff.conflict_set {
                            let conflict_status_changed =
                                conflict_set.update(cx, |conflict_set, cx| {
                                    let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                    conflict_set.set_has_conflict(has_conflict, cx)
                                })?;
                            if conflict_status_changed {
                                let buffer_store = self.buffer_store.read(cx);
                                if let Some(buffer) = buffer_store.get(*buffer_id) {
                                    let _ = diff.reparse_conflict_markers(
                                        buffer.read(cx).text_snapshot(),
                                        cx,
                                    );
                                }
                            }
                        }
                        anyhow::Ok(())
                    })
                    .ok();
                }
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }

    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }

    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
    fn update_repositories_from_worktree(
        &mut self,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    removed_ids.push(*id);
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        repository_dir_abs_path.clone(),
                        common_dir_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        git_store,
                        cx,
                    );
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                cx.emit(GitStoreEvent::RepositoryAdded(id));
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }

    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                cx.subscribe(&buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                self.diffs.remove(&buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }

            _ => {}
        }
    }

    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }

    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.display());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
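                    // If writing the index fails, clear the optimistically-applied pending
                    // hunk state on the diff and surface the failure as an
                    // `IndexWriteError` event.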
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }

    fn local_worktree_git_repos_changed(
        &mut self,
        worktree: Entity<Worktree>,
        changed_repos: &UpdatedGitRepositoriesSet,
        cx: &mut Context<Self>,
    ) {
        log::debug!("local worktree repos changed");
        debug_assert!(worktree.read(cx).is_local());

        for repository in self.repositories.values() {
            repository.update(cx, |repository, cx| {
                let repo_abs_path = &repository.work_directory_abs_path;
                if changed_repos.iter().any(|update| {
                    update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
                        || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
                }) {
                    repository.reload_buffer_diff_bases(cx);
                }
            });
        }
    }

    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }

    pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
        let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
        let status = repo.read(cx).snapshot.status_for_path(&path)?;
        Some(status.status)
    }

    pub fn repository_and_path_for_buffer_id(
        &self,
        buffer_id: BufferId,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let buffer = self.buffer_store.read(cx).get(buffer_id)?;
        let project_path = buffer.read(cx).project_path(cx)?;
        self.repository_and_path_for_project_path(&project_path, cx)
    }

    pub fn repository_and_path_for_project_path(
        &self,
        path: &ProjectPath,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
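        // If multiple repositories contain this path (nested repositories), pick the
        // innermost one: among work directories that are all ancestors of `abs_path`,
        // the deepest compares greatest.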
        self.repositories
            .values()
            .filter_map(|repo| {
                let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
                Some((repo.clone(), repo_path))
            })
            .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
    }

    pub fn git_init(
        &self,
        path: Arc<Path>,
        fallback_branch_name: String,
        cx: &App,
    ) -> Task<Result<()>> {
        match &self.state {
            GitStoreState::Local { fs, .. } => {
                let fs = fs.clone();
                cx.background_executor()
                    .spawn(async move { fs.git_init(&path, fallback_branch_name) })
            }
            GitStoreState::Ssh {
                upstream_client,
                upstream_project_id: project_id,
                ..
            }
            | GitStoreState::Remote {
                upstream_client,
                upstream_project_id: project_id,
                ..
            } => {
                let client = upstream_client.clone();
                let project_id = *project_id;
                cx.background_executor().spawn(async move {
                    client
                        .request(proto::GitInit {
                            project_id: project_id.0,
                            abs_path: path.to_string_lossy().to_string(),
                            fallback_branch_name,
                        })
                        .await?;
                    Ok(())
                })
            }
        }
    }

    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this
                .upstream_client()
                .context("no upstream client")?
                .clone();

            let mut is_new = false;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                is_new = true;
                let git_store = cx.weak_entity();
                cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                })
            });
            if is_new {
                this._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event))
            }

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, is_new, cx)
            })?;

            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })?
    }

    async fn handle_remove_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;
            let id = RepositoryId::from_proto(update.id);
            this.repositories.remove(&id);
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            if this.active_repo_id == Some(id) {
                this.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            cx.emit(GitStoreEvent::RepositoryRemoved(id));
        })
    }

    async fn handle_git_init(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitInit>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let name = envelope.payload.fallback_branch_name;
        cx.update(|cx| this.read(cx).git_init(path, name, cx))?
            .await?;

        Ok(proto::Ack {})
    }

    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })?
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }

    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(branch_name, remote_name, options, askpass, cx)
            })?
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }

    async fn handle_pull(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Pull>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let branch_name = envelope.payload.branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_message = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.pull(branch_name, remote_name, askpass, cx)
            })?
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_message.stdout,
            stderr: remote_message.stderr,
        })
    }

    async fn handle_stage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Stage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(PathBuf::from)
            .map(RepoPath::new)
            .collect();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stage_entries(entries, cx)
            })?
            .await?;
        Ok(proto::Ack {})
    }

    async fn handle_unstage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Unstage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(PathBuf::from)
            .map(RepoPath::new)
            .collect();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.unstage_entries(entries, cx)
            })?
            .await?;

        Ok(proto::Ack {})
    }

    async fn handle_stash(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Stash>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(PathBuf::from)
            .map(RepoPath::new)
            .collect();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_entries(entries, cx)
            })?
            .await?;

        Ok(proto::Ack {})
    }

    async fn handle_stash_pop(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashPop>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_pop(cx)
            })?
            .await?;

        Ok(proto::Ack {})
    }

    async fn handle_set_index_text(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::SetIndexText>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let repo_path = RepoPath::from_str(&envelope.payload.path);

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.spawn_set_index_text_job(
                    repo_path,
                    envelope.payload.text,
                    None,
                    cx,
                )
            })?
            .await??;
        Ok(proto::Ack {})
    }

    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let message = SharedString::from(envelope.payload.message);
        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        let options = envelope.payload.options.unwrap_or_default();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.commit(
                    message,
                    name.zip(email),
                    CommitOptions {
                        amend: options.amend,
                        signoff: options.signoff,
                    },
                    cx,
                )
            })?
            .await??;
        Ok(proto::Ack {})
    }

    async fn handle_get_remotes(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetRemotes>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetRemotesResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branch_name = envelope.payload.branch_name;

        let remotes = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.get_remotes(branch_name)
            })?
            .await??;

        Ok(proto::GetRemotesResponse {
            remotes: remotes
                .into_iter()
                .map(|remotes| proto::get_remotes_response::Remote {
                    name: remotes.name.to_string(),
                })
                .collect::<Vec<_>>(),
        })
    }

    async fn handle_get_branches(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitGetBranches>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitBranchesResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branches = repository_handle
            .update(&mut cx, |repository_handle, _| repository_handle.branches())?
            .await??;

        Ok(proto::GitBranchesResponse {
            branches: branches
                .into_iter()
                .map(|branch| branch_to_proto(&branch))
                .collect::<Vec<_>>(),
        })
    }
    async fn handle_create_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCreateBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let branch_name = envelope.payload.branch_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.create_branch(branch_name)
            })?
            .await??;

        Ok(proto::Ack {})
    }

    async fn handle_change_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitChangeBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let branch_name = envelope.payload.branch_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.change_branch(branch_name)
            })?
            .await??;

        Ok(proto::Ack {})
    }

    async fn handle_show(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitShow>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitCommitDetails> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let commit = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.show(envelope.payload.commit)
            })?
            .await??;
        Ok(proto::GitCommitDetails {
            sha: commit.sha.into(),
            message: commit.message.into(),
            commit_timestamp: commit.commit_timestamp,
            author_email: commit.author_email.into(),
            author_name: commit.author_name.into(),
        })
    }

    async fn handle_load_commit_diff(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::LoadCommitDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::LoadCommitDiffResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let commit_diff = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.load_commit_diff(envelope.payload.commit)
            })?
            .await??;
        Ok(proto::LoadCommitDiffResponse {
            files: commit_diff
                .files
                .into_iter()
                .map(|file| proto::CommitFile {
                    path: file.path.to_string(),
                    old_text: file.old_text,
                    new_text: file.new_text,
                })
                .collect(),
        })
    }

    async fn handle_reset(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitReset>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let mode = match envelope.payload.mode() {
            git_reset::ResetMode::Soft => ResetMode::Soft,
            git_reset::ResetMode::Mixed => ResetMode::Mixed,
        };

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.reset(envelope.payload.commit, mode, cx)
            })?
            .await??;
        Ok(proto::Ack {})
    }

    async fn handle_checkout_files(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCheckoutFiles>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let paths = envelope
            .payload
            .paths
            .iter()
            .map(|s| RepoPath::from_str(s))
            .collect();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
            })?
            .await??;
        Ok(proto::Ack {})
    }

    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })?
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass.ask_password(envelope.payload.prompt).await?;
2014
2015 delegates
2016 .lock()
2017 .insert(envelope.payload.askpass_id, askpass);
2018
2019 Ok(proto::AskPassResponse { response })
2020 }
2021
2022 async fn handle_check_for_pushed_commits(
2023 this: Entity<Self>,
2024 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2025 mut cx: AsyncApp,
2026 ) -> Result<proto::CheckForPushedCommitsResponse> {
2027 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2028 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2029
2030 let branches = repository_handle
2031 .update(&mut cx, |repository_handle, _| {
2032 repository_handle.check_for_pushed_commits()
2033 })?
2034 .await??;
2035 Ok(proto::CheckForPushedCommitsResponse {
2036 pushed_to: branches
2037 .into_iter()
                .map(|branch| branch.to_string())
2039 .collect(),
2040 })
2041 }
2042
2043 async fn handle_git_diff(
2044 this: Entity<Self>,
2045 envelope: TypedEnvelope<proto::GitDiff>,
2046 mut cx: AsyncApp,
2047 ) -> Result<proto::GitDiffResponse> {
2048 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2049 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2050 let diff_type = match envelope.payload.diff_type() {
2051 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2052 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2053 };
2054
2055 let mut diff = repository_handle
2056 .update(&mut cx, |repository_handle, cx| {
2057 repository_handle.diff(diff_type, cx)
2058 })?
2059 .await??;
2060 const ONE_MB: usize = 1_000_000;
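        // Truncate oversized diffs. The cap is counted in chars rather than bytes so we
        // never split a UTF-8 code point, which means the payload can still exceed
        // ONE_MB bytes for diffs containing multi-byte characters.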
2061 if diff.len() > ONE_MB {
2062 diff = diff.chars().take(ONE_MB).collect()
2063 }
2064
2065 Ok(proto::GitDiffResponse { diff })
2066 }
2067
2068 async fn handle_open_unstaged_diff(
2069 this: Entity<Self>,
2070 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2071 mut cx: AsyncApp,
2072 ) -> Result<proto::OpenUnstagedDiffResponse> {
2073 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2074 let diff = this
2075 .update(&mut cx, |this, cx| {
2076 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2077 Some(this.open_unstaged_diff(buffer, cx))
2078 })?
2079 .context("missing buffer")?
2080 .await?;
2081 this.update(&mut cx, |this, _| {
2082 let shared_diffs = this
2083 .shared_diffs
2084 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2085 .or_default();
2086 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2087 })?;
2088 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2089 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2090 }
2091
2092 async fn handle_open_uncommitted_diff(
2093 this: Entity<Self>,
2094 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2095 mut cx: AsyncApp,
2096 ) -> Result<proto::OpenUncommittedDiffResponse> {
2097 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2098 let diff = this
2099 .update(&mut cx, |this, cx| {
2100 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2101 Some(this.open_uncommitted_diff(buffer, cx))
2102 })?
2103 .context("missing buffer")?
2104 .await?;
2105 this.update(&mut cx, |this, _| {
2106 let shared_diffs = this
2107 .shared_diffs
2108 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2109 .or_default();
2110 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2111 })?;
2112 diff.read_with(&cx, |diff, cx| {
2113 use proto::open_uncommitted_diff_response::Mode;
2114
2115 let unstaged_diff = diff.secondary_diff();
2116 let index_snapshot = unstaged_diff.and_then(|diff| {
2117 let diff = diff.read(cx);
2118 diff.base_text_exists().then(|| diff.base_text())
2119 });
2120
2121 let mode;
2122 let staged_text;
2123 let committed_text;
2124 if diff.base_text_exists() {
2125 let committed_snapshot = diff.base_text();
2126 committed_text = Some(committed_snapshot.text());
2127 if let Some(index_text) = index_snapshot {
2128 if index_text.remote_id() == committed_snapshot.remote_id() {
2129 mode = Mode::IndexMatchesHead;
2130 staged_text = None;
2131 } else {
2132 mode = Mode::IndexAndHead;
2133 staged_text = Some(index_text.text());
2134 }
2135 } else {
2136 mode = Mode::IndexAndHead;
2137 staged_text = None;
2138 }
2139 } else {
2140 mode = Mode::IndexAndHead;
2141 committed_text = None;
2142 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2143 }
2144
2145 proto::OpenUncommittedDiffResponse {
2146 committed_text,
2147 staged_text,
2148 mode: mode.into(),
2149 }
2150 })
2151 }
2152
2153 async fn handle_update_diff_bases(
2154 this: Entity<Self>,
2155 request: TypedEnvelope<proto::UpdateDiffBases>,
2156 mut cx: AsyncApp,
2157 ) -> Result<()> {
2158 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2159 this.update(&mut cx, |this, cx| {
2160 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2161 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2162 let buffer = buffer.read(cx).text_snapshot();
2163 diff_state.update(cx, |diff_state, cx| {
2164 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2165 })
2166 }
2167 }
2168 })
2169 }
2170
2171 async fn handle_blame_buffer(
2172 this: Entity<Self>,
2173 envelope: TypedEnvelope<proto::BlameBuffer>,
2174 mut cx: AsyncApp,
2175 ) -> Result<proto::BlameBufferResponse> {
2176 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2177 let version = deserialize_version(&envelope.payload.version);
2178 let buffer = this.read_with(&cx, |this, cx| {
2179 this.buffer_store.read(cx).get_existing(buffer_id)
2180 })??;
2181 buffer
2182 .update(&mut cx, |buffer, _| {
2183 buffer.wait_for_version(version.clone())
2184 })?
2185 .await?;
2186 let blame = this
2187 .update(&mut cx, |this, cx| {
2188 this.blame_buffer(&buffer, Some(version), cx)
2189 })?
2190 .await?;
2191 Ok(serialize_blame_buffer_response(blame))
2192 }
2193
2194 async fn handle_get_permalink_to_line(
2195 this: Entity<Self>,
2196 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2197 mut cx: AsyncApp,
2198 ) -> Result<proto::GetPermalinkToLineResponse> {
2199 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2201 let selection = {
2202 let proto_selection = envelope
2203 .payload
2204 .selection
                .context("no selection provided to get permalink for")?;
2206 proto_selection.start as u32..proto_selection.end as u32
2207 };
2208 let buffer = this.read_with(&cx, |this, cx| {
2209 this.buffer_store.read(cx).get_existing(buffer_id)
2210 })??;
2211 let permalink = this
2212 .update(&mut cx, |this, cx| {
2213 this.get_permalink_to_line(&buffer, selection, cx)
2214 })?
2215 .await?;
2216 Ok(proto::GetPermalinkToLineResponse {
2217 permalink: permalink.to_string(),
2218 })
2219 }
2220
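    /// Resolves the repository entity referenced by an incoming RPC request,
    /// failing if the id is not (or no longer) known to this `GitStore`.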
2221 fn repository_for_request(
2222 this: &Entity<Self>,
2223 id: RepositoryId,
2224 cx: &mut AsyncApp,
2225 ) -> Result<Entity<Repository>> {
2226 this.read_with(cx, |this, _| {
2227 this.repositories
2228 .get(&id)
2229 .context("missing repository handle")
2230 .cloned()
2231 })?
2232 }
2233
2234 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2235 self.repositories
2236 .iter()
2237 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2238 .collect()
2239 }
2240
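    /// Partitions the updated worktree entries among the known repositories,
    /// assigning each absolute path to its innermost containing repository and
    /// converting it to a repo-relative path. The matching runs on the
    /// background executor and yields a map from repository to changed paths.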
2241 fn process_updated_entries(
2242 &self,
2243 worktree: &Entity<Worktree>,
2244 updated_entries: &[(Arc<Path>, ProjectEntryId, PathChange)],
2245 cx: &mut App,
2246 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2247 let mut repo_paths = self
2248 .repositories
2249 .values()
2250 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2251 .collect::<Vec<_>>();
2252 let mut entries: Vec<_> = updated_entries
2253 .iter()
2254 .map(|(path, _, _)| path.clone())
2255 .collect();
2256 entries.sort();
2257 let worktree = worktree.read(cx);
2258
2259 let entries = entries
2260 .into_iter()
2261 .filter_map(|path| worktree.absolutize(&path).ok())
2262 .collect::<Arc<[_]>>();
2263
2264 let executor = cx.background_executor().clone();
2265 cx.background_executor().spawn(async move {
2266 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2267 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2268 let mut tasks = FuturesOrdered::new();
2269 for (repo_path, repo) in repo_paths.into_iter().rev() {
2270 let entries = entries.clone();
2271 let task = executor.spawn(async move {
2272 // Find all repository paths that belong to this repo
2273 let mut ix = entries.partition_point(|path| path < &*repo_path);
2274 if ix == entries.len() {
2275 return None;
2276 };
2277
2278 let mut paths = vec![];
2279 // All paths prefixed by a given repo will constitute a continuous range.
2280 while let Some(path) = entries.get(ix)
2281 && let Some(repo_path) =
2282 RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, &path)
2283 {
2284 paths.push((repo_path, ix));
2285 ix += 1;
2286 }
2287 Some((repo, paths))
2288 });
2289 tasks.push_back(task);
2290 }
2291
2292 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2293 let mut path_was_used = vec![false; entries.len()];
2294 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were spawned from repositories in reverse path order, so more
            // deeply nested (more specific) work directories come first. We always want
            // to assign a path to its innermost repository.
2297 for t in tasks {
2298 let Some((repo, paths)) = t else {
2299 continue;
2300 };
2301 let entry = paths_by_git_repo.entry(repo).or_default();
2302 for (repo_path, ix) in paths {
2303 if path_was_used[ix] {
2304 continue;
2305 }
2306 path_was_used[ix] = true;
2307 entry.push(repo_path);
2308 }
2309 }
2310
2311 paths_by_git_repo
2312 })
2313 }
2314}
2315
2316impl BufferGitState {
2317 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2318 Self {
2319 unstaged_diff: Default::default(),
2320 uncommitted_diff: Default::default(),
2321 recalculate_diff_task: Default::default(),
2322 language: Default::default(),
2323 language_registry: Default::default(),
2324 recalculating_tx: postage::watch::channel_with(false).0,
2325 hunk_staging_operation_count: 0,
2326 hunk_staging_operation_count_as_of_write: 0,
2327 head_text: Default::default(),
2328 index_text: Default::default(),
2329 head_changed: Default::default(),
2330 index_changed: Default::default(),
2331 language_changed: Default::default(),
2332 conflict_updated_futures: Default::default(),
2333 conflict_set: Default::default(),
2334 reparse_conflict_markers_task: Default::default(),
2335 }
2336 }
2337
2338 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2339 self.language = buffer.read(cx).language().cloned();
2340 self.language_changed = true;
2341 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2342 }
2343
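    /// Re-parses git conflict markers in the given buffer snapshot on a
    /// background task, diffing against the previous conflict snapshot and
    /// updating the associated `ConflictSet`. The returned receiver fires once
    /// the update has been applied; if there is no live conflict set, or no
    /// conflicts were previously recorded, the sender is dropped without firing.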
2344 fn reparse_conflict_markers(
2345 &mut self,
2346 buffer: text::BufferSnapshot,
2347 cx: &mut Context<Self>,
2348 ) -> oneshot::Receiver<()> {
2349 let (tx, rx) = oneshot::channel();
2350
2351 let Some(conflict_set) = self
2352 .conflict_set
2353 .as_ref()
2354 .and_then(|conflict_set| conflict_set.upgrade())
2355 else {
2356 return rx;
2357 };
2358
2359 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2360 if conflict_set.has_conflict {
2361 Some(conflict_set.snapshot())
2362 } else {
2363 None
2364 }
2365 });
2366
2367 if let Some(old_snapshot) = old_snapshot {
2368 self.conflict_updated_futures.push(tx);
2369 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2370 let (snapshot, changed_range) = cx
2371 .background_spawn(async move {
2372 let new_snapshot = ConflictSet::parse(&buffer);
2373 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2374 (new_snapshot, changed_range)
2375 })
2376 .await;
2377 this.update(cx, |this, cx| {
2378 if let Some(conflict_set) = &this.conflict_set {
2379 conflict_set
2380 .update(cx, |conflict_set, cx| {
2381 conflict_set.set_snapshot(snapshot, changed_range, cx);
2382 })
2383 .ok();
2384 }
2385 let futures = std::mem::take(&mut this.conflict_updated_futures);
2386 for tx in futures {
2387 tx.send(()).ok();
2388 }
2389 })
2390 }))
2391 }
2392
2393 rx
2394 }
2395
2396 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2397 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2398 }
2399
2400 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2401 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2402 }
2403
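    /// Applies a `proto::UpdateDiffBases` message received over RPC,
    /// translating the wire `Mode` into a `DiffBasesChange` and kicking off a
    /// diff recalculation for the buffer.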
2404 fn handle_base_texts_updated(
2405 &mut self,
2406 buffer: text::BufferSnapshot,
2407 message: proto::UpdateDiffBases,
2408 cx: &mut Context<Self>,
2409 ) {
2410 use proto::update_diff_bases::Mode;
2411
2412 let Some(mode) = Mode::from_i32(message.mode) else {
2413 return;
2414 };
2415
2416 let diff_bases_change = match mode {
2417 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2418 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2419 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2420 Mode::IndexAndHead => DiffBasesChange::SetEach {
2421 index: message.staged_text,
2422 head: message.committed_text,
2423 },
2424 };
2425
2426 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2427 }
2428
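    /// Returns a future that resolves once the in-flight diff recalculation
    /// (if any) has finished, or `None` when no recalculation is running.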
2429 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2430 if *self.recalculating_tx.borrow() {
2431 let mut rx = self.recalculating_tx.subscribe();
2432 return Some(async move {
2433 loop {
2434 let is_recalculating = rx.recv().await;
2435 if is_recalculating != Some(true) {
2436 break;
2437 }
2438 }
2439 });
2440 } else {
2441 None
2442 }
2443 }
2444
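    /// Records new head and/or index base texts (normalizing their line
    /// endings) and recomputes the affected diffs for the buffer.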
2445 fn diff_bases_changed(
2446 &mut self,
2447 buffer: text::BufferSnapshot,
2448 diff_bases_change: Option<DiffBasesChange>,
2449 cx: &mut Context<Self>,
2450 ) {
2451 match diff_bases_change {
2452 Some(DiffBasesChange::SetIndex(index)) => {
2453 self.index_text = index.map(|mut index| {
2454 text::LineEnding::normalize(&mut index);
2455 Arc::new(index)
2456 });
2457 self.index_changed = true;
2458 }
2459 Some(DiffBasesChange::SetHead(head)) => {
2460 self.head_text = head.map(|mut head| {
2461 text::LineEnding::normalize(&mut head);
2462 Arc::new(head)
2463 });
2464 self.head_changed = true;
2465 }
2466 Some(DiffBasesChange::SetBoth(text)) => {
2467 let text = text.map(|mut text| {
2468 text::LineEnding::normalize(&mut text);
2469 Arc::new(text)
2470 });
2471 self.head_text = text.clone();
2472 self.index_text = text;
2473 self.head_changed = true;
2474 self.index_changed = true;
2475 }
2476 Some(DiffBasesChange::SetEach { index, head }) => {
2477 self.index_text = index.map(|mut index| {
2478 text::LineEnding::normalize(&mut index);
2479 Arc::new(index)
2480 });
2481 self.index_changed = true;
2482 self.head_text = head.map(|mut head| {
2483 text::LineEnding::normalize(&mut head);
2484 Arc::new(head)
2485 });
2486 self.head_changed = true;
2487 }
2488 None => {}
2489 }
2490
2491 self.recalculate_diffs(buffer, cx)
2492 }
2493
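    /// Spawns a task that recomputes the unstaged and uncommitted diffs
    /// against the given buffer snapshot. When the index text and head text
    /// are the same object, the uncommitted diff reuses the unstaged result.
    /// The task bails out early if new hunk staging operations were initiated
    /// since the last index write, leaving a later recalculation to settle
    /// the state once the index has caught up.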
2494 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2495 *self.recalculating_tx.borrow_mut() = true;
2496
2497 let language = self.language.clone();
2498 let language_registry = self.language_registry.clone();
2499 let unstaged_diff = self.unstaged_diff();
2500 let uncommitted_diff = self.uncommitted_diff();
2501 let head = self.head_text.clone();
2502 let index = self.index_text.clone();
2503 let index_changed = self.index_changed;
2504 let head_changed = self.head_changed;
2505 let language_changed = self.language_changed;
2506 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2507 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2508 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2509 (None, None) => true,
2510 _ => false,
2511 };
2512 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2513 log::debug!(
2514 "start recalculating diffs for buffer {}",
2515 buffer.remote_id()
2516 );
2517
2518 let mut new_unstaged_diff = None;
2519 if let Some(unstaged_diff) = &unstaged_diff {
2520 new_unstaged_diff = Some(
2521 BufferDiff::update_diff(
2522 unstaged_diff.clone(),
2523 buffer.clone(),
2524 index,
2525 index_changed,
2526 language_changed,
2527 language.clone(),
2528 language_registry.clone(),
2529 cx,
2530 )
2531 .await?,
2532 );
2533 }
2534
2535 let mut new_uncommitted_diff = None;
2536 if let Some(uncommitted_diff) = &uncommitted_diff {
2537 new_uncommitted_diff = if index_matches_head {
2538 new_unstaged_diff.clone()
2539 } else {
2540 Some(
2541 BufferDiff::update_diff(
2542 uncommitted_diff.clone(),
2543 buffer.clone(),
2544 head,
2545 head_changed,
2546 language_changed,
2547 language.clone(),
2548 language_registry.clone(),
2549 cx,
2550 )
2551 .await?,
2552 )
2553 }
2554 }
2555
2556 let cancel = this.update(cx, |this, _| {
2557 // This checks whether all pending stage/unstage operations
2558 // have quiesced (i.e. both the corresponding write and the
2559 // read of that write have completed). If not, then we cancel
2560 // this recalculation attempt to avoid invalidating pending
2561 // state too quickly; another recalculation will come along
2562 // later and clear the pending state once the state of the index has settled.
2563 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2564 *this.recalculating_tx.borrow_mut() = false;
2565 true
2566 } else {
2567 false
2568 }
2569 })?;
2570 if cancel {
2571 log::debug!(
2572 concat!(
                        "aborting recalculating diffs for buffer {} ",
                        "due to subsequent hunk operations",
2575 ),
2576 buffer.remote_id()
2577 );
2578 return Ok(());
2579 }
2580
2581 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2582 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2583 {
2584 unstaged_diff.update(cx, |diff, cx| {
2585 if language_changed {
2586 diff.language_changed(cx);
2587 }
2588 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2589 })?
2590 } else {
2591 None
2592 };
2593
2594 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2595 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2596 {
2597 uncommitted_diff.update(cx, |diff, cx| {
2598 if language_changed {
2599 diff.language_changed(cx);
2600 }
2601 diff.set_snapshot_with_secondary(
2602 new_uncommitted_diff,
2603 &buffer,
2604 unstaged_changed_range,
2605 true,
2606 cx,
2607 );
2608 })?;
2609 }
2610
2611 log::debug!(
2612 "finished recalculating diffs for buffer {}",
2613 buffer.remote_id()
2614 );
2615
2616 if let Some(this) = this.upgrade() {
2617 this.update(cx, |this, _| {
2618 this.index_changed = false;
2619 this.head_changed = false;
2620 this.language_changed = false;
2621 *this.recalculating_tx.borrow_mut() = false;
2622 })?;
2623 }
2624
2625 Ok(())
2626 }));
2627 }
2628}
2629
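/// Builds an `AskPassDelegate` that forwards credential prompts from a git
/// operation to the downstream client via `proto::AskPassRequest` and relays
/// the response back through the provided channel.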
2630fn make_remote_delegate(
2631 this: Entity<GitStore>,
2632 project_id: u64,
2633 repository_id: RepositoryId,
2634 askpass_id: u64,
2635 cx: &mut AsyncApp,
2636) -> AskPassDelegate {
2637 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2638 this.update(cx, |this, cx| {
2639 let Some((client, _)) = this.downstream_client() else {
2640 return;
2641 };
2642 let response = client.request(proto::AskPassRequest {
2643 project_id,
2644 repository_id: repository_id.to_proto(),
2645 askpass_id,
2646 prompt,
2647 });
2648 cx.spawn(async move |_, _| {
2649 tx.send(response.await?.response).ok();
2650 anyhow::Ok(())
2651 })
2652 .detach_and_log_err(cx);
2653 })
2654 .log_err();
2655 })
2656}
2657
2658impl RepositoryId {
2659 pub fn to_proto(self) -> u64 {
2660 self.0
2661 }
2662
2663 pub fn from_proto(id: u64) -> Self {
2664 RepositoryId(id)
2665 }
2666}
2667
2668impl RepositorySnapshot {
2669 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2670 Self {
2671 id,
2672 statuses_by_path: Default::default(),
2673 work_directory_abs_path,
2674 branch: None,
2675 head_commit: None,
2676 scan_id: 0,
2677 merge: Default::default(),
2678 remote_origin_url: None,
2679 remote_upstream_url: None,
2680 }
2681 }
2682
2683 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2684 proto::UpdateRepository {
2685 branch_summary: self.branch.as_ref().map(branch_to_proto),
2686 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2687 updated_statuses: self
2688 .statuses_by_path
2689 .iter()
2690 .map(|entry| entry.to_proto())
2691 .collect(),
2692 removed_statuses: Default::default(),
2693 current_merge_conflicts: self
2694 .merge
2695 .conflicted_paths
2696 .iter()
2697 .map(|repo_path| repo_path.to_proto())
2698 .collect(),
2699 project_id,
2700 id: self.id.to_proto(),
2701 abs_path: self.work_directory_abs_path.to_proto(),
2702 entry_ids: vec![self.id.to_proto()],
2703 scan_id: self.scan_id,
2704 is_last_update: true,
2705 }
2706 }
2707
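    /// Builds an incremental `proto::UpdateRepository` by walking the old and
    /// new status trees in lockstep (both are ordered by repo path): entries
    /// only in `self` or with a changed status go into `updated_statuses`,
    /// while entries only in `old` go into `removed_statuses`. Branch, head
    /// commit, and merge-conflict state are always sent in full.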
2708 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2709 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2710 let mut removed_statuses: Vec<String> = Vec::new();
2711
2712 let mut new_statuses = self.statuses_by_path.iter().peekable();
2713 let mut old_statuses = old.statuses_by_path.iter().peekable();
2714
2715 let mut current_new_entry = new_statuses.next();
2716 let mut current_old_entry = old_statuses.next();
2717 loop {
2718 match (current_new_entry, current_old_entry) {
2719 (Some(new_entry), Some(old_entry)) => {
2720 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2721 Ordering::Less => {
2722 updated_statuses.push(new_entry.to_proto());
2723 current_new_entry = new_statuses.next();
2724 }
2725 Ordering::Equal => {
2726 if new_entry.status != old_entry.status {
2727 updated_statuses.push(new_entry.to_proto());
2728 }
2729 current_old_entry = old_statuses.next();
2730 current_new_entry = new_statuses.next();
2731 }
2732 Ordering::Greater => {
2733 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2734 current_old_entry = old_statuses.next();
2735 }
2736 }
2737 }
2738 (None, Some(old_entry)) => {
2739 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2740 current_old_entry = old_statuses.next();
2741 }
2742 (Some(new_entry), None) => {
2743 updated_statuses.push(new_entry.to_proto());
2744 current_new_entry = new_statuses.next();
2745 }
2746 (None, None) => break,
2747 }
2748 }
2749
2750 proto::UpdateRepository {
2751 branch_summary: self.branch.as_ref().map(branch_to_proto),
2752 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2753 updated_statuses,
2754 removed_statuses,
2755 current_merge_conflicts: self
2756 .merge
2757 .conflicted_paths
2758 .iter()
2759 .map(|path| path.as_ref().to_proto())
2760 .collect(),
2761 project_id,
2762 id: self.id.to_proto(),
2763 abs_path: self.work_directory_abs_path.to_proto(),
2764 entry_ids: vec![],
2765 scan_id: self.scan_id,
2766 is_last_update: true,
2767 }
2768 }
2769
2770 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2771 self.statuses_by_path.iter().cloned()
2772 }
2773
2774 pub fn status_summary(&self) -> GitSummary {
2775 self.statuses_by_path.summary().item_summary
2776 }
2777
2778 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2779 self.statuses_by_path
2780 .get(&PathKey(path.0.clone()), &())
2781 .cloned()
2782 }
2783
2784 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2785 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path)
2786 }
2787
2788 #[inline]
2789 fn abs_path_to_repo_path_inner(
2790 work_directory_abs_path: &Path,
2791 abs_path: &Path,
2792 ) -> Option<RepoPath> {
2793 abs_path
2794 .strip_prefix(&work_directory_abs_path)
2795 .map(RepoPath::from)
2796 .ok()
2797 }
2798
2799 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge.conflicted_paths.contains(repo_path)
2801 }
2802
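    /// Treats a path as conflicted if `git status` currently reports it as
    /// conflicted, or if it was conflicted at the time the merge heads last
    /// changed (see `MergeDetails::load`).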
2803 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
        let had_conflict_on_last_merge_head_change =
            self.merge.conflicted_paths.contains(repo_path);
        let has_conflict_currently = self
            .status_for_path(repo_path)
            .is_some_and(|entry| entry.status.is_conflicted());
2809 had_conflict_on_last_merge_head_change || has_conflict_currently
2810 }
2811
2812 /// This is the name that will be displayed in the repository selector for this repository.
2813 pub fn display_name(&self) -> SharedString {
2814 self.work_directory_abs_path
2815 .file_name()
2816 .unwrap_or_default()
2817 .to_string_lossy()
2818 .to_string()
2819 .into()
2820 }
2821}
2822
2823impl MergeDetails {
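    /// Loads the current merge state: the merge message and the
    /// MERGE_HEAD/CHERRY_PICK_HEAD/REBASE_HEAD/REVERT_HEAD/APPLY_HEAD
    /// revisions. When the merge heads changed, the conflicted path set is
    /// recomputed from `git status`; otherwise the previous set is kept.
    /// Returns the new details along with whether the merge heads changed.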
2824 async fn load(
2825 backend: &Arc<dyn GitRepository>,
2826 status: &SumTree<StatusEntry>,
2827 prev_snapshot: &RepositorySnapshot,
2828 ) -> Result<(MergeDetails, bool)> {
2829 log::debug!("load merge details");
2830 let message = backend.merge_message().await;
2831 let heads = backend
2832 .revparse_batch(vec![
2833 "MERGE_HEAD".into(),
2834 "CHERRY_PICK_HEAD".into(),
2835 "REBASE_HEAD".into(),
2836 "REVERT_HEAD".into(),
2837 "APPLY_HEAD".into(),
2838 ])
2839 .await
2840 .log_err()
2841 .unwrap_or_default()
2842 .into_iter()
2843 .map(|opt| opt.map(SharedString::from))
2844 .collect::<Vec<_>>();
2845 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2846 let conflicted_paths = if merge_heads_changed {
2847 let current_conflicted_paths = TreeSet::from_ordered_entries(
2848 status
2849 .iter()
2850 .filter(|entry| entry.status.is_conflicted())
2851 .map(|entry| entry.repo_path.clone()),
2852 );
2853
2854 // It can happen that we run a scan while a lengthy merge is in progress
2855 // that will eventually result in conflicts, but before those conflicts
2856 // are reported by `git status`. Since for the moment we only care about
2857 // the merge heads state for the purposes of tracking conflicts, don't update
2858 // this state until we see some conflicts.
2859 if heads.iter().any(Option::is_some)
2860 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
2861 && current_conflicted_paths.is_empty()
2862 {
2863 log::debug!("not updating merge heads because no conflicts found");
2864 return Ok((
2865 MergeDetails {
2866 message: message.map(SharedString::from),
2867 ..prev_snapshot.merge.clone()
2868 },
2869 false,
2870 ));
2871 }
2872
2873 current_conflicted_paths
2874 } else {
2875 prev_snapshot.merge.conflicted_paths.clone()
2876 };
2877 let details = MergeDetails {
2878 conflicted_paths,
2879 message: message.map(SharedString::from),
2880 heads,
2881 };
2882 Ok((details, merge_heads_changed))
2883 }
2884}
2885
2886impl Repository {
2887 pub fn snapshot(&self) -> RepositorySnapshot {
2888 self.snapshot.clone()
2889 }
2890
2891 fn local(
2892 id: RepositoryId,
2893 work_directory_abs_path: Arc<Path>,
2894 dot_git_abs_path: Arc<Path>,
2895 repository_dir_abs_path: Arc<Path>,
2896 common_dir_abs_path: Arc<Path>,
2897 project_environment: WeakEntity<ProjectEnvironment>,
2898 fs: Arc<dyn Fs>,
2899 git_store: WeakEntity<GitStore>,
2900 cx: &mut Context<Self>,
2901 ) -> Self {
2902 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2903 Repository {
2904 this: cx.weak_entity(),
2905 git_store,
2906 snapshot,
2907 commit_message_buffer: None,
2908 askpass_delegates: Default::default(),
2909 paths_needing_status_update: Default::default(),
2910 latest_askpass_id: 0,
2911 job_sender: Repository::spawn_local_git_worker(
2912 work_directory_abs_path,
2913 dot_git_abs_path,
2914 repository_dir_abs_path,
2915 common_dir_abs_path,
2916 project_environment,
2917 fs,
2918 cx,
2919 ),
2920 job_id: 0,
2921 active_jobs: Default::default(),
2922 }
2923 }
2924
2925 fn remote(
2926 id: RepositoryId,
2927 work_directory_abs_path: Arc<Path>,
2928 project_id: ProjectId,
2929 client: AnyProtoClient,
2930 git_store: WeakEntity<GitStore>,
2931 cx: &mut Context<Self>,
2932 ) -> Self {
2933 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2934 Self {
2935 this: cx.weak_entity(),
2936 snapshot,
2937 commit_message_buffer: None,
2938 git_store,
2939 paths_needing_status_update: Default::default(),
2940 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2941 askpass_delegates: Default::default(),
2942 latest_askpass_id: 0,
2943 active_jobs: Default::default(),
2944 job_id: 0,
2945 }
2946 }
2947
2948 pub fn git_store(&self) -> Option<Entity<GitStore>> {
2949 self.git_store.upgrade()
2950 }
2951
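    /// Reloads the index and committed base texts for every open buffer that
    /// belongs to this repository, forwards any changed bases to the
    /// downstream client, and triggers diff recalculation for the affected
    /// buffers. Only meaningful for local repositories.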
2952 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
2953 let this = cx.weak_entity();
2954 let git_store = self.git_store.clone();
2955 let _ = self.send_keyed_job(
2956 Some(GitJobKey::ReloadBufferDiffBases),
2957 None,
2958 |state, mut cx| async move {
2959 let RepositoryState::Local { backend, .. } = state else {
2960 log::error!("tried to recompute diffs for a non-local repository");
2961 return Ok(());
2962 };
2963
2964 let Some(this) = this.upgrade() else {
2965 return Ok(());
2966 };
2967
2968 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
2969 git_store.update(cx, |git_store, cx| {
2970 git_store
2971 .diffs
2972 .iter()
2973 .filter_map(|(buffer_id, diff_state)| {
2974 let buffer_store = git_store.buffer_store.read(cx);
2975 let buffer = buffer_store.get(*buffer_id)?;
2976 let file = File::from_dyn(buffer.read(cx).file())?;
2977 let abs_path =
2978 file.worktree.read(cx).absolutize(&file.path).ok()?;
2979 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
2980 log::debug!(
2981 "start reload diff bases for repo path {}",
2982 repo_path.0.display()
2983 );
2984 diff_state.update(cx, |diff_state, _| {
2985 let has_unstaged_diff = diff_state
2986 .unstaged_diff
2987 .as_ref()
2988 .is_some_and(|diff| diff.is_upgradable());
2989 let has_uncommitted_diff = diff_state
2990 .uncommitted_diff
2991 .as_ref()
2992 .is_some_and(|set| set.is_upgradable());
2993
2994 Some((
2995 buffer,
2996 repo_path,
2997 has_unstaged_diff.then(|| diff_state.index_text.clone()),
2998 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
2999 ))
3000 })
3001 })
3002 .collect::<Vec<_>>()
3003 })
3004 })??;
3005
3006 let buffer_diff_base_changes = cx
3007 .background_spawn(async move {
3008 let mut changes = Vec::new();
3009 for (buffer, repo_path, current_index_text, current_head_text) in
3010 &repo_diff_state_updates
3011 {
3012 let index_text = if current_index_text.is_some() {
3013 backend.load_index_text(repo_path.clone()).await
3014 } else {
3015 None
3016 };
3017 let head_text = if current_head_text.is_some() {
3018 backend.load_committed_text(repo_path.clone()).await
3019 } else {
3020 None
3021 };
3022
3023 let change =
3024 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3025 (Some(current_index), Some(current_head)) => {
3026 let index_changed =
3027 index_text.as_ref() != current_index.as_deref();
3028 let head_changed =
3029 head_text.as_ref() != current_head.as_deref();
3030 if index_changed && head_changed {
3031 if index_text == head_text {
3032 Some(DiffBasesChange::SetBoth(head_text))
3033 } else {
3034 Some(DiffBasesChange::SetEach {
3035 index: index_text,
3036 head: head_text,
3037 })
3038 }
3039 } else if index_changed {
3040 Some(DiffBasesChange::SetIndex(index_text))
3041 } else if head_changed {
3042 Some(DiffBasesChange::SetHead(head_text))
3043 } else {
3044 None
3045 }
3046 }
3047 (Some(current_index), None) => {
3048 let index_changed =
3049 index_text.as_ref() != current_index.as_deref();
3050 index_changed
3051 .then_some(DiffBasesChange::SetIndex(index_text))
3052 }
3053 (None, Some(current_head)) => {
3054 let head_changed =
3055 head_text.as_ref() != current_head.as_deref();
3056 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3057 }
3058 (None, None) => None,
3059 };
3060
3061 changes.push((buffer.clone(), change))
3062 }
3063 changes
3064 })
3065 .await;
3066
3067 git_store.update(&mut cx, |git_store, cx| {
3068 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3069 let buffer_snapshot = buffer.read(cx).text_snapshot();
3070 let buffer_id = buffer_snapshot.remote_id();
3071 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3072 continue;
3073 };
3074
3075 let downstream_client = git_store.downstream_client();
3076 diff_state.update(cx, |diff_state, cx| {
3077 use proto::update_diff_bases::Mode;
3078
3079 if let Some((diff_bases_change, (client, project_id))) =
3080 diff_bases_change.clone().zip(downstream_client)
3081 {
3082 let (staged_text, committed_text, mode) = match diff_bases_change {
3083 DiffBasesChange::SetIndex(index) => {
3084 (index, None, Mode::IndexOnly)
3085 }
3086 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3087 DiffBasesChange::SetEach { index, head } => {
3088 (index, head, Mode::IndexAndHead)
3089 }
3090 DiffBasesChange::SetBoth(text) => {
3091 (None, text, Mode::IndexMatchesHead)
3092 }
3093 };
3094 client
3095 .send(proto::UpdateDiffBases {
3096 project_id: project_id.to_proto(),
3097 buffer_id: buffer_id.to_proto(),
3098 staged_text,
3099 committed_text,
3100 mode: mode as i32,
3101 })
3102 .log_err();
3103 }
3104
3105 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3106 });
3107 }
3108 })
3109 },
3110 );
3111 }
3112
3113 pub fn send_job<F, Fut, R>(
3114 &mut self,
3115 status: Option<SharedString>,
3116 job: F,
3117 ) -> oneshot::Receiver<R>
3118 where
3119 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3120 Fut: Future<Output = R> + 'static,
3121 R: Send + 'static,
3122 {
3123 self.send_keyed_job(None, status, job)
3124 }
3125
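    /// Enqueues a job on the repository's git worker. The optional key lets
    /// the worker coordinate related jobs (see `GitJobKey`), and a status
    /// message, when provided, is recorded in `active_jobs` for the duration
    /// of the job (observers are notified via `cx.notify`).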
3126 fn send_keyed_job<F, Fut, R>(
3127 &mut self,
3128 key: Option<GitJobKey>,
3129 status: Option<SharedString>,
3130 job: F,
3131 ) -> oneshot::Receiver<R>
3132 where
3133 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3134 Fut: Future<Output = R> + 'static,
3135 R: Send + 'static,
3136 {
3137 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3138 let job_id = post_inc(&mut self.job_id);
3139 let this = self.this.clone();
3140 self.job_sender
3141 .unbounded_send(GitJob {
3142 key,
3143 job: Box::new(move |state, cx: &mut AsyncApp| {
3144 let job = job(state, cx.clone());
3145 cx.spawn(async move |cx| {
3146 if let Some(s) = status.clone() {
3147 this.update(cx, |this, cx| {
3148 this.active_jobs.insert(
3149 job_id,
3150 JobInfo {
3151 start: Instant::now(),
3152 message: s.clone(),
3153 },
3154 );
3155
3156 cx.notify();
3157 })
3158 .ok();
3159 }
3160 let result = job.await;
3161
3162 this.update(cx, |this, cx| {
3163 this.active_jobs.remove(&job_id);
3164 cx.notify();
3165 })
3166 .ok();
3167
3168 result_tx.send(result).ok();
3169 })
3170 }),
3171 })
3172 .ok();
3173 result_rx
3174 }
3175
3176 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3177 let Some(git_store) = self.git_store.upgrade() else {
3178 return;
3179 };
3180 let entity = cx.entity();
3181 git_store.update(cx, |git_store, cx| {
3182 let Some((&id, _)) = git_store
3183 .repositories
3184 .iter()
3185 .find(|(_, handle)| *handle == &entity)
3186 else {
3187 return;
3188 };
3189 git_store.active_repo_id = Some(id);
3190 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3191 });
3192 }
3193
3194 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3195 self.snapshot.status()
3196 }
3197
3198 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3199 let git_store = self.git_store.upgrade()?;
3200 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3201 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
3202 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3203 Some(ProjectPath {
3204 worktree_id: worktree.read(cx).id(),
3205 path: relative_path.into(),
3206 })
3207 }
3208
3209 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3210 let git_store = self.git_store.upgrade()?;
3211 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3212 let abs_path = worktree_store.absolutize(path, cx)?;
3213 self.snapshot.abs_path_to_repo_path(&abs_path)
3214 }
3215
3216 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3217 other
3218 .read(cx)
3219 .snapshot
3220 .work_directory_abs_path
3221 .starts_with(&self.snapshot.work_directory_abs_path)
3222 }
3223
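    /// Returns the shared buffer used to compose commit messages for this
    /// repository, creating it on first use. For remote repositories the
    /// buffer is opened on the host and mirrored locally, with the
    /// "Git Commit" language applied when a language registry is available.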
3224 pub fn open_commit_buffer(
3225 &mut self,
3226 languages: Option<Arc<LanguageRegistry>>,
3227 buffer_store: Entity<BufferStore>,
3228 cx: &mut Context<Self>,
3229 ) -> Task<Result<Entity<Buffer>>> {
3230 let id = self.id;
3231 if let Some(buffer) = self.commit_message_buffer.clone() {
3232 return Task::ready(Ok(buffer));
3233 }
3234 let this = cx.weak_entity();
3235
3236 let rx = self.send_job(None, move |state, mut cx| async move {
3237 let Some(this) = this.upgrade() else {
3238 bail!("git store was dropped");
3239 };
3240 match state {
3241 RepositoryState::Local { .. } => {
3242 this.update(&mut cx, |_, cx| {
3243 Self::open_local_commit_buffer(languages, buffer_store, cx)
3244 })?
3245 .await
3246 }
3247 RepositoryState::Remote { project_id, client } => {
3248 let request = client.request(proto::OpenCommitMessageBuffer {
3249 project_id: project_id.0,
3250 repository_id: id.to_proto(),
3251 });
3252 let response = request.await.context("requesting to open commit buffer")?;
3253 let buffer_id = BufferId::new(response.buffer_id)?;
3254 let buffer = buffer_store
3255 .update(&mut cx, |buffer_store, cx| {
3256 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3257 })?
3258 .await?;
3259 if let Some(language_registry) = languages {
3260 let git_commit_language =
3261 language_registry.language_for_name("Git Commit").await?;
3262 buffer.update(&mut cx, |buffer, cx| {
3263 buffer.set_language(Some(git_commit_language), cx);
3264 })?;
3265 }
3266 this.update(&mut cx, |this, _| {
3267 this.commit_message_buffer = Some(buffer.clone());
3268 })?;
3269 Ok(buffer)
3270 }
3271 }
3272 });
3273
3274 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3275 }
3276
3277 fn open_local_commit_buffer(
3278 language_registry: Option<Arc<LanguageRegistry>>,
3279 buffer_store: Entity<BufferStore>,
3280 cx: &mut Context<Self>,
3281 ) -> Task<Result<Entity<Buffer>>> {
3282 cx.spawn(async move |repository, cx| {
3283 let buffer = buffer_store
3284 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
3285 .await?;
3286
3287 if let Some(language_registry) = language_registry {
3288 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3289 buffer.update(cx, |buffer, cx| {
3290 buffer.set_language(Some(git_commit_language), cx);
3291 })?;
3292 }
3293
3294 repository.update(cx, |repository, _| {
3295 repository.commit_message_buffer = Some(buffer.clone());
3296 })?;
3297 Ok(buffer)
3298 })
3299 }
3300
3301 pub fn checkout_files(
3302 &mut self,
3303 commit: &str,
3304 paths: Vec<RepoPath>,
3305 _cx: &mut App,
3306 ) -> oneshot::Receiver<Result<()>> {
3307 let commit = commit.to_string();
3308 let id = self.id;
3309
3310 self.send_job(
3311 Some(format!("git checkout {}", commit).into()),
3312 move |git_repo, _| async move {
3313 match git_repo {
3314 RepositoryState::Local {
3315 backend,
3316 environment,
3317 ..
3318 } => {
3319 backend
3320 .checkout_files(commit, paths, environment.clone())
3321 .await
3322 }
3323 RepositoryState::Remote { project_id, client } => {
3324 client
3325 .request(proto::GitCheckoutFiles {
3326 project_id: project_id.0,
3327 repository_id: id.to_proto(),
3328 commit,
3329 paths: paths
3330 .into_iter()
3331 .map(|p| p.to_string_lossy().to_string())
3332 .collect(),
3333 })
3334 .await?;
3335
3336 Ok(())
3337 }
3338 }
3339 },
3340 )
3341 }
3342
3343 pub fn reset(
3344 &mut self,
3345 commit: String,
3346 reset_mode: ResetMode,
3347 _cx: &mut App,
3348 ) -> oneshot::Receiver<Result<()>> {
3350 let id = self.id;
3351
3352 self.send_job(None, move |git_repo, _| async move {
3353 match git_repo {
3354 RepositoryState::Local {
3355 backend,
3356 environment,
3357 ..
3358 } => backend.reset(commit, reset_mode, environment).await,
3359 RepositoryState::Remote { project_id, client } => {
3360 client
3361 .request(proto::GitReset {
3362 project_id: project_id.0,
3363 repository_id: id.to_proto(),
3364 commit,
3365 mode: match reset_mode {
3366 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3367 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3368 },
3369 })
3370 .await?;
3371
3372 Ok(())
3373 }
3374 }
3375 })
3376 }
3377
3378 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3379 let id = self.id;
3380 self.send_job(None, move |git_repo, _cx| async move {
3381 match git_repo {
3382 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3383 RepositoryState::Remote { project_id, client } => {
3384 let resp = client
3385 .request(proto::GitShow {
3386 project_id: project_id.0,
3387 repository_id: id.to_proto(),
3388 commit,
3389 })
3390 .await?;
3391
3392 Ok(CommitDetails {
3393 sha: resp.sha.into(),
3394 message: resp.message.into(),
3395 commit_timestamp: resp.commit_timestamp,
3396 author_email: resp.author_email.into(),
3397 author_name: resp.author_name.into(),
3398 })
3399 }
3400 }
3401 })
3402 }
3403
3404 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3405 let id = self.id;
3406 self.send_job(None, move |git_repo, cx| async move {
3407 match git_repo {
3408 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3409 RepositoryState::Remote {
3410 client, project_id, ..
3411 } => {
3412 let response = client
3413 .request(proto::LoadCommitDiff {
3414 project_id: project_id.0,
3415 repository_id: id.to_proto(),
3416 commit,
3417 })
3418 .await?;
3419 Ok(CommitDiff {
3420 files: response
3421 .files
3422 .into_iter()
3423 .map(|file| CommitFile {
3424 path: Path::new(&file.path).into(),
3425 old_text: file.old_text,
3426 new_text: file.new_text,
3427 })
3428 .collect(),
3429 })
3430 }
3431 }
3432 })
3433 }
3434
3435 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3436 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3437 }
3438
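    /// Saves any open buffers for the given paths whose files exist on disk,
    /// then stages the paths, either through the local backend or via a
    /// `proto::Stage` request for remote repositories.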
3439 pub fn stage_entries(
3440 &self,
3441 entries: Vec<RepoPath>,
3442 cx: &mut Context<Self>,
3443 ) -> Task<anyhow::Result<()>> {
3444 if entries.is_empty() {
3445 return Task::ready(Ok(()));
3446 }
3447 let id = self.id;
3448
3449 let mut save_futures = Vec::new();
3450 if let Some(buffer_store) = self.buffer_store(cx) {
3451 buffer_store.update(cx, |buffer_store, cx| {
3452 for path in &entries {
3453 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3454 continue;
3455 };
3456 if let Some(buffer) = buffer_store.get_by_path(&project_path) {
3457 if buffer
3458 .read(cx)
3459 .file()
3460 .map_or(false, |file| file.disk_state().exists())
3461 {
3462 save_futures.push(buffer_store.save_buffer(buffer, cx));
3463 }
3464 }
3465 }
3466 })
3467 }
3468
3469 cx.spawn(async move |this, cx| {
3470 for save_future in save_futures {
3471 save_future.await?;
3472 }
3473
3474 this.update(cx, |this, _| {
3475 this.send_job(None, move |git_repo, _cx| async move {
3476 match git_repo {
3477 RepositoryState::Local {
3478 backend,
3479 environment,
3480 ..
3481 } => backend.stage_paths(entries, environment.clone()).await,
3482 RepositoryState::Remote { project_id, client } => {
3483 client
3484 .request(proto::Stage {
3485 project_id: project_id.0,
3486 repository_id: id.to_proto(),
3487 paths: entries
3488 .into_iter()
3489 .map(|repo_path| repo_path.as_ref().to_proto())
3490 .collect(),
3491 })
3492 .await
3493 .context("sending stage request")?;
3494
3495 Ok(())
3496 }
3497 }
3498 })
3499 })?
3500 .await??;
3501
3502 Ok(())
3503 })
3504 }
3505
3506 pub fn unstage_entries(
3507 &self,
3508 entries: Vec<RepoPath>,
3509 cx: &mut Context<Self>,
3510 ) -> Task<anyhow::Result<()>> {
3511 if entries.is_empty() {
3512 return Task::ready(Ok(()));
3513 }
3514 let id = self.id;
3515
3516 let mut save_futures = Vec::new();
3517 if let Some(buffer_store) = self.buffer_store(cx) {
3518 buffer_store.update(cx, |buffer_store, cx| {
3519 for path in &entries {
3520 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3521 continue;
3522 };
3523 if let Some(buffer) = buffer_store.get_by_path(&project_path) {
3524 if buffer
3525 .read(cx)
3526 .file()
3527 .map_or(false, |file| file.disk_state().exists())
3528 {
3529 save_futures.push(buffer_store.save_buffer(buffer, cx));
3530 }
3531 }
3532 }
3533 })
3534 }
3535
3536 cx.spawn(async move |this, cx| {
3537 for save_future in save_futures {
3538 save_future.await?;
3539 }
3540
3541 this.update(cx, |this, _| {
3542 this.send_job(None, move |git_repo, _cx| async move {
3543 match git_repo {
3544 RepositoryState::Local {
3545 backend,
3546 environment,
3547 ..
3548 } => backend.unstage_paths(entries, environment).await,
3549 RepositoryState::Remote { project_id, client } => {
3550 client
3551 .request(proto::Unstage {
3552 project_id: project_id.0,
3553 repository_id: id.to_proto(),
3554 paths: entries
3555 .into_iter()
3556 .map(|repo_path| repo_path.as_ref().to_proto())
3557 .collect(),
3558 })
3559 .await
3560 .context("sending unstage request")?;
3561
3562 Ok(())
3563 }
3564 }
3565 })
3566 })?
3567 .await??;
3568
3569 Ok(())
3570 })
3571 }
3572
3573 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3574 let to_stage = self
3575 .cached_status()
3576 .filter(|entry| !entry.status.staging().is_fully_staged())
3577 .map(|entry| entry.repo_path.clone())
3578 .collect();
3579 self.stage_entries(to_stage, cx)
3580 }
3581
3582 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3583 let to_unstage = self
3584 .cached_status()
3585 .filter(|entry| entry.status.staging().has_staged())
3586 .map(|entry| entry.repo_path.clone())
3587 .collect();
3588 self.unstage_entries(to_unstage, cx)
3589 }
3590
3591 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3592 let to_stash = self
3593 .cached_status()
3594 .map(|entry| entry.repo_path.clone())
3595 .collect();
3596
3597 self.stash_entries(to_stash, cx)
3598 }
3599
3600 pub fn stash_entries(
3601 &mut self,
3602 entries: Vec<RepoPath>,
3603 cx: &mut Context<Self>,
3604 ) -> Task<anyhow::Result<()>> {
3605 let id = self.id;
3606
3607 cx.spawn(async move |this, cx| {
3608 this.update(cx, |this, _| {
3609 this.send_job(None, move |git_repo, _cx| async move {
3610 match git_repo {
3611 RepositoryState::Local {
3612 backend,
3613 environment,
3614 ..
3615 } => backend.stash_paths(entries, environment).await,
3616 RepositoryState::Remote { project_id, client } => {
3617 client
3618 .request(proto::Stash {
3619 project_id: project_id.0,
3620 repository_id: id.to_proto(),
3621 paths: entries
3622 .into_iter()
3623 .map(|repo_path| repo_path.as_ref().to_proto())
3624 .collect(),
3625 })
3626 .await
3627 .context("sending stash request")?;
3628 Ok(())
3629 }
3630 }
3631 })
3632 })?
3633 .await??;
3634 Ok(())
3635 })
3636 }
3637
3638 pub fn stash_pop(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3639 let id = self.id;
3640 cx.spawn(async move |this, cx| {
3641 this.update(cx, |this, _| {
3642 this.send_job(None, move |git_repo, _cx| async move {
3643 match git_repo {
3644 RepositoryState::Local {
3645 backend,
3646 environment,
3647 ..
3648 } => backend.stash_pop(environment).await,
3649 RepositoryState::Remote { project_id, client } => {
3650 client
3651 .request(proto::StashPop {
3652 project_id: project_id.0,
3653 repository_id: id.to_proto(),
3654 })
3655 .await
3656 .context("sending stash pop request")?;
3657 Ok(())
3658 }
3659 }
3660 })
3661 })?
3662 .await??;
3663 Ok(())
3664 })
3665 }
3666
3667 pub fn commit(
3668 &mut self,
3669 message: SharedString,
3670 name_and_email: Option<(SharedString, SharedString)>,
3671 options: CommitOptions,
3672 _cx: &mut App,
3673 ) -> oneshot::Receiver<Result<()>> {
3674 let id = self.id;
3675
3676 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3677 match git_repo {
3678 RepositoryState::Local {
3679 backend,
3680 environment,
3681 ..
3682 } => {
3683 backend
3684 .commit(message, name_and_email, options, environment)
3685 .await
3686 }
3687 RepositoryState::Remote { project_id, client } => {
3688 let (name, email) = name_and_email.unzip();
3689 client
3690 .request(proto::Commit {
3691 project_id: project_id.0,
3692 repository_id: id.to_proto(),
3693 message: String::from(message),
3694 name: name.map(String::from),
3695 email: email.map(String::from),
3696 options: Some(proto::commit::CommitOptions {
3697 amend: options.amend,
3698 signoff: options.signoff,
3699 }),
3700 })
3701 .await
3702 .context("sending commit request")?;
3703
3704 Ok(())
3705 }
3706 }
3707 })
3708 }
3709
3710 pub fn fetch(
3711 &mut self,
3712 fetch_options: FetchOptions,
3713 askpass: AskPassDelegate,
3714 _cx: &mut App,
3715 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3716 let askpass_delegates = self.askpass_delegates.clone();
3717 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3718 let id = self.id;
3719
3720 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3721 match git_repo {
3722 RepositoryState::Local {
3723 backend,
3724 environment,
3725 ..
3726 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3727 RepositoryState::Remote { project_id, client } => {
3728 askpass_delegates.lock().insert(askpass_id, askpass);
3729 let _defer = util::defer(|| {
3730 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3731 debug_assert!(askpass_delegate.is_some());
3732 });
3733
3734 let response = client
3735 .request(proto::Fetch {
3736 project_id: project_id.0,
3737 repository_id: id.to_proto(),
3738 askpass_id,
3739 remote: fetch_options.to_proto(),
3740 })
3741 .await
3742 .context("sending fetch request")?;
3743
3744 Ok(RemoteCommandOutput {
3745 stdout: response.stdout,
3746 stderr: response.stderr,
3747 })
3748 }
3749 }
3750 })
3751 }
3752
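    /// Pushes `branch` to `remote`, reporting progress as a status job. On a
    /// successful local push the cached head branch is refreshed and the
    /// updated snapshot is forwarded to any downstream collaborators.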
3753 pub fn push(
3754 &mut self,
3755 branch: SharedString,
3756 remote: SharedString,
3757 options: Option<PushOptions>,
3758 askpass: AskPassDelegate,
3759 cx: &mut Context<Self>,
3760 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3761 let askpass_delegates = self.askpass_delegates.clone();
3762 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3763 let id = self.id;
3764
3765 let args = options
3766 .map(|option| match option {
3767 PushOptions::SetUpstream => " --set-upstream",
3768 PushOptions::Force => " --force-with-lease",
3769 })
3770 .unwrap_or("");
3771
3772 let updates_tx = self
3773 .git_store()
3774 .and_then(|git_store| match &git_store.read(cx).state {
3775 GitStoreState::Local { downstream, .. } => downstream
3776 .as_ref()
3777 .map(|downstream| downstream.updates_tx.clone()),
3778 _ => None,
3779 });
3780
3781 let this = cx.weak_entity();
3782 self.send_job(
            Some(format!("git push{} {} {}", args, branch, remote).into()),
3784 move |git_repo, mut cx| async move {
3785 match git_repo {
3786 RepositoryState::Local {
3787 backend,
3788 environment,
3789 ..
3790 } => {
3791 let result = backend
3792 .push(
3793 branch.to_string(),
3794 remote.to_string(),
3795 options,
3796 askpass,
3797 environment.clone(),
3798 cx.clone(),
3799 )
3800 .await;
3801 if result.is_ok() {
3802 let branches = backend.branches().await?;
3803 let branch = branches.into_iter().find(|branch| branch.is_head);
                            log::info!("head branch after push is {branch:?}");
3805 let snapshot = this.update(&mut cx, |this, cx| {
3806 this.snapshot.branch = branch;
3807 let snapshot = this.snapshot.clone();
3808 cx.emit(RepositoryEvent::Updated {
3809 full_scan: false,
3810 new_instance: false,
3811 });
3812 snapshot
3813 })?;
3814 if let Some(updates_tx) = updates_tx {
3815 updates_tx
3816 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3817 .ok();
3818 }
3819 }
3820 result
3821 }
3822 RepositoryState::Remote { project_id, client } => {
3823 askpass_delegates.lock().insert(askpass_id, askpass);
3824 let _defer = util::defer(|| {
3825 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3826 debug_assert!(askpass_delegate.is_some());
3827 });
3828 let response = client
3829 .request(proto::Push {
3830 project_id: project_id.0,
3831 repository_id: id.to_proto(),
3832 askpass_id,
3833 branch_name: branch.to_string(),
3834 remote_name: remote.to_string(),
3835 options: options.map(|options| match options {
3836 PushOptions::Force => proto::push::PushOptions::Force,
3837 PushOptions::SetUpstream => {
3838 proto::push::PushOptions::SetUpstream
3839 }
3840 }
3841 as i32),
3842 })
3843 .await
3844 .context("sending push request")?;
3845
3846 Ok(RemoteCommandOutput {
3847 stdout: response.stdout,
3848 stderr: response.stderr,
3849 })
3850 }
3851 }
3852 },
3853 )
3854 }
3855
3856 pub fn pull(
3857 &mut self,
3858 branch: SharedString,
3859 remote: SharedString,
3860 askpass: AskPassDelegate,
3861 _cx: &mut App,
3862 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3863 let askpass_delegates = self.askpass_delegates.clone();
3864 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3865 let id = self.id;
3866
3867 self.send_job(
3868 Some(format!("git pull {} {}", remote, branch).into()),
3869 move |git_repo, cx| async move {
3870 match git_repo {
3871 RepositoryState::Local {
3872 backend,
3873 environment,
3874 ..
3875 } => {
3876 backend
3877 .pull(
3878 branch.to_string(),
3879 remote.to_string(),
3880 askpass,
3881 environment.clone(),
3882 cx,
3883 )
3884 .await
3885 }
3886 RepositoryState::Remote { project_id, client } => {
3887 askpass_delegates.lock().insert(askpass_id, askpass);
3888 let _defer = util::defer(|| {
3889 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3890 debug_assert!(askpass_delegate.is_some());
3891 });
3892 let response = client
3893 .request(proto::Pull {
3894 project_id: project_id.0,
3895 repository_id: id.to_proto(),
3896 askpass_id,
3897 branch_name: branch.to_string(),
3898 remote_name: remote.to_string(),
3899 })
3900 .await
3901 .context("sending pull request")?;
3902
3903 Ok(RemoteCommandOutput {
3904 stdout: response.stdout,
3905 stderr: response.stderr,
3906 })
3907 }
3908 }
3909 },
3910 )
3911 }
3912
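    /// Queues a write of `content` to the index entry for `path`, keyed per
    /// path so that overlapping writes to the same entry are coalesced. When
    /// `hunk_staging_operation_count` is provided, it is recorded on the
    /// buffer's diff state after the write completes so that later index
    /// reads can be reconciled with in-flight hunk staging operations.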
3913 fn spawn_set_index_text_job(
3914 &mut self,
3915 path: RepoPath,
3916 content: Option<String>,
3917 hunk_staging_operation_count: Option<usize>,
3918 cx: &mut Context<Self>,
3919 ) -> oneshot::Receiver<anyhow::Result<()>> {
3920 let id = self.id;
3921 let this = cx.weak_entity();
3922 let git_store = self.git_store.clone();
3923 self.send_keyed_job(
3924 Some(GitJobKey::WriteIndex(path.clone())),
3925 None,
3926 move |git_repo, mut cx| async move {
3927 log::debug!("start updating index text for buffer {}", path.display());
3928 match git_repo {
3929 RepositoryState::Local {
3930 backend,
3931 environment,
3932 ..
3933 } => {
3934 backend
3935 .set_index_text(path.clone(), content, environment.clone())
3936 .await?;
3937 }
3938 RepositoryState::Remote { project_id, client } => {
3939 client
3940 .request(proto::SetIndexText {
3941 project_id: project_id.0,
3942 repository_id: id.to_proto(),
3943 path: path.as_ref().to_proto(),
3944 text: content,
3945 })
3946 .await?;
3947 }
3948 }
3949 log::debug!("finish updating index text for buffer {}", path.display());
3950
3951 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
3952 let project_path = this
3953 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
3954 .ok()
3955 .flatten();
3956 git_store.update(&mut cx, |git_store, cx| {
3957 let buffer_id = git_store
3958 .buffer_store
3959 .read(cx)
3960 .get_by_path(&project_path?)?
3961 .read(cx)
3962 .remote_id();
3963 let diff_state = git_store.diffs.get(&buffer_id)?;
3964 diff_state.update(cx, |diff_state, _| {
3965 diff_state.hunk_staging_operation_count_as_of_write =
3966 hunk_staging_operation_count;
3967 });
3968 Some(())
3969 })?;
3970 }
3971 Ok(())
3972 },
3973 )
3974 }
3975
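    /// Returns the repository's remotes. When `branch_name` is provided, the
    /// backend may narrow the result to the remotes associated with that
    /// branch.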
3976 pub fn get_remotes(
3977 &mut self,
3978 branch_name: Option<String>,
3979 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
3980 let id = self.id;
3981 self.send_job(None, move |repo, _cx| async move {
3982 match repo {
3983 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
3984 RepositoryState::Remote { project_id, client } => {
3985 let response = client
3986 .request(proto::GetRemotes {
3987 project_id: project_id.0,
3988 repository_id: id.to_proto(),
3989 branch_name,
3990 })
3991 .await?;
3992
3993 let remotes = response
3994 .remotes
3995 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
3999 .collect();
4000
4001 Ok(remotes)
4002 }
4003 }
4004 })
4005 }
4006
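    /// Lists the repository's branches, from the local backend or via a
    /// `proto::GitGetBranches` request for remote repositories.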
4007 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4008 let id = self.id;
4009 self.send_job(None, move |repo, _| async move {
4010 match repo {
4011 RepositoryState::Local { backend, .. } => backend.branches().await,
4012 RepositoryState::Remote { project_id, client } => {
4013 let response = client
4014 .request(proto::GitGetBranches {
4015 project_id: project_id.0,
4016 repository_id: id.to_proto(),
4017 })
4018 .await?;
4019
4020 let branches = response
4021 .branches
4022 .into_iter()
4023 .map(|branch| proto_to_branch(&branch))
4024 .collect();
4025
4026 Ok(branches)
4027 }
4028 }
4029 })
4030 }
4031
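    /// Returns the repository's default branch, if the backend can determine
    /// one.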
4032 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4033 let id = self.id;
4034 self.send_job(None, move |repo, _| async move {
4035 match repo {
4036 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4037 RepositoryState::Remote { project_id, client } => {
4038 let response = client
4039 .request(proto::GetDefaultBranch {
4040 project_id: project_id.0,
4041 repository_id: id.to_proto(),
4042 })
4043 .await?;
4044
4045 anyhow::Ok(response.branch.map(SharedString::from))
4046 }
4047 }
4048 })
4049 }
4050
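    /// Produces a textual diff for the whole repository, comparing HEAD to
    /// either the index or the working tree depending on `diff_type`.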
4051 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4052 let id = self.id;
4053 self.send_job(None, move |repo, _cx| async move {
4054 match repo {
4055 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4056 RepositoryState::Remote { project_id, client } => {
4057 let response = client
4058 .request(proto::GitDiff {
4059 project_id: project_id.0,
4060 repository_id: id.to_proto(),
4061 diff_type: match diff_type {
4062 DiffType::HeadToIndex => {
4063 proto::git_diff::DiffType::HeadToIndex.into()
4064 }
4065 DiffType::HeadToWorktree => {
4066 proto::git_diff::DiffType::HeadToWorktree.into()
4067 }
4068 },
4069 })
4070 .await?;
4071
4072 Ok(response.diff)
4073 }
4074 }
4075 })
4076 }
4077
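    /// Creates the branch `branch_name`, surfaced in the UI as
    /// `git switch -c <branch_name>`.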
4078 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4079 let id = self.id;
4080 self.send_job(
4081 Some(format!("git switch -c {branch_name}").into()),
4082 move |repo, _cx| async move {
4083 match repo {
4084 RepositoryState::Local { backend, .. } => {
4085 backend.create_branch(branch_name).await
4086 }
4087 RepositoryState::Remote { project_id, client } => {
4088 client
4089 .request(proto::GitCreateBranch {
4090 project_id: project_id.0,
4091 repository_id: id.to_proto(),
4092 branch_name,
4093 })
4094 .await?;
4095
4096 Ok(())
4097 }
4098 }
4099 },
4100 )
4101 }
4102
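    /// Switches to the branch `branch_name`, surfaced in the UI as
    /// `git switch <branch_name>`.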
4103 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4104 let id = self.id;
4105 self.send_job(
4106 Some(format!("git switch {branch_name}").into()),
4107 move |repo, _cx| async move {
4108 match repo {
4109 RepositoryState::Local { backend, .. } => {
4110 backend.change_branch(branch_name).await
4111 }
4112 RepositoryState::Remote { project_id, client } => {
4113 client
4114 .request(proto::GitChangeBranch {
4115 project_id: project_id.0,
4116 repository_id: id.to_proto(),
4117 branch_name,
4118 })
4119 .await?;
4120
4121 Ok(())
4122 }
4123 }
4124 },
4125 )
4126 }
4127
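    /// Determines which remote branches the current commit has already been
    /// pushed to.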
4128 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4129 let id = self.id;
4130 self.send_job(None, move |repo, _cx| async move {
4131 match repo {
4132 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4133 RepositoryState::Remote { project_id, client } => {
4134 let response = client
4135 .request(proto::CheckForPushedCommits {
4136 project_id: project_id.0,
4137 repository_id: id.to_proto(),
4138 })
4139 .await?;
4140
4141 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4142
4143 Ok(branches)
4144 }
4145 }
4146 })
4147 }
4148
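    /// Captures a checkpoint of the repository's current state. Only
    /// implemented for local repositories.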
4149 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4150 self.send_job(None, |repo, _cx| async move {
4151 match repo {
4152 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4153 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4154 }
4155 })
4156 }
4157
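    /// Restores the repository to a previously captured checkpoint. Only
    /// implemented for local repositories.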
4158 pub fn restore_checkpoint(
4159 &mut self,
4160 checkpoint: GitRepositoryCheckpoint,
4161 ) -> oneshot::Receiver<Result<()>> {
4162 self.send_job(None, move |repo, _cx| async move {
4163 match repo {
4164 RepositoryState::Local { backend, .. } => {
4165 backend.restore_checkpoint(checkpoint).await
4166 }
4167 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4168 }
4169 })
4170 }
4171
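    /// Applies an `UpdateRepository` message received from the host to this
    /// replica's snapshot: branch, head commit, merge-conflict paths, and any
    /// added, changed, or removed path statuses.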
4172 pub(crate) fn apply_remote_update(
4173 &mut self,
4174 update: proto::UpdateRepository,
4175 is_new: bool,
4176 cx: &mut Context<Self>,
4177 ) -> Result<()> {
4178 let conflicted_paths = TreeSet::from_ordered_entries(
4179 update
4180 .current_merge_conflicts
4181 .into_iter()
4182 .map(|path| RepoPath(Path::new(&path).into())),
4183 );
4184 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4185 self.snapshot.head_commit = update
4186 .head_commit_details
4187 .as_ref()
4188 .map(proto_to_commit_details);
4189
4190 self.snapshot.merge.conflicted_paths = conflicted_paths;
4191
4192 let edits = update
4193 .removed_statuses
4194 .into_iter()
4195 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
4196 .chain(
4197 update
4198 .updated_statuses
4199 .into_iter()
4200 .filter_map(|updated_status| {
4201 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4202 }),
4203 )
4204 .collect::<Vec<_>>();
4205 self.snapshot.statuses_by_path.edit(edits, &());
4206 if update.is_last_update {
4207 self.snapshot.scan_id = update.scan_id;
4208 }
4209 cx.emit(RepositoryEvent::Updated {
4210 full_scan: true,
4211 new_instance: is_new,
4212 });
4213 Ok(())
4214 }
4215
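    /// Compares two repository checkpoints. Only implemented for local
    /// repositories.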
4216 pub fn compare_checkpoints(
4217 &mut self,
4218 left: GitRepositoryCheckpoint,
4219 right: GitRepositoryCheckpoint,
4220 ) -> oneshot::Receiver<Result<bool>> {
4221 self.send_job(None, move |repo, _cx| async move {
4222 match repo {
4223 RepositoryState::Local { backend, .. } => {
4224 backend.compare_checkpoints(left, right).await
4225 }
4226 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4227 }
4228 })
4229 }
4230
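    /// Produces a diff between two repository checkpoints. Only implemented
    /// for local repositories.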
4231 pub fn diff_checkpoints(
4232 &mut self,
4233 base_checkpoint: GitRepositoryCheckpoint,
4234 target_checkpoint: GitRepositoryCheckpoint,
4235 ) -> oneshot::Receiver<Result<String>> {
4236 self.send_job(None, move |repo, _cx| async move {
4237 match repo {
4238 RepositoryState::Local { backend, .. } => {
4239 backend
4240 .diff_checkpoints(base_checkpoint, target_checkpoint)
4241 .await
4242 }
4243 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4244 }
4245 })
4246 }
4247
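    /// Schedules a full git status scan, keyed so that redundant scans are
    /// coalesced. The resulting snapshot is applied to this repository and,
    /// when `updates_tx` is provided, forwarded downstream.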
4248 fn schedule_scan(
4249 &mut self,
4250 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4251 cx: &mut Context<Self>,
4252 ) {
4253 let this = cx.weak_entity();
4254 let _ = self.send_keyed_job(
4255 Some(GitJobKey::ReloadGitState),
4256 None,
4257 |state, mut cx| async move {
4258 log::debug!("run scheduled git status scan");
4259
4260 let Some(this) = this.upgrade() else {
4261 return Ok(());
4262 };
4263 let RepositoryState::Local { backend, .. } = state else {
4264 bail!("not a local repository")
4265 };
4266 let (snapshot, events) = this
4267 .read_with(&mut cx, |this, _| {
4268 compute_snapshot(
4269 this.id,
4270 this.work_directory_abs_path.clone(),
4271 this.snapshot.clone(),
4272 backend.clone(),
4273 )
4274 })?
4275 .await?;
4276 this.update(&mut cx, |this, cx| {
4277 this.snapshot = snapshot.clone();
4278 for event in events {
4279 cx.emit(event);
4280 }
4281 })?;
4282 if let Some(updates_tx) = updates_tx {
4283 updates_tx
4284 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4285 .ok();
4286 }
4287 Ok(())
4288 },
4289 );
4290 }
4291
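    /// Spawns the background task that owns the local git backend and
    /// processes queued `GitJob`s one at a time, returning the sender used to
    /// enqueue jobs. A keyed job is skipped when a newer job with the same
    /// key is already waiting in the queue.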
4292 fn spawn_local_git_worker(
4293 work_directory_abs_path: Arc<Path>,
4294 dot_git_abs_path: Arc<Path>,
4295 _repository_dir_abs_path: Arc<Path>,
4296 _common_dir_abs_path: Arc<Path>,
4297 project_environment: WeakEntity<ProjectEnvironment>,
4298 fs: Arc<dyn Fs>,
4299 cx: &mut Context<Self>,
4300 ) -> mpsc::UnboundedSender<GitJob> {
4301 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4302
4303 cx.spawn(async move |_, cx| {
4304 let environment = project_environment
4305 .upgrade()
4306 .context("missing project environment")?
4307 .update(cx, |project_environment, cx| {
4308 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4309 })?
4310 .await
4311 .unwrap_or_else(|| {
4312 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4313 HashMap::default()
4314 });
4315 let backend = cx
4316 .background_spawn(async move {
4317 fs.open_repo(&dot_git_abs_path)
4318 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4319 })
4320 .await?;
4321
4322 if let Some(git_hosting_provider_registry) =
4323 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4324 {
4325 git_hosting_providers::register_additional_providers(
4326 git_hosting_provider_registry,
4327 backend.clone(),
4328 );
4329 }
4330
4331 let state = RepositoryState::Local {
4332 backend,
4333 environment: Arc::new(environment),
4334 };
4335 let mut jobs = VecDeque::new();
4336 loop {
4337 while let Ok(Some(next_job)) = job_rx.try_next() {
4338 jobs.push_back(next_job);
4339 }
4340
4341 if let Some(job) = jobs.pop_front() {
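                    // If a newer job with the same key is already queued, skip this one;
                    // only the most recent keyed job needs to run.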
4342 if let Some(current_key) = &job.key {
4343 if jobs
4344 .iter()
4345 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4346 {
4347 continue;
4348 }
4349 }
4350 (job.job)(state.clone(), cx).await;
4351 } else if let Some(job) = job_rx.next().await {
4352 jobs.push_back(job);
4353 } else {
4354 break;
4355 }
4356 }
4357 anyhow::Ok(())
4358 })
4359 .detach_and_log_err(cx);
4360
4361 job_tx
4362 }
4363
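    /// Spawns the background task that processes queued `GitJob`s for a
    /// remote (collaborative) repository by issuing RPC requests to the host,
    /// applying the same keyed-job de-duplication as the local worker.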
4364 fn spawn_remote_git_worker(
4365 project_id: ProjectId,
4366 client: AnyProtoClient,
4367 cx: &mut Context<Self>,
4368 ) -> mpsc::UnboundedSender<GitJob> {
4369 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4370
4371 cx.spawn(async move |_, cx| {
4372 let state = RepositoryState::Remote { project_id, client };
4373 let mut jobs = VecDeque::new();
4374 loop {
4375 while let Ok(Some(next_job)) = job_rx.try_next() {
4376 jobs.push_back(next_job);
4377 }
4378
4379 if let Some(job) = jobs.pop_front() {
4380 if let Some(current_key) = &job.key {
4381 if jobs
4382 .iter()
4383 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4384 {
4385 continue;
4386 }
4387 }
4388 (job.job)(state.clone(), cx).await;
4389 } else if let Some(job) = job_rx.next().await {
4390 jobs.push_back(job);
4391 } else {
4392 break;
4393 }
4394 }
4395 anyhow::Ok(())
4396 })
4397 .detach_and_log_err(cx);
4398
4399 job_tx
4400 }
4401
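    /// Loads the index (staged) text for the given path, from the local
    /// backend or via a `proto::OpenUnstagedDiff` request for remote
    /// repositories.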
4402 fn load_staged_text(
4403 &mut self,
4404 buffer_id: BufferId,
4405 repo_path: RepoPath,
4406 cx: &App,
4407 ) -> Task<Result<Option<String>>> {
4408 let rx = self.send_job(None, move |state, _| async move {
4409 match state {
4410 RepositoryState::Local { backend, .. } => {
4411 anyhow::Ok(backend.load_index_text(repo_path).await)
4412 }
4413 RepositoryState::Remote { project_id, client } => {
4414 let response = client
4415 .request(proto::OpenUnstagedDiff {
4416 project_id: project_id.to_proto(),
4417 buffer_id: buffer_id.to_proto(),
4418 })
4419 .await?;
4420 Ok(response.staged_text)
4421 }
4422 }
4423 });
4424 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4425 }
4426
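    /// Loads the committed (HEAD) and index texts for the given path as a
    /// `DiffBasesChange`, collapsing them into a single value when the index
    /// matches HEAD.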
4427 fn load_committed_text(
4428 &mut self,
4429 buffer_id: BufferId,
4430 repo_path: RepoPath,
4431 cx: &App,
4432 ) -> Task<Result<DiffBasesChange>> {
4433 let rx = self.send_job(None, move |state, _| async move {
4434 match state {
4435 RepositoryState::Local { backend, .. } => {
4436 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4437 let staged_text = backend.load_index_text(repo_path).await;
4438 let diff_bases_change = if committed_text == staged_text {
4439 DiffBasesChange::SetBoth(committed_text)
4440 } else {
4441 DiffBasesChange::SetEach {
4442 index: staged_text,
4443 head: committed_text,
4444 }
4445 };
4446 anyhow::Ok(diff_bases_change)
4447 }
4448 RepositoryState::Remote { project_id, client } => {
4449 use proto::open_uncommitted_diff_response::Mode;
4450
4451 let response = client
4452 .request(proto::OpenUncommittedDiff {
4453 project_id: project_id.to_proto(),
4454 buffer_id: buffer_id.to_proto(),
4455 })
4456 .await?;
4457 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4458 let bases = match mode {
4459 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4460 Mode::IndexAndHead => DiffBasesChange::SetEach {
4461 head: response.committed_text,
4462 index: response.staged_text,
4463 },
4464 };
4465 Ok(bases)
4466 }
4467 }
4468 });
4469
4470 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4471 }
4472
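    /// Notes that the given paths may have changed on disk and schedules a
    /// keyed job that re-queries just their statuses, emitting an update (and
    /// forwarding the new snapshot downstream) when anything actually differs.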
4473 fn paths_changed(
4474 &mut self,
4475 paths: Vec<RepoPath>,
4476 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4477 cx: &mut Context<Self>,
4478 ) {
4479 self.paths_needing_status_update.extend(paths);
4480
4481 let this = cx.weak_entity();
4482 let _ = self.send_keyed_job(
4483 Some(GitJobKey::RefreshStatuses),
4484 None,
4485 |state, mut cx| async move {
4486 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4487 (
4488 this.snapshot.clone(),
4489 mem::take(&mut this.paths_needing_status_update),
4490 )
4491 })?;
4492 let RepositoryState::Local { backend, .. } = state else {
4493 bail!("not a local repository")
4494 };
4495
4496 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4497 let statuses = backend.status(&paths).await?;
4498
4499 let changed_path_statuses = cx
4500 .background_spawn(async move {
4501 let mut changed_path_statuses = Vec::new();
4502 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4503 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4504
4505 for (repo_path, status) in &*statuses.entries {
4506 changed_paths.remove(repo_path);
                        if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                            && cursor.item().is_some_and(|entry| entry.status == *status)
                        {
                            continue;
                        }
4512
4513 changed_path_statuses.push(Edit::Insert(StatusEntry {
4514 repo_path: repo_path.clone(),
4515 status: *status,
4516 }));
4517 }
4518 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4519 for path in changed_paths.into_iter() {
4520 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4521 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4522 }
4523 }
4524 changed_path_statuses
4525 })
4526 .await;
4527
4528 this.update(&mut cx, |this, cx| {
4529 if !changed_path_statuses.is_empty() {
4530 this.snapshot
4531 .statuses_by_path
4532 .edit(changed_path_statuses, &());
4533 this.snapshot.scan_id += 1;
4534 if let Some(updates_tx) = updates_tx {
4535 updates_tx
4536 .unbounded_send(DownstreamUpdate::UpdateRepository(
4537 this.snapshot.clone(),
4538 ))
4539 .ok();
4540 }
4541 }
4542 cx.emit(RepositoryEvent::Updated {
4543 full_scan: false,
4544 new_instance: false,
4545 });
4546 })
4547 },
4548 );
4549 }
4550
    /// Returns the currently running git command and when it started, if any.
4552 pub fn current_job(&self) -> Option<JobInfo> {
4553 self.active_jobs.values().next().cloned()
4554 }
4555
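    /// Enqueues a no-op job; the returned receiver resolves once every job
    /// enqueued before it has been processed.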
4556 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4557 self.send_job(None, |_, _| async {})
4558 }
4559}
4560
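/// Builds a permalink to a file vendored in the local Cargo registry source,
/// using the crate's `.cargo_vcs_info.json` and `Cargo.toml` to recover the
/// upstream repository URL, commit SHA, and path within the repository.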
4561fn get_permalink_in_rust_registry_src(
4562 provider_registry: Arc<GitHostingProviderRegistry>,
4563 path: PathBuf,
4564 selection: Range<u32>,
4565) -> Result<url::Url> {
4566 #[derive(Deserialize)]
4567 struct CargoVcsGit {
4568 sha1: String,
4569 }
4570
4571 #[derive(Deserialize)]
4572 struct CargoVcsInfo {
4573 git: CargoVcsGit,
4574 path_in_vcs: String,
4575 }
4576
4577 #[derive(Deserialize)]
4578 struct CargoPackage {
4579 repository: String,
4580 }
4581
4582 #[derive(Deserialize)]
4583 struct CargoToml {
4584 package: CargoPackage,
4585 }
4586
4587 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4588 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4589 Some((dir, json))
4590 }) else {
4591 bail!("No .cargo_vcs_info.json found in parent directories")
4592 };
4593 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4594 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4595 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4596 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4597 .context("parsing package.repository field of manifest")?;
4598 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4599 let permalink = provider.build_permalink(
4600 remote,
4601 BuildPermalinkParams {
4602 sha: &cargo_vcs_info.git.sha1,
4603 path: &path.to_string_lossy(),
4604 selection: Some(selection),
4605 },
4606 );
4607 Ok(permalink)
4608}
4609
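/// Converts an optional `git::blame::Blame` into its protobuf representation.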
4610fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4611 let Some(blame) = blame else {
4612 return proto::BlameBufferResponse {
4613 blame_response: None,
4614 };
4615 };
4616
4617 let entries = blame
4618 .entries
4619 .into_iter()
4620 .map(|entry| proto::BlameEntry {
4621 sha: entry.sha.as_bytes().into(),
4622 start_line: entry.range.start,
4623 end_line: entry.range.end,
4624 original_line_number: entry.original_line_number,
4625 author: entry.author,
4626 author_mail: entry.author_mail,
4627 author_time: entry.author_time,
4628 author_tz: entry.author_tz,
4629 committer: entry.committer_name,
4630 committer_mail: entry.committer_email,
4631 committer_time: entry.committer_time,
4632 committer_tz: entry.committer_tz,
4633 summary: entry.summary,
4634 previous: entry.previous,
4635 filename: entry.filename,
4636 })
4637 .collect::<Vec<_>>();
4638
4639 let messages = blame
4640 .messages
4641 .into_iter()
4642 .map(|(oid, message)| proto::CommitMessage {
4643 oid: oid.as_bytes().into(),
4644 message,
4645 })
4646 .collect::<Vec<_>>();
4647
4648 proto::BlameBufferResponse {
4649 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4650 entries,
4651 messages,
4652 remote_url: blame.remote_url,
4653 }),
4654 }
4655}
4656
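/// Reconstructs a `git::blame::Blame` from its protobuf representation,
/// skipping entries whose commit SHAs fail to parse.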
4657fn deserialize_blame_buffer_response(
4658 response: proto::BlameBufferResponse,
4659) -> Option<git::blame::Blame> {
4660 let response = response.blame_response?;
4661 let entries = response
4662 .entries
4663 .into_iter()
4664 .filter_map(|entry| {
4665 Some(git::blame::BlameEntry {
4666 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4667 range: entry.start_line..entry.end_line,
4668 original_line_number: entry.original_line_number,
4669 committer_name: entry.committer,
4670 committer_time: entry.committer_time,
4671 committer_tz: entry.committer_tz,
4672 committer_email: entry.committer_mail,
4673 author: entry.author,
4674 author_mail: entry.author_mail,
4675 author_time: entry.author_time,
4676 author_tz: entry.author_tz,
4677 summary: entry.summary,
4678 previous: entry.previous,
4679 filename: entry.filename,
4680 })
4681 })
4682 .collect::<Vec<_>>();
4683
4684 let messages = response
4685 .messages
4686 .into_iter()
4687 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4688 .collect::<HashMap<_, _>>();
4689
4690 Some(Blame {
4691 entries,
4692 messages,
4693 remote_url: response.remote_url,
4694 })
4695}
4696
4697fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4698 proto::Branch {
4699 is_head: branch.is_head,
4700 ref_name: branch.ref_name.to_string(),
4701 unix_timestamp: branch
4702 .most_recent_commit
4703 .as_ref()
4704 .map(|commit| commit.commit_timestamp as u64),
4705 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4706 ref_name: upstream.ref_name.to_string(),
4707 tracking: upstream
4708 .tracking
4709 .status()
4710 .map(|upstream| proto::UpstreamTracking {
4711 ahead: upstream.ahead as u64,
4712 behind: upstream.behind as u64,
4713 }),
4714 }),
4715 most_recent_commit: branch
4716 .most_recent_commit
4717 .as_ref()
4718 .map(|commit| proto::CommitSummary {
4719 sha: commit.sha.to_string(),
4720 subject: commit.subject.to_string(),
4721 commit_timestamp: commit.commit_timestamp,
4722 }),
4723 }
4724}
4725
4726fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
4727 git::repository::Branch {
4728 is_head: proto.is_head,
4729 ref_name: proto.ref_name.clone().into(),
4730 upstream: proto
4731 .upstream
4732 .as_ref()
4733 .map(|upstream| git::repository::Upstream {
4734 ref_name: upstream.ref_name.to_string().into(),
4735 tracking: upstream
4736 .tracking
4737 .as_ref()
4738 .map(|tracking| {
4739 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
4740 ahead: tracking.ahead as u32,
4741 behind: tracking.behind as u32,
4742 })
4743 })
4744 .unwrap_or(git::repository::UpstreamTracking::Gone),
4745 }),
4746 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
4747 git::repository::CommitSummary {
4748 sha: commit.sha.to_string().into(),
4749 subject: commit.subject.to_string().into(),
4750 commit_timestamp: commit.commit_timestamp,
4751 has_parent: true,
4752 }
4753 }),
4754 }
4755}
4756
4757fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
4758 proto::GitCommitDetails {
4759 sha: commit.sha.to_string(),
4760 message: commit.message.to_string(),
4761 commit_timestamp: commit.commit_timestamp,
4762 author_email: commit.author_email.to_string(),
4763 author_name: commit.author_name.to_string(),
4764 }
4765}
4766
4767fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
4768 CommitDetails {
4769 sha: proto.sha.clone().into(),
4770 message: proto.message.clone().into(),
4771 commit_timestamp: proto.commit_timestamp,
4772 author_email: proto.author_email.clone().into(),
4773 author_name: proto.author_name.clone().into(),
4774 }
4775}
4776
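/// Recomputes the repository snapshot (branch, head commit, statuses, and
/// merge details) from the git backend, returning it along with the
/// repository events that the changes imply.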
4777async fn compute_snapshot(
4778 id: RepositoryId,
4779 work_directory_abs_path: Arc<Path>,
4780 prev_snapshot: RepositorySnapshot,
4781 backend: Arc<dyn GitRepository>,
4782) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
4783 let mut events = Vec::new();
4784 let branches = backend.branches().await?;
4785 let branch = branches.into_iter().find(|branch| branch.is_head);
4786 let statuses = backend
4787 .status(std::slice::from_ref(&WORK_DIRECTORY_REPO_PATH))
4788 .await?;
4789 let statuses_by_path = SumTree::from_iter(
4790 statuses
4791 .entries
4792 .iter()
4793 .map(|(repo_path, status)| StatusEntry {
4794 repo_path: repo_path.clone(),
4795 status: *status,
4796 }),
4797 &(),
4798 );
4799 let (merge_details, merge_heads_changed) =
4800 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
4801 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
4802
4803 if merge_heads_changed
4804 || branch != prev_snapshot.branch
4805 || statuses_by_path != prev_snapshot.statuses_by_path
4806 {
4807 events.push(RepositoryEvent::Updated {
4808 full_scan: true,
4809 new_instance: false,
4810 });
4811 }
4812
    // Cache merge conflict paths so that they don't change due to staging or
    // unstaging, until the merge heads change (at commit time, etc.).
4815 if merge_heads_changed {
4816 events.push(RepositoryEvent::MergeHeadsChanged);
4817 }
4818
    // Useful when `branch` is None, e.g. in a detached HEAD state.
4820 let head_commit = match backend.head_sha().await {
4821 Some(head_sha) => backend.show(head_sha).await.log_err(),
4822 None => None,
4823 };
4824
4825 // Used by edit prediction data collection
4826 let remote_origin_url = backend.remote_url("origin");
4827 let remote_upstream_url = backend.remote_url("upstream");
4828
4829 let snapshot = RepositorySnapshot {
4830 id,
4831 statuses_by_path,
4832 work_directory_abs_path,
4833 scan_id: prev_snapshot.scan_id + 1,
4834 branch,
4835 head_commit,
4836 merge: merge_details,
4837 remote_origin_url,
4838 remote_upstream_url,
4839 };
4840
4841 Ok((snapshot, events))
4842}
4843
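/// Decodes a `FileStatus` from its protobuf representation, falling back to
/// the legacy `simple_status` code when no structured variant is present.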
4844fn status_from_proto(
4845 simple_status: i32,
4846 status: Option<proto::GitFileStatus>,
4847) -> anyhow::Result<FileStatus> {
4848 use proto::git_file_status::Variant;
4849
4850 let Some(variant) = status.and_then(|status| status.variant) else {
4851 let code = proto::GitStatus::from_i32(simple_status)
4852 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
4853 let result = match code {
4854 proto::GitStatus::Added => TrackedStatus {
4855 worktree_status: StatusCode::Added,
4856 index_status: StatusCode::Unmodified,
4857 }
4858 .into(),
4859 proto::GitStatus::Modified => TrackedStatus {
4860 worktree_status: StatusCode::Modified,
4861 index_status: StatusCode::Unmodified,
4862 }
4863 .into(),
4864 proto::GitStatus::Conflict => UnmergedStatus {
4865 first_head: UnmergedStatusCode::Updated,
4866 second_head: UnmergedStatusCode::Updated,
4867 }
4868 .into(),
4869 proto::GitStatus::Deleted => TrackedStatus {
4870 worktree_status: StatusCode::Deleted,
4871 index_status: StatusCode::Unmodified,
4872 }
4873 .into(),
4874 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
4875 };
4876 return Ok(result);
4877 };
4878
4879 let result = match variant {
4880 Variant::Untracked(_) => FileStatus::Untracked,
4881 Variant::Ignored(_) => FileStatus::Ignored,
4882 Variant::Unmerged(unmerged) => {
4883 let [first_head, second_head] =
4884 [unmerged.first_head, unmerged.second_head].map(|head| {
4885 let code = proto::GitStatus::from_i32(head)
4886 .with_context(|| format!("Invalid git status code: {head}"))?;
4887 let result = match code {
4888 proto::GitStatus::Added => UnmergedStatusCode::Added,
4889 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
4890 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
4891 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
4892 };
4893 Ok(result)
4894 });
4895 let [first_head, second_head] = [first_head?, second_head?];
4896 UnmergedStatus {
4897 first_head,
4898 second_head,
4899 }
4900 .into()
4901 }
4902 Variant::Tracked(tracked) => {
4903 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
4904 .map(|status| {
4905 let code = proto::GitStatus::from_i32(status)
4906 .with_context(|| format!("Invalid git status code: {status}"))?;
4907 let result = match code {
4908 proto::GitStatus::Modified => StatusCode::Modified,
4909 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
4910 proto::GitStatus::Added => StatusCode::Added,
4911 proto::GitStatus::Deleted => StatusCode::Deleted,
4912 proto::GitStatus::Renamed => StatusCode::Renamed,
4913 proto::GitStatus::Copied => StatusCode::Copied,
4914 proto::GitStatus::Unmodified => StatusCode::Unmodified,
4915 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
4916 };
4917 Ok(result)
4918 });
4919 let [index_status, worktree_status] = [index_status?, worktree_status?];
4920 TrackedStatus {
4921 index_status,
4922 worktree_status,
4923 }
4924 .into()
4925 }
4926 };
4927 Ok(result)
4928}
4929
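/// Encodes a `FileStatus` into its protobuf representation.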
4930fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
4931 use proto::git_file_status::{Tracked, Unmerged, Variant};
4932
4933 let variant = match status {
4934 FileStatus::Untracked => Variant::Untracked(Default::default()),
4935 FileStatus::Ignored => Variant::Ignored(Default::default()),
4936 FileStatus::Unmerged(UnmergedStatus {
4937 first_head,
4938 second_head,
4939 }) => Variant::Unmerged(Unmerged {
4940 first_head: unmerged_status_to_proto(first_head),
4941 second_head: unmerged_status_to_proto(second_head),
4942 }),
4943 FileStatus::Tracked(TrackedStatus {
4944 index_status,
4945 worktree_status,
4946 }) => Variant::Tracked(Tracked {
4947 index_status: tracked_status_to_proto(index_status),
4948 worktree_status: tracked_status_to_proto(worktree_status),
4949 }),
4950 };
4951 proto::GitFileStatus {
4952 variant: Some(variant),
4953 }
4954}
4955
4956fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
4957 match code {
4958 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
4959 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
4960 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
4961 }
4962}
4963
4964fn tracked_status_to_proto(code: StatusCode) -> i32 {
4965 match code {
4966 StatusCode::Added => proto::GitStatus::Added as _,
4967 StatusCode::Deleted => proto::GitStatus::Deleted as _,
4968 StatusCode::Modified => proto::GitStatus::Modified as _,
4969 StatusCode::Renamed => proto::GitStatus::Renamed as _,
4970 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
4971 StatusCode::Copied => proto::GitStatus::Copied as _,
4972 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
4973 }
4974}