1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::AskPassDelegate;
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 status::{
32 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
33 },
34};
35use gpui::{
36 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
37 WeakEntity,
38};
39use language::{
40 Buffer, BufferEvent, Language, LanguageRegistry,
41 proto::{deserialize_version, serialize_version},
42};
43use parking_lot::Mutex;
44use postage::stream::Stream as _;
45use rpc::{
46 AnyProtoClient, TypedEnvelope,
47 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
48};
49use serde::Deserialize;
50use std::{
51 cmp::Ordering,
52 collections::{BTreeSet, VecDeque},
53 future::Future,
54 mem,
55 ops::Range,
56 path::{Path, PathBuf},
57 sync::{
58 Arc,
59 atomic::{self, AtomicU64},
60 },
61 time::Instant,
62};
63use sum_tree::{Edit, SumTree, TreeSet};
64use text::{Bias, BufferId};
65use util::{ResultExt, debug_panic, post_inc};
66use worktree::{
67 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
68 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
69};
70
71pub struct GitStore {
72 state: GitStoreState,
73 buffer_store: Entity<BufferStore>,
74 worktree_store: Entity<WorktreeStore>,
75 repositories: HashMap<RepositoryId, Entity<Repository>>,
76 active_repo_id: Option<RepositoryId>,
77 #[allow(clippy::type_complexity)]
78 loading_diffs:
79 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
80 diffs: HashMap<BufferId, Entity<BufferGitState>>,
81 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
82 _subscriptions: Vec<Subscription>,
83}
84
85#[derive(Default)]
86struct SharedDiffs {
87 unstaged: Option<Entity<BufferDiff>>,
88 uncommitted: Option<Entity<BufferDiff>>,
89}
90
91struct BufferGitState {
92 unstaged_diff: Option<WeakEntity<BufferDiff>>,
93 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
94 conflict_set: Option<WeakEntity<ConflictSet>>,
95 recalculate_diff_task: Option<Task<Result<()>>>,
96 reparse_conflict_markers_task: Option<Task<Result<()>>>,
97 language: Option<Arc<Language>>,
98 language_registry: Option<Arc<LanguageRegistry>>,
99 conflict_updated_futures: Vec<oneshot::Sender<()>>,
100 recalculating_tx: postage::watch::Sender<bool>,
101
102 /// These operation counts are used to ensure that head and index text
103 /// values read from the git repository are up-to-date with any hunk staging
104 /// operations that have been performed on the BufferDiff.
105 ///
106 /// The operation count is incremented immediately when the user initiates a
107 /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, `hunk_staging_operation_count_as_of_write` is updated to reflect
109 /// the operation count that prompted the write.
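    ///
    /// For example, if a second hunk is staged while the index write for the
    /// first is still in flight, `hunk_staging_operation_count` is already 2
    /// while `hunk_staging_operation_count_as_of_write` is still 0, so any
    /// index text read back from disk in that window is known not to reflect
    /// the latest staging operations yet.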
110 hunk_staging_operation_count: usize,
111 hunk_staging_operation_count_as_of_write: usize,
112
113 head_text: Option<Arc<String>>,
114 index_text: Option<Arc<String>>,
115 head_changed: bool,
116 index_changed: bool,
117 language_changed: bool,
118}
119
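/// Describes which diff base texts changed for a buffer: the index text, the
/// HEAD text, each of them independently, or both set to the same value.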
120#[derive(Clone, Debug)]
121enum DiffBasesChange {
122 SetIndex(Option<String>),
123 SetHead(Option<String>),
124 SetEach {
125 index: Option<String>,
126 head: Option<String>,
127 },
128 SetBoth(Option<String>),
129}
130
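/// The two kinds of diff tracked per buffer: `Unstaged` compares the buffer
/// contents against the index, `Uncommitted` compares them against HEAD.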
131#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
132enum DiffKind {
133 Unstaged,
134 Uncommitted,
135}
136
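/// Where this store's git data lives: on the local filesystem, on an SSH
/// remote host (whose data may additionally be re-shared downstream), or in
/// another collaborator's project reached only through an upstream client.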
137enum GitStoreState {
138 Local {
139 next_repository_id: Arc<AtomicU64>,
140 downstream: Option<LocalDownstreamState>,
141 project_environment: Entity<ProjectEnvironment>,
142 fs: Arc<dyn Fs>,
143 },
144 Ssh {
145 upstream_client: AnyProtoClient,
146 upstream_project_id: ProjectId,
147 downstream: Option<(AnyProtoClient, ProjectId)>,
148 },
149 Remote {
150 upstream_client: AnyProtoClient,
151 upstream_project_id: ProjectId,
152 },
153}
154
155enum DownstreamUpdate {
156 UpdateRepository(RepositorySnapshot),
157 RemoveRepository(RepositoryId),
158}
159
160struct LocalDownstreamState {
161 client: AnyProtoClient,
162 project_id: ProjectId,
163 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
164 _task: Task<Result<()>>,
165}
166
167#[derive(Clone, Debug)]
168pub struct GitStoreCheckpoint {
169 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
170}
171
172#[derive(Clone, Debug, PartialEq, Eq)]
173pub struct StatusEntry {
174 pub repo_path: RepoPath,
175 pub status: FileStatus,
176}
177
178impl StatusEntry {
179 fn to_proto(&self) -> proto::StatusEntry {
180 let simple_status = match self.status {
181 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
182 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
183 FileStatus::Tracked(TrackedStatus {
184 index_status,
185 worktree_status,
186 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
187 worktree_status
188 } else {
189 index_status
190 }),
191 };
192
193 proto::StatusEntry {
194 repo_path: self.repo_path.as_ref().to_proto(),
195 simple_status,
196 status: Some(status_to_proto(self.status)),
197 }
198 }
199}
200
201impl TryFrom<proto::StatusEntry> for StatusEntry {
202 type Error = anyhow::Error;
203
204 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
205 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
206 let status = status_from_proto(value.simple_status, value.status)?;
207 Ok(Self { repo_path, status })
208 }
209}
210
211impl sum_tree::Item for StatusEntry {
212 type Summary = PathSummary<GitSummary>;
213
214 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
215 PathSummary {
216 max_path: self.repo_path.0.clone(),
217 item_summary: self.status.summary(),
218 }
219 }
220}
221
222impl sum_tree::KeyedItem for StatusEntry {
223 type Key = PathKey;
224
225 fn key(&self) -> Self::Key {
226 PathKey(self.repo_path.0.clone())
227 }
228}
229
230#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
231pub struct RepositoryId(pub u64);
232
233#[derive(Clone, Debug, Default, PartialEq, Eq)]
234pub struct MergeDetails {
235 pub conflicted_paths: TreeSet<RepoPath>,
236 pub message: Option<SharedString>,
237 pub heads: Vec<Option<SharedString>>,
238}
239
240#[derive(Clone, Debug, PartialEq, Eq)]
241pub struct RepositorySnapshot {
242 pub id: RepositoryId,
243 pub statuses_by_path: SumTree<StatusEntry>,
244 pub work_directory_abs_path: Arc<Path>,
245 pub branch: Option<Branch>,
246 pub head_commit: Option<CommitDetails>,
247 pub scan_id: u64,
248 pub merge: MergeDetails,
249}
250
251type JobId = u64;
252
253#[derive(Clone, Debug, PartialEq, Eq)]
254pub struct JobInfo {
255 pub start: Instant,
256 pub message: SharedString,
257}
258
259pub struct Repository {
260 this: WeakEntity<Self>,
261 snapshot: RepositorySnapshot,
262 commit_message_buffer: Option<Entity<Buffer>>,
263 git_store: WeakEntity<GitStore>,
264 // For a local repository, holds paths that have had worktree events since the last status scan completed,
265 // and that should be examined during the next status scan.
266 paths_needing_status_update: BTreeSet<RepoPath>,
267 job_sender: mpsc::UnboundedSender<GitJob>,
268 active_jobs: HashMap<JobId, JobInfo>,
269 job_id: JobId,
270 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
271 latest_askpass_id: u64,
272}
273
274impl std::ops::Deref for Repository {
275 type Target = RepositorySnapshot;
276
277 fn deref(&self) -> &Self::Target {
278 &self.snapshot
279 }
280}
281
282#[derive(Clone)]
283pub enum RepositoryState {
284 Local {
285 backend: Arc<dyn GitRepository>,
286 environment: Arc<HashMap<String, String>>,
287 },
288 Remote {
289 project_id: ProjectId,
290 client: AnyProtoClient,
291 },
292}
293
294#[derive(Clone, Debug)]
295pub enum RepositoryEvent {
296 Updated { full_scan: bool, new_instance: bool },
297 MergeHeadsChanged,
298}
299
300#[derive(Clone, Debug)]
301pub struct JobsUpdated;
302
303#[derive(Debug)]
304pub enum GitStoreEvent {
305 ActiveRepositoryChanged(Option<RepositoryId>),
306 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
307 RepositoryAdded(RepositoryId),
308 RepositoryRemoved(RepositoryId),
309 IndexWriteError(anyhow::Error),
310 JobsUpdated,
311 ConflictsUpdated,
312}
313
314impl EventEmitter<RepositoryEvent> for Repository {}
315impl EventEmitter<JobsUpdated> for Repository {}
316impl EventEmitter<GitStoreEvent> for GitStore {}
317
318pub struct GitJob {
319 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
320 key: Option<GitJobKey>,
321}
322
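/// Identifies a class of queued git jobs (for example, successive index writes
/// for the same path) so that related jobs can be recognized and redundant
/// work avoided.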
323#[derive(PartialEq, Eq)]
324enum GitJobKey {
325 WriteIndex(RepoPath),
326 ReloadBufferDiffBases,
327 RefreshStatuses,
328 ReloadGitState,
329}
330
331impl GitStore {
332 pub fn local(
333 worktree_store: &Entity<WorktreeStore>,
334 buffer_store: Entity<BufferStore>,
335 environment: Entity<ProjectEnvironment>,
336 fs: Arc<dyn Fs>,
337 cx: &mut Context<Self>,
338 ) -> Self {
339 Self::new(
340 worktree_store.clone(),
341 buffer_store,
342 GitStoreState::Local {
343 next_repository_id: Arc::new(AtomicU64::new(1)),
344 downstream: None,
345 project_environment: environment,
346 fs,
347 },
348 cx,
349 )
350 }
351
352 pub fn remote(
353 worktree_store: &Entity<WorktreeStore>,
354 buffer_store: Entity<BufferStore>,
355 upstream_client: AnyProtoClient,
356 project_id: ProjectId,
357 cx: &mut Context<Self>,
358 ) -> Self {
359 Self::new(
360 worktree_store.clone(),
361 buffer_store,
362 GitStoreState::Remote {
363 upstream_client,
364 upstream_project_id: project_id,
365 },
366 cx,
367 )
368 }
369
370 pub fn ssh(
371 worktree_store: &Entity<WorktreeStore>,
372 buffer_store: Entity<BufferStore>,
373 upstream_client: AnyProtoClient,
374 cx: &mut Context<Self>,
375 ) -> Self {
376 Self::new(
377 worktree_store.clone(),
378 buffer_store,
379 GitStoreState::Ssh {
380 upstream_client,
381 upstream_project_id: ProjectId(SSH_PROJECT_ID),
382 downstream: None,
383 },
384 cx,
385 )
386 }
387
388 fn new(
389 worktree_store: Entity<WorktreeStore>,
390 buffer_store: Entity<BufferStore>,
391 state: GitStoreState,
392 cx: &mut Context<Self>,
393 ) -> Self {
394 let _subscriptions = vec![
395 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
396 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
397 ];
398
399 GitStore {
400 state,
401 buffer_store,
402 worktree_store,
403 repositories: HashMap::default(),
404 active_repo_id: None,
405 _subscriptions,
406 loading_diffs: HashMap::default(),
407 shared_diffs: HashMap::default(),
408 diffs: HashMap::default(),
409 }
410 }
411
412 pub fn init(client: &AnyProtoClient) {
413 client.add_entity_request_handler(Self::handle_get_remotes);
414 client.add_entity_request_handler(Self::handle_get_branches);
415 client.add_entity_request_handler(Self::handle_change_branch);
416 client.add_entity_request_handler(Self::handle_create_branch);
417 client.add_entity_request_handler(Self::handle_git_init);
418 client.add_entity_request_handler(Self::handle_push);
419 client.add_entity_request_handler(Self::handle_pull);
420 client.add_entity_request_handler(Self::handle_fetch);
421 client.add_entity_request_handler(Self::handle_stage);
422 client.add_entity_request_handler(Self::handle_unstage);
423 client.add_entity_request_handler(Self::handle_stash);
424 client.add_entity_request_handler(Self::handle_stash_pop);
425 client.add_entity_request_handler(Self::handle_commit);
426 client.add_entity_request_handler(Self::handle_reset);
427 client.add_entity_request_handler(Self::handle_show);
428 client.add_entity_request_handler(Self::handle_load_commit_diff);
429 client.add_entity_request_handler(Self::handle_checkout_files);
430 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
431 client.add_entity_request_handler(Self::handle_set_index_text);
432 client.add_entity_request_handler(Self::handle_askpass);
433 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
434 client.add_entity_request_handler(Self::handle_git_diff);
435 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
436 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
437 client.add_entity_message_handler(Self::handle_update_diff_bases);
438 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
439 client.add_entity_request_handler(Self::handle_blame_buffer);
440 client.add_entity_message_handler(Self::handle_update_repository);
441 client.add_entity_message_handler(Self::handle_remove_repository);
442 }
443
444 pub fn is_local(&self) -> bool {
445 matches!(self.state, GitStoreState::Local { .. })
446 }
447
448 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
449 match &mut self.state {
450 GitStoreState::Ssh {
451 downstream: downstream_client,
452 ..
453 } => {
454 for repo in self.repositories.values() {
455 let update = repo.read(cx).snapshot.initial_update(project_id);
456 for update in split_repository_update(update) {
457 client.send(update).log_err();
458 }
459 }
460 *downstream_client = Some((client, ProjectId(project_id)));
461 }
462 GitStoreState::Local {
463 downstream: downstream_client,
464 ..
465 } => {
466 let mut snapshots = HashMap::default();
467 let (updates_tx, mut updates_rx) = mpsc::unbounded();
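                // Queue the current snapshots, then let a background task turn
                // queued updates into proto messages for the downstream client:
                // a full snapshot the first time a repository is seen, and a
                // delta against the last sent snapshot afterwards.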
468 for repo in self.repositories.values() {
469 updates_tx
470 .unbounded_send(DownstreamUpdate::UpdateRepository(
471 repo.read(cx).snapshot.clone(),
472 ))
473 .ok();
474 }
475 *downstream_client = Some(LocalDownstreamState {
476 client: client.clone(),
477 project_id: ProjectId(project_id),
478 updates_tx,
479 _task: cx.spawn(async move |this, cx| {
480 cx.background_spawn(async move {
481 while let Some(update) = updates_rx.next().await {
482 match update {
483 DownstreamUpdate::UpdateRepository(snapshot) => {
484 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
485 {
486 let update =
487 snapshot.build_update(old_snapshot, project_id);
488 *old_snapshot = snapshot;
489 for update in split_repository_update(update) {
490 client.send(update)?;
491 }
492 } else {
493 let update = snapshot.initial_update(project_id);
494 for update in split_repository_update(update) {
495 client.send(update)?;
496 }
497 snapshots.insert(snapshot.id, snapshot);
498 }
499 }
500 DownstreamUpdate::RemoveRepository(id) => {
501 client.send(proto::RemoveRepository {
502 project_id,
503 id: id.to_proto(),
504 })?;
505 }
506 }
507 }
508 anyhow::Ok(())
509 })
510 .await
511 .ok();
512 this.update(cx, |this, _| {
513 if let GitStoreState::Local {
514 downstream: downstream_client,
515 ..
516 } = &mut this.state
517 {
518 downstream_client.take();
519 } else {
520 unreachable!("unshared called on remote store");
521 }
522 })
523 }),
524 });
525 }
526 GitStoreState::Remote { .. } => {
527 debug_panic!("shared called on remote store");
528 }
529 }
530 }
531
532 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
533 match &mut self.state {
534 GitStoreState::Local {
535 downstream: downstream_client,
536 ..
537 } => {
538 downstream_client.take();
539 }
540 GitStoreState::Ssh {
541 downstream: downstream_client,
542 ..
543 } => {
544 downstream_client.take();
545 }
546 GitStoreState::Remote { .. } => {
547 debug_panic!("unshared called on remote store");
548 }
549 }
550 self.shared_diffs.clear();
551 }
552
553 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
554 self.shared_diffs.remove(peer_id);
555 }
556
557 pub fn active_repository(&self) -> Option<Entity<Repository>> {
558 self.active_repo_id
559 .as_ref()
            .map(|id| self.repositories[id].clone())
561 }
562
563 pub fn open_unstaged_diff(
564 &mut self,
565 buffer: Entity<Buffer>,
566 cx: &mut Context<Self>,
567 ) -> Task<Result<Entity<BufferDiff>>> {
568 let buffer_id = buffer.read(cx).remote_id();
569 if let Some(diff_state) = self.diffs.get(&buffer_id) {
570 if let Some(unstaged_diff) = diff_state
571 .read(cx)
572 .unstaged_diff
573 .as_ref()
574 .and_then(|weak| weak.upgrade())
575 {
576 if let Some(task) =
577 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
578 {
579 return cx.background_executor().spawn(async move {
580 task.await;
581 Ok(unstaged_diff)
582 });
583 }
584 return Task::ready(Ok(unstaged_diff));
585 }
586 }
587
588 let Some((repo, repo_path)) =
589 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
590 else {
591 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
592 };
593
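        // Deduplicate concurrent requests: while the diff is loading, all callers
        // share a single task keyed by (buffer, diff kind).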
594 let task = self
595 .loading_diffs
596 .entry((buffer_id, DiffKind::Unstaged))
597 .or_insert_with(|| {
598 let staged_text = repo.update(cx, |repo, cx| {
599 repo.load_staged_text(buffer_id, repo_path, cx)
600 });
601 cx.spawn(async move |this, cx| {
602 Self::open_diff_internal(
603 this,
604 DiffKind::Unstaged,
605 staged_text.await.map(DiffBasesChange::SetIndex),
606 buffer,
607 cx,
608 )
609 .await
610 .map_err(Arc::new)
611 })
612 .shared()
613 })
614 .clone();
615
616 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
617 }
618
619 pub fn open_uncommitted_diff(
620 &mut self,
621 buffer: Entity<Buffer>,
622 cx: &mut Context<Self>,
623 ) -> Task<Result<Entity<BufferDiff>>> {
624 let buffer_id = buffer.read(cx).remote_id();
625
626 if let Some(diff_state) = self.diffs.get(&buffer_id) {
627 if let Some(uncommitted_diff) = diff_state
628 .read(cx)
629 .uncommitted_diff
630 .as_ref()
631 .and_then(|weak| weak.upgrade())
632 {
633 if let Some(task) =
634 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
635 {
636 return cx.background_executor().spawn(async move {
637 task.await;
638 Ok(uncommitted_diff)
639 });
640 }
641 return Task::ready(Ok(uncommitted_diff));
642 }
643 }
644
645 let Some((repo, repo_path)) =
646 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
647 else {
648 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
649 };
650
651 let task = self
652 .loading_diffs
653 .entry((buffer_id, DiffKind::Uncommitted))
654 .or_insert_with(|| {
655 let changes = repo.update(cx, |repo, cx| {
656 repo.load_committed_text(buffer_id, repo_path, cx)
657 });
658
659 cx.spawn(async move |this, cx| {
660 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
661 .await
662 .map_err(Arc::new)
663 })
664 .shared()
665 })
666 .clone();
667
668 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
669 }
670
671 async fn open_diff_internal(
672 this: WeakEntity<Self>,
673 kind: DiffKind,
674 texts: Result<DiffBasesChange>,
675 buffer_entity: Entity<Buffer>,
676 cx: &mut AsyncApp,
677 ) -> Result<Entity<BufferDiff>> {
678 let diff_bases_change = match texts {
679 Err(e) => {
680 this.update(cx, |this, cx| {
681 let buffer = buffer_entity.read(cx);
682 let buffer_id = buffer.remote_id();
683 this.loading_diffs.remove(&(buffer_id, kind));
684 })?;
685 return Err(e);
686 }
687 Ok(change) => change,
688 };
689
690 this.update(cx, |this, cx| {
691 let buffer = buffer_entity.read(cx);
692 let buffer_id = buffer.remote_id();
693 let language = buffer.language().cloned();
694 let language_registry = buffer.language_registry();
695 let text_snapshot = buffer.text_snapshot();
696 this.loading_diffs.remove(&(buffer_id, kind));
697
698 let git_store = cx.weak_entity();
699 let diff_state = this
700 .diffs
701 .entry(buffer_id)
702 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
703
704 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
705
706 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
707 diff_state.update(cx, |diff_state, cx| {
708 diff_state.language = language;
709 diff_state.language_registry = language_registry;
710
711 match kind {
712 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
713 DiffKind::Uncommitted => {
714 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
715 diff
716 } else {
717 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
718 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
719 unstaged_diff
720 };
721
722 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
723 diff_state.uncommitted_diff = Some(diff.downgrade())
724 }
725 }
726
727 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
728 let rx = diff_state.wait_for_recalculation();
729
730 anyhow::Ok(async move {
731 if let Some(rx) = rx {
732 rx.await;
733 }
734 Ok(diff)
735 })
736 })
737 })??
738 .await
739 }
740
741 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
742 let diff_state = self.diffs.get(&buffer_id)?;
743 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
744 }
745
746 pub fn get_uncommitted_diff(
747 &self,
748 buffer_id: BufferId,
749 cx: &App,
750 ) -> Option<Entity<BufferDiff>> {
751 let diff_state = self.diffs.get(&buffer_id)?;
752 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
753 }
754
755 pub fn open_conflict_set(
756 &mut self,
757 buffer: Entity<Buffer>,
758 cx: &mut Context<Self>,
759 ) -> Entity<ConflictSet> {
760 log::debug!("open conflict set");
761 let buffer_id = buffer.read(cx).remote_id();
762
763 if let Some(git_state) = self.diffs.get(&buffer_id) {
764 if let Some(conflict_set) = git_state
765 .read(cx)
766 .conflict_set
767 .as_ref()
768 .and_then(|weak| weak.upgrade())
769 {
770 let conflict_set = conflict_set.clone();
771 let buffer_snapshot = buffer.read(cx).text_snapshot();
772
773 git_state.update(cx, |state, cx| {
774 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
775 });
776
777 return conflict_set;
778 }
779 }
780
781 let is_unmerged = self
782 .repository_and_path_for_buffer_id(buffer_id, cx)
783 .map_or(false, |(repo, path)| {
784 repo.read(cx).snapshot.has_conflict(&path)
785 });
786 let git_store = cx.weak_entity();
787 let buffer_git_state = self
788 .diffs
789 .entry(buffer_id)
790 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
791 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
792
793 self._subscriptions
794 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
795 cx.emit(GitStoreEvent::ConflictsUpdated);
796 }));
797
798 buffer_git_state.update(cx, |state, cx| {
799 state.conflict_set = Some(conflict_set.downgrade());
800 let buffer_snapshot = buffer.read(cx).text_snapshot();
801 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
802 });
803
804 conflict_set
805 }
806
807 pub fn project_path_git_status(
808 &self,
809 project_path: &ProjectPath,
810 cx: &App,
811 ) -> Option<FileStatus> {
812 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
813 Some(repo.read(cx).status_for_path(&repo_path)?.status)
814 }
815
816 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
817 let mut work_directory_abs_paths = Vec::new();
818 let mut checkpoints = Vec::new();
819 for repository in self.repositories.values() {
820 repository.update(cx, |repository, _| {
821 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
822 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
823 });
824 }
825
826 cx.background_executor().spawn(async move {
827 let checkpoints = future::try_join_all(checkpoints).await?;
828 Ok(GitStoreCheckpoint {
829 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
830 .into_iter()
831 .zip(checkpoints)
832 .collect(),
833 })
834 })
835 }
836
837 pub fn restore_checkpoint(
838 &self,
839 checkpoint: GitStoreCheckpoint,
840 cx: &mut App,
841 ) -> Task<Result<()>> {
842 let repositories_by_work_dir_abs_path = self
843 .repositories
844 .values()
845 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
846 .collect::<HashMap<_, _>>();
847
848 let mut tasks = Vec::new();
849 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
850 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
851 let restore = repository.update(cx, |repository, _| {
852 repository.restore_checkpoint(checkpoint)
853 });
854 tasks.push(async move { restore.await? });
855 }
856 }
857 cx.background_spawn(async move {
858 future::try_join_all(tasks).await?;
859 Ok(())
860 })
861 }
862
863 /// Compares two checkpoints, returning true if they are equal.
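    ///
    /// A minimal usage sketch (assuming an async task with the `GitStore`
    /// entity available as `git_store` and `cx: &mut AsyncApp`):
    ///
    /// ```ignore
    /// let before = git_store.update(cx, |store, cx| store.checkpoint(cx))?.await?;
    /// // ...modify the working tree...
    /// let after = git_store.update(cx, |store, cx| store.checkpoint(cx))?.await?;
    /// let unchanged = git_store
    ///     .update(cx, |store, cx| store.compare_checkpoints(before, after, cx))?
    ///     .await?;
    /// ```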
864 pub fn compare_checkpoints(
865 &self,
866 left: GitStoreCheckpoint,
867 mut right: GitStoreCheckpoint,
868 cx: &mut App,
869 ) -> Task<Result<bool>> {
870 let repositories_by_work_dir_abs_path = self
871 .repositories
872 .values()
873 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
874 .collect::<HashMap<_, _>>();
875
876 let mut tasks = Vec::new();
877 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
878 if let Some(right_checkpoint) = right
879 .checkpoints_by_work_dir_abs_path
880 .remove(&work_dir_abs_path)
881 {
882 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
883 {
884 let compare = repository.update(cx, |repository, _| {
885 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
886 });
887
888 tasks.push(async move { compare.await? });
889 }
890 } else {
891 return Task::ready(Ok(false));
892 }
893 }
894 cx.background_spawn(async move {
895 Ok(future::try_join_all(tasks)
896 .await?
897 .into_iter()
898 .all(|result| result))
899 })
900 }
901
    /// Blames a buffer, optionally at a specific earlier version of its text.
903 pub fn blame_buffer(
904 &self,
905 buffer: &Entity<Buffer>,
906 version: Option<clock::Global>,
907 cx: &mut App,
908 ) -> Task<Result<Option<Blame>>> {
909 let buffer = buffer.read(cx);
910 let Some((repo, repo_path)) =
911 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
912 else {
913 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
914 };
915 let content = match &version {
916 Some(version) => buffer.rope_for_version(version).clone(),
917 None => buffer.as_rope().clone(),
918 };
919 let version = version.unwrap_or(buffer.version());
920 let buffer_id = buffer.remote_id();
921
922 let rx = repo.update(cx, |repo, _| {
923 repo.send_job(None, move |state, _| async move {
924 match state {
925 RepositoryState::Local { backend, .. } => backend
926 .blame(repo_path.clone(), content)
927 .await
928 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
929 .map(Some),
930 RepositoryState::Remote { project_id, client } => {
931 let response = client
932 .request(proto::BlameBuffer {
933 project_id: project_id.to_proto(),
934 buffer_id: buffer_id.into(),
935 version: serialize_version(&version),
936 })
937 .await?;
938 Ok(deserialize_blame_buffer_response(response))
939 }
940 }
941 })
942 });
943
944 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
945 }
946
947 pub fn get_permalink_to_line(
948 &self,
949 buffer: &Entity<Buffer>,
950 selection: Range<u32>,
951 cx: &mut App,
952 ) -> Task<Result<url::Url>> {
953 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
954 return Task::ready(Err(anyhow!("buffer has no file")));
955 };
956
957 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
958 &(file.worktree.read(cx).id(), file.path.clone()).into(),
959 cx,
960 ) else {
961 // If we're not in a Git repo, check whether this is a Rust source
962 // file in the Cargo registry (presumably opened with go-to-definition
963 // from a normal Rust file). If so, we can put together a permalink
964 // using crate metadata.
965 if buffer
966 .read(cx)
967 .language()
968 .is_none_or(|lang| lang.name() != "Rust".into())
969 {
970 return Task::ready(Err(anyhow!("no permalink available")));
971 }
972 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
973 return Task::ready(Err(anyhow!("no permalink available")));
974 };
975 return cx.spawn(async move |cx| {
976 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
977 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
978 .context("no permalink available")
979 });
980
981 // TODO remote case
982 };
983
984 let buffer_id = buffer.read(cx).remote_id();
985 let branch = repo.read(cx).branch.clone();
986 let remote = branch
987 .as_ref()
988 .and_then(|b| b.upstream.as_ref())
989 .and_then(|b| b.remote_name())
990 .unwrap_or("origin")
991 .to_string();
992
993 let rx = repo.update(cx, |repo, _| {
994 repo.send_job(None, move |state, cx| async move {
995 match state {
996 RepositoryState::Local { backend, .. } => {
997 let origin_url = backend
998 .remote_url(&remote)
999 .with_context(|| format!("remote \"{remote}\" not found"))?;
1000
1001 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1002
1003 let provider_registry =
1004 cx.update(GitHostingProviderRegistry::default_global)?;
1005
1006 let (provider, remote) =
1007 parse_git_remote_url(provider_registry, &origin_url)
1008 .context("parsing Git remote URL")?;
1009
1010 let path = repo_path.to_str().with_context(|| {
1011 format!("converting repo path {repo_path:?} to string")
1012 })?;
1013
1014 Ok(provider.build_permalink(
1015 remote,
1016 BuildPermalinkParams {
1017 sha: &sha,
1018 path,
1019 selection: Some(selection),
1020 },
1021 ))
1022 }
1023 RepositoryState::Remote { project_id, client } => {
1024 let response = client
1025 .request(proto::GetPermalinkToLine {
1026 project_id: project_id.to_proto(),
1027 buffer_id: buffer_id.into(),
1028 selection: Some(proto::Range {
1029 start: selection.start as u64,
1030 end: selection.end as u64,
1031 }),
1032 })
1033 .await?;
1034
1035 url::Url::parse(&response.permalink).context("failed to parse permalink")
1036 }
1037 }
1038 })
1039 });
1040 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1041 }
1042
1043 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1044 match &self.state {
1045 GitStoreState::Local {
1046 downstream: downstream_client,
1047 ..
1048 } => downstream_client
1049 .as_ref()
1050 .map(|state| (state.client.clone(), state.project_id)),
1051 GitStoreState::Ssh {
1052 downstream: downstream_client,
1053 ..
1054 } => downstream_client.clone(),
1055 GitStoreState::Remote { .. } => None,
1056 }
1057 }
1058
1059 fn upstream_client(&self) -> Option<AnyProtoClient> {
1060 match &self.state {
1061 GitStoreState::Local { .. } => None,
1062 GitStoreState::Ssh {
1063 upstream_client, ..
1064 }
1065 | GitStoreState::Remote {
1066 upstream_client, ..
1067 } => Some(upstream_client.clone()),
1068 }
1069 }
1070
1071 fn on_worktree_store_event(
1072 &mut self,
1073 worktree_store: Entity<WorktreeStore>,
1074 event: &WorktreeStoreEvent,
1075 cx: &mut Context<Self>,
1076 ) {
1077 let GitStoreState::Local {
1078 project_environment,
1079 downstream,
1080 next_repository_id,
1081 fs,
1082 } = &self.state
1083 else {
1084 return;
1085 };
1086
1087 match event {
1088 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1089 if let Some(worktree) = self
1090 .worktree_store
1091 .read(cx)
1092 .worktree_for_id(*worktree_id, cx)
1093 {
1094 let paths_by_git_repo =
1095 self.process_updated_entries(&worktree, updated_entries, cx);
1096 let downstream = downstream
1097 .as_ref()
1098 .map(|downstream| downstream.updates_tx.clone());
1099 cx.spawn(async move |_, cx| {
1100 let paths_by_git_repo = paths_by_git_repo.await;
1101 for (repo, paths) in paths_by_git_repo {
1102 repo.update(cx, |repo, cx| {
1103 repo.paths_changed(paths, downstream.clone(), cx);
1104 })
1105 .ok();
1106 }
1107 })
1108 .detach();
1109 }
1110 }
1111 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1112 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1113 else {
1114 return;
1115 };
1116 if !worktree.read(cx).is_visible() {
1117 log::debug!(
1118 "not adding repositories for local worktree {:?} because it's not visible",
1119 worktree.read(cx).abs_path()
1120 );
1121 return;
1122 }
1123 self.update_repositories_from_worktree(
1124 project_environment.clone(),
1125 next_repository_id.clone(),
1126 downstream
1127 .as_ref()
1128 .map(|downstream| downstream.updates_tx.clone()),
1129 changed_repos.clone(),
1130 fs.clone(),
1131 cx,
1132 );
1133 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1134 }
1135 _ => {}
1136 }
1137 }
1138
1139 fn on_repository_event(
1140 &mut self,
1141 repo: Entity<Repository>,
1142 event: &RepositoryEvent,
1143 cx: &mut Context<Self>,
1144 ) {
1145 let id = repo.read(cx).id;
1146 let repo_snapshot = repo.read(cx).snapshot.clone();
1147 for (buffer_id, diff) in self.diffs.iter() {
1148 if let Some((buffer_repo, repo_path)) =
1149 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1150 {
1151 if buffer_repo == repo {
1152 diff.update(cx, |diff, cx| {
1153 if let Some(conflict_set) = &diff.conflict_set {
1154 let conflict_status_changed =
1155 conflict_set.update(cx, |conflict_set, cx| {
1156 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1157 conflict_set.set_has_conflict(has_conflict, cx)
1158 })?;
1159 if conflict_status_changed {
1160 let buffer_store = self.buffer_store.read(cx);
1161 if let Some(buffer) = buffer_store.get(*buffer_id) {
1162 let _ = diff.reparse_conflict_markers(
1163 buffer.read(cx).text_snapshot(),
1164 cx,
1165 );
1166 }
1167 }
1168 }
1169 anyhow::Ok(())
1170 })
1171 .ok();
1172 }
1173 }
1174 }
1175 cx.emit(GitStoreEvent::RepositoryUpdated(
1176 id,
1177 event.clone(),
1178 self.active_repo_id == Some(id),
1179 ))
1180 }
1181
1182 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1183 cx.emit(GitStoreEvent::JobsUpdated)
1184 }
1185
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1187 fn update_repositories_from_worktree(
1188 &mut self,
1189 project_environment: Entity<ProjectEnvironment>,
1190 next_repository_id: Arc<AtomicU64>,
1191 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1192 updated_git_repositories: UpdatedGitRepositoriesSet,
1193 fs: Arc<dyn Fs>,
1194 cx: &mut Context<Self>,
1195 ) {
1196 let mut removed_ids = Vec::new();
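        // Match each update against an existing repository by its old or new work
        // directory path: rescan matches that still have a work directory, drop
        // those that no longer do, and create new repositories for the rest.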
1197 for update in updated_git_repositories.iter() {
1198 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1199 let existing_work_directory_abs_path =
1200 repo.read(cx).work_directory_abs_path.clone();
1201 Some(&existing_work_directory_abs_path)
1202 == update.old_work_directory_abs_path.as_ref()
1203 || Some(&existing_work_directory_abs_path)
1204 == update.new_work_directory_abs_path.as_ref()
1205 }) {
1206 if let Some(new_work_directory_abs_path) =
1207 update.new_work_directory_abs_path.clone()
1208 {
1209 existing.update(cx, |existing, cx| {
1210 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1211 existing.schedule_scan(updates_tx.clone(), cx);
1212 });
1213 } else {
1214 removed_ids.push(*id);
1215 }
1216 } else if let UpdatedGitRepository {
1217 new_work_directory_abs_path: Some(work_directory_abs_path),
1218 dot_git_abs_path: Some(dot_git_abs_path),
1219 repository_dir_abs_path: Some(repository_dir_abs_path),
1220 common_dir_abs_path: Some(common_dir_abs_path),
1221 ..
1222 } = update
1223 {
1224 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1225 let git_store = cx.weak_entity();
1226 let repo = cx.new(|cx| {
1227 let mut repo = Repository::local(
1228 id,
1229 work_directory_abs_path.clone(),
1230 dot_git_abs_path.clone(),
1231 repository_dir_abs_path.clone(),
1232 common_dir_abs_path.clone(),
1233 project_environment.downgrade(),
1234 fs.clone(),
1235 git_store,
1236 cx,
1237 );
1238 repo.schedule_scan(updates_tx.clone(), cx);
1239 repo
1240 });
1241 self._subscriptions
1242 .push(cx.subscribe(&repo, Self::on_repository_event));
1243 self._subscriptions
1244 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1245 self.repositories.insert(id, repo);
1246 cx.emit(GitStoreEvent::RepositoryAdded(id));
1247 self.active_repo_id.get_or_insert_with(|| {
1248 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1249 id
1250 });
1251 }
1252 }
1253
1254 for id in removed_ids {
1255 if self.active_repo_id == Some(id) {
1256 self.active_repo_id = None;
1257 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1258 }
1259 self.repositories.remove(&id);
1260 if let Some(updates_tx) = updates_tx.as_ref() {
1261 updates_tx
1262 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1263 .ok();
1264 }
1265 }
1266 }
1267
1268 fn on_buffer_store_event(
1269 &mut self,
1270 _: Entity<BufferStore>,
1271 event: &BufferStoreEvent,
1272 cx: &mut Context<Self>,
1273 ) {
1274 match event {
1275 BufferStoreEvent::BufferAdded(buffer) => {
1276 cx.subscribe(&buffer, |this, buffer, event, cx| {
1277 if let BufferEvent::LanguageChanged = event {
1278 let buffer_id = buffer.read(cx).remote_id();
1279 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1280 diff_state.update(cx, |diff_state, cx| {
1281 diff_state.buffer_language_changed(buffer, cx);
1282 });
1283 }
1284 }
1285 })
1286 .detach();
1287 }
1288 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1289 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1290 diffs.remove(buffer_id);
1291 }
1292 }
1293 BufferStoreEvent::BufferDropped(buffer_id) => {
                self.diffs.remove(buffer_id);
1295 for diffs in self.shared_diffs.values_mut() {
1296 diffs.remove(buffer_id);
1297 }
1298 }
1299
1300 _ => {}
1301 }
1302 }
1303
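    /// Kicks off diff recalculation and conflict-marker reparsing for the given
    /// buffers, returning a future that resolves once all of that work finishes.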
1304 pub fn recalculate_buffer_diffs(
1305 &mut self,
1306 buffers: Vec<Entity<Buffer>>,
1307 cx: &mut Context<Self>,
1308 ) -> impl Future<Output = ()> + use<> {
1309 let mut futures = Vec::new();
1310 for buffer in buffers {
1311 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1312 let buffer = buffer.read(cx).text_snapshot();
1313 diff_state.update(cx, |diff_state, cx| {
1314 diff_state.recalculate_diffs(buffer.clone(), cx);
1315 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1316 });
1317 futures.push(diff_state.update(cx, |diff_state, cx| {
1318 diff_state
1319 .reparse_conflict_markers(buffer, cx)
1320 .map(|_| {})
1321 .boxed()
1322 }));
1323 }
1324 }
1325 async move {
1326 futures::future::join_all(futures).await;
1327 }
1328 }
1329
1330 fn on_buffer_diff_event(
1331 &mut self,
1332 diff: Entity<buffer_diff::BufferDiff>,
1333 event: &BufferDiffEvent,
1334 cx: &mut Context<Self>,
1335 ) {
1336 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1337 let buffer_id = diff.read(cx).buffer_id;
1338 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1339 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1340 diff_state.hunk_staging_operation_count += 1;
1341 diff_state.hunk_staging_operation_count
1342 });
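                // Persist the new index text through the repository's job queue; if
                // the write fails, clear the optimistic pending-hunk state and emit
                // an `IndexWriteError` event.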
1343 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1344 let recv = repo.update(cx, |repo, cx| {
1345 log::debug!("hunks changed for {}", path.display());
1346 repo.spawn_set_index_text_job(
1347 path,
1348 new_index_text.as_ref().map(|rope| rope.to_string()),
1349 Some(hunk_staging_operation_count),
1350 cx,
1351 )
1352 });
1353 let diff = diff.downgrade();
1354 cx.spawn(async move |this, cx| {
1355 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1356 diff.update(cx, |diff, cx| {
1357 diff.clear_pending_hunks(cx);
1358 })
1359 .ok();
1360 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1361 .ok();
1362 }
1363 })
1364 .detach();
1365 }
1366 }
1367 }
1368 }
1369
1370 fn local_worktree_git_repos_changed(
1371 &mut self,
1372 worktree: Entity<Worktree>,
1373 changed_repos: &UpdatedGitRepositoriesSet,
1374 cx: &mut Context<Self>,
1375 ) {
1376 log::debug!("local worktree repos changed");
1377 debug_assert!(worktree.read(cx).is_local());
1378
1379 for repository in self.repositories.values() {
1380 repository.update(cx, |repository, cx| {
1381 let repo_abs_path = &repository.work_directory_abs_path;
1382 if changed_repos.iter().any(|update| {
                    update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
                        || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1385 }) {
1386 repository.reload_buffer_diff_bases(cx);
1387 }
1388 });
1389 }
1390 }
1391
1392 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1393 &self.repositories
1394 }
1395
1396 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1397 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1398 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1399 Some(status.status)
1400 }
1401
1402 pub fn repository_and_path_for_buffer_id(
1403 &self,
1404 buffer_id: BufferId,
1405 cx: &App,
1406 ) -> Option<(Entity<Repository>, RepoPath)> {
1407 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1408 let project_path = buffer.read(cx).project_path(cx)?;
1409 self.repository_and_path_for_project_path(&project_path, cx)
1410 }
1411
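    /// Returns the repository whose work directory contains the given project
    /// path, along with that path expressed relative to the work directory.
    /// When nested repositories both contain the path, the innermost one wins.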
1412 pub fn repository_and_path_for_project_path(
1413 &self,
1414 path: &ProjectPath,
1415 cx: &App,
1416 ) -> Option<(Entity<Repository>, RepoPath)> {
1417 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1418 self.repositories
1419 .values()
1420 .filter_map(|repo| {
1421 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1422 Some((repo.clone(), repo_path))
1423 })
1424 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1425 }
1426
1427 pub fn git_init(
1428 &self,
1429 path: Arc<Path>,
1430 fallback_branch_name: String,
1431 cx: &App,
1432 ) -> Task<Result<()>> {
1433 match &self.state {
1434 GitStoreState::Local { fs, .. } => {
1435 let fs = fs.clone();
1436 cx.background_executor()
1437 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1438 }
1439 GitStoreState::Ssh {
1440 upstream_client,
1441 upstream_project_id: project_id,
1442 ..
1443 }
1444 | GitStoreState::Remote {
1445 upstream_client,
1446 upstream_project_id: project_id,
1447 ..
1448 } => {
1449 let client = upstream_client.clone();
1450 let project_id = *project_id;
1451 cx.background_executor().spawn(async move {
1452 client
1453 .request(proto::GitInit {
1454 project_id: project_id.0,
1455 abs_path: path.to_string_lossy().to_string(),
1456 fallback_branch_name,
1457 })
1458 .await?;
1459 Ok(())
1460 })
1461 }
1462 }
1463 }
1464
1465 async fn handle_update_repository(
1466 this: Entity<Self>,
1467 envelope: TypedEnvelope<proto::UpdateRepository>,
1468 mut cx: AsyncApp,
1469 ) -> Result<()> {
1470 this.update(&mut cx, |this, cx| {
1471 let mut update = envelope.payload;
1472
1473 let id = RepositoryId::from_proto(update.id);
1474 let client = this
1475 .upstream_client()
1476 .context("no upstream client")?
1477 .clone();
1478
1479 let mut is_new = false;
1480 let repo = this.repositories.entry(id).or_insert_with(|| {
1481 is_new = true;
1482 let git_store = cx.weak_entity();
1483 cx.new(|cx| {
1484 Repository::remote(
1485 id,
1486 Path::new(&update.abs_path).into(),
1487 ProjectId(update.project_id),
1488 client,
1489 git_store,
1490 cx,
1491 )
1492 })
1493 });
1494 if is_new {
1495 this._subscriptions
1496 .push(cx.subscribe(&repo, Self::on_repository_event))
1497 }
1498
1499 repo.update(cx, {
1500 let update = update.clone();
1501 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1502 })?;
1503
1504 this.active_repo_id.get_or_insert_with(|| {
1505 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1506 id
1507 });
1508
1509 if let Some((client, project_id)) = this.downstream_client() {
1510 update.project_id = project_id.to_proto();
1511 client.send(update).log_err();
1512 }
1513 Ok(())
1514 })?
1515 }
1516
1517 async fn handle_remove_repository(
1518 this: Entity<Self>,
1519 envelope: TypedEnvelope<proto::RemoveRepository>,
1520 mut cx: AsyncApp,
1521 ) -> Result<()> {
1522 this.update(&mut cx, |this, cx| {
1523 let mut update = envelope.payload;
1524 let id = RepositoryId::from_proto(update.id);
1525 this.repositories.remove(&id);
1526 if let Some((client, project_id)) = this.downstream_client() {
1527 update.project_id = project_id.to_proto();
1528 client.send(update).log_err();
1529 }
1530 if this.active_repo_id == Some(id) {
1531 this.active_repo_id = None;
1532 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1533 }
1534 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1535 })
1536 }
1537
1538 async fn handle_git_init(
1539 this: Entity<Self>,
1540 envelope: TypedEnvelope<proto::GitInit>,
1541 cx: AsyncApp,
1542 ) -> Result<proto::Ack> {
1543 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1544 let name = envelope.payload.fallback_branch_name;
1545 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1546 .await?;
1547
1548 Ok(proto::Ack {})
1549 }
1550
1551 async fn handle_fetch(
1552 this: Entity<Self>,
1553 envelope: TypedEnvelope<proto::Fetch>,
1554 mut cx: AsyncApp,
1555 ) -> Result<proto::RemoteMessageResponse> {
1556 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1557 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1558 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1559 let askpass_id = envelope.payload.askpass_id;
1560
1561 let askpass = make_remote_delegate(
1562 this,
1563 envelope.payload.project_id,
1564 repository_id,
1565 askpass_id,
1566 &mut cx,
1567 );
1568
1569 let remote_output = repository_handle
1570 .update(&mut cx, |repository_handle, cx| {
1571 repository_handle.fetch(fetch_options, askpass, cx)
1572 })?
1573 .await??;
1574
1575 Ok(proto::RemoteMessageResponse {
1576 stdout: remote_output.stdout,
1577 stderr: remote_output.stderr,
1578 })
1579 }
1580
1581 async fn handle_push(
1582 this: Entity<Self>,
1583 envelope: TypedEnvelope<proto::Push>,
1584 mut cx: AsyncApp,
1585 ) -> Result<proto::RemoteMessageResponse> {
1586 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1587 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1588
1589 let askpass_id = envelope.payload.askpass_id;
1590 let askpass = make_remote_delegate(
1591 this,
1592 envelope.payload.project_id,
1593 repository_id,
1594 askpass_id,
1595 &mut cx,
1596 );
1597
1598 let options = envelope
1599 .payload
1600 .options
1601 .as_ref()
1602 .map(|_| match envelope.payload.options() {
1603 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1604 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1605 });
1606
1607 let branch_name = envelope.payload.branch_name.into();
1608 let remote_name = envelope.payload.remote_name.into();
1609
1610 let remote_output = repository_handle
1611 .update(&mut cx, |repository_handle, cx| {
1612 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1613 })?
1614 .await??;
1615 Ok(proto::RemoteMessageResponse {
1616 stdout: remote_output.stdout,
1617 stderr: remote_output.stderr,
1618 })
1619 }
1620
1621 async fn handle_pull(
1622 this: Entity<Self>,
1623 envelope: TypedEnvelope<proto::Pull>,
1624 mut cx: AsyncApp,
1625 ) -> Result<proto::RemoteMessageResponse> {
1626 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1627 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1628 let askpass_id = envelope.payload.askpass_id;
1629 let askpass = make_remote_delegate(
1630 this,
1631 envelope.payload.project_id,
1632 repository_id,
1633 askpass_id,
1634 &mut cx,
1635 );
1636
1637 let branch_name = envelope.payload.branch_name.into();
1638 let remote_name = envelope.payload.remote_name.into();
1639
1640 let remote_message = repository_handle
1641 .update(&mut cx, |repository_handle, cx| {
1642 repository_handle.pull(branch_name, remote_name, askpass, cx)
1643 })?
1644 .await??;
1645
1646 Ok(proto::RemoteMessageResponse {
1647 stdout: remote_message.stdout,
1648 stderr: remote_message.stderr,
1649 })
1650 }
1651
1652 async fn handle_stage(
1653 this: Entity<Self>,
1654 envelope: TypedEnvelope<proto::Stage>,
1655 mut cx: AsyncApp,
1656 ) -> Result<proto::Ack> {
1657 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1658 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1659
1660 let entries = envelope
1661 .payload
1662 .paths
1663 .into_iter()
1664 .map(PathBuf::from)
1665 .map(RepoPath::new)
1666 .collect();
1667
1668 repository_handle
1669 .update(&mut cx, |repository_handle, cx| {
1670 repository_handle.stage_entries(entries, cx)
1671 })?
1672 .await?;
1673 Ok(proto::Ack {})
1674 }
1675
1676 async fn handle_unstage(
1677 this: Entity<Self>,
1678 envelope: TypedEnvelope<proto::Unstage>,
1679 mut cx: AsyncApp,
1680 ) -> Result<proto::Ack> {
1681 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1682 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1683
1684 let entries = envelope
1685 .payload
1686 .paths
1687 .into_iter()
1688 .map(PathBuf::from)
1689 .map(RepoPath::new)
1690 .collect();
1691
1692 repository_handle
1693 .update(&mut cx, |repository_handle, cx| {
1694 repository_handle.unstage_entries(entries, cx)
1695 })?
1696 .await?;
1697
1698 Ok(proto::Ack {})
1699 }
1700
1701 async fn handle_stash(
1702 this: Entity<Self>,
1703 envelope: TypedEnvelope<proto::Stash>,
1704 mut cx: AsyncApp,
1705 ) -> Result<proto::Ack> {
1706 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1707 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1708
1709 let entries = envelope
1710 .payload
1711 .paths
1712 .into_iter()
1713 .map(PathBuf::from)
1714 .map(RepoPath::new)
1715 .collect();
1716
1717 repository_handle
1718 .update(&mut cx, |repository_handle, cx| {
1719 repository_handle.stash_entries(entries, cx)
1720 })?
1721 .await?;
1722
1723 Ok(proto::Ack {})
1724 }
1725
1726 async fn handle_stash_pop(
1727 this: Entity<Self>,
1728 envelope: TypedEnvelope<proto::StashPop>,
1729 mut cx: AsyncApp,
1730 ) -> Result<proto::Ack> {
1731 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1732 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1733
1734 repository_handle
1735 .update(&mut cx, |repository_handle, cx| {
1736 repository_handle.stash_pop(cx)
1737 })?
1738 .await?;
1739
1740 Ok(proto::Ack {})
1741 }
1742
1743 async fn handle_set_index_text(
1744 this: Entity<Self>,
1745 envelope: TypedEnvelope<proto::SetIndexText>,
1746 mut cx: AsyncApp,
1747 ) -> Result<proto::Ack> {
1748 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1749 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1750 let repo_path = RepoPath::from_str(&envelope.payload.path);
1751
1752 repository_handle
1753 .update(&mut cx, |repository_handle, cx| {
1754 repository_handle.spawn_set_index_text_job(
1755 repo_path,
1756 envelope.payload.text,
1757 None,
1758 cx,
1759 )
1760 })?
1761 .await??;
1762 Ok(proto::Ack {})
1763 }
1764
1765 async fn handle_commit(
1766 this: Entity<Self>,
1767 envelope: TypedEnvelope<proto::Commit>,
1768 mut cx: AsyncApp,
1769 ) -> Result<proto::Ack> {
1770 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1771 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1772
1773 let message = SharedString::from(envelope.payload.message);
1774 let name = envelope.payload.name.map(SharedString::from);
1775 let email = envelope.payload.email.map(SharedString::from);
1776 let options = envelope.payload.options.unwrap_or_default();
1777
1778 repository_handle
1779 .update(&mut cx, |repository_handle, cx| {
1780 repository_handle.commit(
1781 message,
1782 name.zip(email),
1783 CommitOptions {
1784 amend: options.amend,
1785 signoff: options.signoff,
1786 },
1787 cx,
1788 )
1789 })?
1790 .await??;
1791 Ok(proto::Ack {})
1792 }
1793
1794 async fn handle_get_remotes(
1795 this: Entity<Self>,
1796 envelope: TypedEnvelope<proto::GetRemotes>,
1797 mut cx: AsyncApp,
1798 ) -> Result<proto::GetRemotesResponse> {
1799 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1800 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1801
1802 let branch_name = envelope.payload.branch_name;
1803
1804 let remotes = repository_handle
1805 .update(&mut cx, |repository_handle, _| {
1806 repository_handle.get_remotes(branch_name)
1807 })?
1808 .await??;
1809
1810 Ok(proto::GetRemotesResponse {
1811 remotes: remotes
1812 .into_iter()
1813 .map(|remotes| proto::get_remotes_response::Remote {
1814 name: remotes.name.to_string(),
1815 })
1816 .collect::<Vec<_>>(),
1817 })
1818 }
1819
1820 async fn handle_get_branches(
1821 this: Entity<Self>,
1822 envelope: TypedEnvelope<proto::GitGetBranches>,
1823 mut cx: AsyncApp,
1824 ) -> Result<proto::GitBranchesResponse> {
1825 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1826 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1827
1828 let branches = repository_handle
1829 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1830 .await??;
1831
1832 Ok(proto::GitBranchesResponse {
1833 branches: branches
1834 .into_iter()
1835 .map(|branch| branch_to_proto(&branch))
1836 .collect::<Vec<_>>(),
1837 })
    }

    async fn handle_create_branch(
1840 this: Entity<Self>,
1841 envelope: TypedEnvelope<proto::GitCreateBranch>,
1842 mut cx: AsyncApp,
1843 ) -> Result<proto::Ack> {
1844 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1845 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1846 let branch_name = envelope.payload.branch_name;
1847
1848 repository_handle
1849 .update(&mut cx, |repository_handle, _| {
1850 repository_handle.create_branch(branch_name)
1851 })?
1852 .await??;
1853
1854 Ok(proto::Ack {})
1855 }
1856
1857 async fn handle_change_branch(
1858 this: Entity<Self>,
1859 envelope: TypedEnvelope<proto::GitChangeBranch>,
1860 mut cx: AsyncApp,
1861 ) -> Result<proto::Ack> {
1862 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1863 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1864 let branch_name = envelope.payload.branch_name;
1865
1866 repository_handle
1867 .update(&mut cx, |repository_handle, _| {
1868 repository_handle.change_branch(branch_name)
1869 })?
1870 .await??;
1871
1872 Ok(proto::Ack {})
1873 }
1874
1875 async fn handle_show(
1876 this: Entity<Self>,
1877 envelope: TypedEnvelope<proto::GitShow>,
1878 mut cx: AsyncApp,
1879 ) -> Result<proto::GitCommitDetails> {
1880 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1881 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1882
1883 let commit = repository_handle
1884 .update(&mut cx, |repository_handle, _| {
1885 repository_handle.show(envelope.payload.commit)
1886 })?
1887 .await??;
1888 Ok(proto::GitCommitDetails {
1889 sha: commit.sha.into(),
1890 message: commit.message.into(),
1891 commit_timestamp: commit.commit_timestamp,
1892 author_email: commit.author_email.into(),
1893 author_name: commit.author_name.into(),
1894 })
1895 }
1896
1897 async fn handle_load_commit_diff(
1898 this: Entity<Self>,
1899 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1900 mut cx: AsyncApp,
1901 ) -> Result<proto::LoadCommitDiffResponse> {
1902 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1903 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1904
1905 let commit_diff = repository_handle
1906 .update(&mut cx, |repository_handle, _| {
1907 repository_handle.load_commit_diff(envelope.payload.commit)
1908 })?
1909 .await??;
1910 Ok(proto::LoadCommitDiffResponse {
1911 files: commit_diff
1912 .files
1913 .into_iter()
1914 .map(|file| proto::CommitFile {
1915 path: file.path.to_string(),
1916 old_text: file.old_text,
1917 new_text: file.new_text,
1918 })
1919 .collect(),
1920 })
1921 }
1922
1923 async fn handle_reset(
1924 this: Entity<Self>,
1925 envelope: TypedEnvelope<proto::GitReset>,
1926 mut cx: AsyncApp,
1927 ) -> Result<proto::Ack> {
1928 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1929 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1930
1931 let mode = match envelope.payload.mode() {
1932 git_reset::ResetMode::Soft => ResetMode::Soft,
1933 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1934 };
1935
1936 repository_handle
1937 .update(&mut cx, |repository_handle, cx| {
1938 repository_handle.reset(envelope.payload.commit, mode, cx)
1939 })?
1940 .await??;
1941 Ok(proto::Ack {})
1942 }
1943
1944 async fn handle_checkout_files(
1945 this: Entity<Self>,
1946 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
1947 mut cx: AsyncApp,
1948 ) -> Result<proto::Ack> {
1949 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1950 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1951 let paths = envelope
1952 .payload
1953 .paths
1954 .iter()
1955 .map(|s| RepoPath::from_str(s))
1956 .collect();
1957
1958 repository_handle
1959 .update(&mut cx, |repository_handle, cx| {
1960 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
1961 })?
1962 .await??;
1963 Ok(proto::Ack {})
1964 }
1965
1966 async fn handle_open_commit_message_buffer(
1967 this: Entity<Self>,
1968 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
1969 mut cx: AsyncApp,
1970 ) -> Result<proto::OpenBufferResponse> {
1971 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1972 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1973 let buffer = repository
1974 .update(&mut cx, |repository, cx| {
1975 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
1976 })?
1977 .await?;
1978
1979 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
1980 this.update(&mut cx, |this, cx| {
1981 this.buffer_store.update(cx, |buffer_store, cx| {
1982 buffer_store
1983 .create_buffer_for_peer(
1984 &buffer,
1985 envelope.original_sender_id.unwrap_or(envelope.sender_id),
1986 cx,
1987 )
1988 .detach_and_log_err(cx);
1989 })
1990 })?;
1991
1992 Ok(proto::OpenBufferResponse {
1993 buffer_id: buffer_id.to_proto(),
1994 })
1995 }
1996
1997 async fn handle_askpass(
1998 this: Entity<Self>,
1999 envelope: TypedEnvelope<proto::AskPassRequest>,
2000 mut cx: AsyncApp,
2001 ) -> Result<proto::AskPassResponse> {
2002 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2003 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2004
2005 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2006 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2007 debug_panic!("no askpass found");
2008 anyhow::bail!("no askpass found");
2009 };
2010
2011 let response = askpass.ask_password(envelope.payload.prompt).await?;
2012
2013 delegates
2014 .lock()
2015 .insert(envelope.payload.askpass_id, askpass);
2016
2017 Ok(proto::AskPassResponse { response })
2018 }
2019
2020 async fn handle_check_for_pushed_commits(
2021 this: Entity<Self>,
2022 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2023 mut cx: AsyncApp,
2024 ) -> Result<proto::CheckForPushedCommitsResponse> {
2025 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2026 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2027
2028 let branches = repository_handle
2029 .update(&mut cx, |repository_handle, _| {
2030 repository_handle.check_for_pushed_commits()
2031 })?
2032 .await??;
2033 Ok(proto::CheckForPushedCommitsResponse {
2034 pushed_to: branches
2035 .into_iter()
2036 .map(|commit| commit.to_string())
2037 .collect(),
2038 })
2039 }
2040
2041 async fn handle_git_diff(
2042 this: Entity<Self>,
2043 envelope: TypedEnvelope<proto::GitDiff>,
2044 mut cx: AsyncApp,
2045 ) -> Result<proto::GitDiffResponse> {
2046 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2047 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2048 let diff_type = match envelope.payload.diff_type() {
2049 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2050 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2051 };
2052
2053 let mut diff = repository_handle
2054 .update(&mut cx, |repository_handle, cx| {
2055 repository_handle.diff(diff_type, cx)
2056 })?
2057 .await??;
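        // Cap the diff we send back over RPC at roughly one megabyte.
        // Truncating by characters rather than bytes avoids splitting a UTF-8
        // code point in the middle.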
2058 const ONE_MB: usize = 1_000_000;
2059 if diff.len() > ONE_MB {
2060 diff = diff.chars().take(ONE_MB).collect()
2061 }
2062
2063 Ok(proto::GitDiffResponse { diff })
2064 }
2065
2066 async fn handle_open_unstaged_diff(
2067 this: Entity<Self>,
2068 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2069 mut cx: AsyncApp,
2070 ) -> Result<proto::OpenUnstagedDiffResponse> {
2071 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2072 let diff = this
2073 .update(&mut cx, |this, cx| {
2074 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2075 Some(this.open_unstaged_diff(buffer, cx))
2076 })?
2077 .context("missing buffer")?
2078 .await?;
2079 this.update(&mut cx, |this, _| {
2080 let shared_diffs = this
2081 .shared_diffs
2082 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2083 .or_default();
2084 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2085 })?;
2086 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2087 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2088 }
2089
2090 async fn handle_open_uncommitted_diff(
2091 this: Entity<Self>,
2092 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2093 mut cx: AsyncApp,
2094 ) -> Result<proto::OpenUncommittedDiffResponse> {
2095 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2096 let diff = this
2097 .update(&mut cx, |this, cx| {
2098 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2099 Some(this.open_uncommitted_diff(buffer, cx))
2100 })?
2101 .context("missing buffer")?
2102 .await?;
2103 this.update(&mut cx, |this, _| {
2104 let shared_diffs = this
2105 .shared_diffs
2106 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2107 .or_default();
2108 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2109 })?;
2110 diff.read_with(&cx, |diff, cx| {
2111 use proto::open_uncommitted_diff_response::Mode;
2112
2113 let unstaged_diff = diff.secondary_diff();
2114 let index_snapshot = unstaged_diff.and_then(|diff| {
2115 let diff = diff.read(cx);
2116 diff.base_text_exists().then(|| diff.base_text())
2117 });
2118
2119 let mode;
2120 let staged_text;
2121 let committed_text;
2122 if diff.base_text_exists() {
2123 let committed_snapshot = diff.base_text();
2124 committed_text = Some(committed_snapshot.text());
2125 if let Some(index_text) = index_snapshot {
2126 if index_text.remote_id() == committed_snapshot.remote_id() {
2127 mode = Mode::IndexMatchesHead;
2128 staged_text = None;
2129 } else {
2130 mode = Mode::IndexAndHead;
2131 staged_text = Some(index_text.text());
2132 }
2133 } else {
2134 mode = Mode::IndexAndHead;
2135 staged_text = None;
2136 }
2137 } else {
2138 mode = Mode::IndexAndHead;
2139 committed_text = None;
2140 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2141 }
2142
2143 proto::OpenUncommittedDiffResponse {
2144 committed_text,
2145 staged_text,
2146 mode: mode.into(),
2147 }
2148 })
2149 }
2150
2151 async fn handle_update_diff_bases(
2152 this: Entity<Self>,
2153 request: TypedEnvelope<proto::UpdateDiffBases>,
2154 mut cx: AsyncApp,
2155 ) -> Result<()> {
2156 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2157 this.update(&mut cx, |this, cx| {
2158 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2159 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2160 let buffer = buffer.read(cx).text_snapshot();
2161 diff_state.update(cx, |diff_state, cx| {
2162 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2163 })
2164 }
2165 }
2166 })
2167 }
2168
2169 async fn handle_blame_buffer(
2170 this: Entity<Self>,
2171 envelope: TypedEnvelope<proto::BlameBuffer>,
2172 mut cx: AsyncApp,
2173 ) -> Result<proto::BlameBufferResponse> {
2174 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2175 let version = deserialize_version(&envelope.payload.version);
2176 let buffer = this.read_with(&cx, |this, cx| {
2177 this.buffer_store.read(cx).get_existing(buffer_id)
2178 })??;
2179 buffer
2180 .update(&mut cx, |buffer, _| {
2181 buffer.wait_for_version(version.clone())
2182 })?
2183 .await?;
2184 let blame = this
2185 .update(&mut cx, |this, cx| {
2186 this.blame_buffer(&buffer, Some(version), cx)
2187 })?
2188 .await?;
2189 Ok(serialize_blame_buffer_response(blame))
2190 }
2191
2192 async fn handle_get_permalink_to_line(
2193 this: Entity<Self>,
2194 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2195 mut cx: AsyncApp,
2196 ) -> Result<proto::GetPermalinkToLineResponse> {
2197 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2198 // let version = deserialize_version(&envelope.payload.version);
2199 let selection = {
2200 let proto_selection = envelope
2201 .payload
2202 .selection
                .context("no selection provided to get a permalink for")?;
2204 proto_selection.start as u32..proto_selection.end as u32
2205 };
2206 let buffer = this.read_with(&cx, |this, cx| {
2207 this.buffer_store.read(cx).get_existing(buffer_id)
2208 })??;
2209 let permalink = this
2210 .update(&mut cx, |this, cx| {
2211 this.get_permalink_to_line(&buffer, selection, cx)
2212 })?
2213 .await?;
2214 Ok(proto::GetPermalinkToLineResponse {
2215 permalink: permalink.to_string(),
2216 })
2217 }
2218
2219 fn repository_for_request(
2220 this: &Entity<Self>,
2221 id: RepositoryId,
2222 cx: &mut AsyncApp,
2223 ) -> Result<Entity<Repository>> {
2224 this.read_with(cx, |this, _| {
2225 this.repositories
2226 .get(&id)
2227 .context("missing repository handle")
2228 .cloned()
2229 })?
2230 }
2231
2232 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2233 self.repositories
2234 .iter()
2235 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2236 .collect()
2237 }
2238
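    /// Groups a batch of updated worktree entries by the repository that
    /// contains them, assigning each path to its innermost repository when
    /// repositories are nested. The matching runs on the background executor
    /// and yields repository-relative paths.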
2239 fn process_updated_entries(
2240 &self,
2241 worktree: &Entity<Worktree>,
2242 updated_entries: &[(Arc<Path>, ProjectEntryId, PathChange)],
2243 cx: &mut App,
2244 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2245 let mut repo_paths = self
2246 .repositories
2247 .values()
2248 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2249 .collect::<Vec<_>>();
2250 let mut entries: Vec<_> = updated_entries
2251 .iter()
2252 .map(|(path, _, _)| path.clone())
2253 .collect();
2254 entries.sort();
2255 let worktree = worktree.read(cx);
2256
2257 let entries = entries
2258 .into_iter()
2259 .filter_map(|path| worktree.absolutize(&path).ok())
2260 .collect::<Arc<[_]>>();
2261
2262 let executor = cx.background_executor().clone();
2263 cx.background_executor().spawn(async move {
2264 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2265 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2266 let mut tasks = FuturesOrdered::new();
2267 for (repo_path, repo) in repo_paths.into_iter().rev() {
2268 let entries = entries.clone();
2269 let task = executor.spawn(async move {
2270 // Find all repository paths that belong to this repo
2271 let mut ix = entries.partition_point(|path| path < &*repo_path);
2272 if ix == entries.len() {
2273 return None;
2274 };
2275
2276 let mut paths = vec![];
                // All paths prefixed by a given repo will constitute a contiguous range.
2278 while let Some(path) = entries.get(ix)
2279 && let Some(repo_path) =
2280 RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, &path)
2281 {
2282 paths.push((repo_path, ix));
2283 ix += 1;
2284 }
2285 Some((repo, paths))
2286 });
2287 tasks.push_back(task);
2288 }
2289
2290 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2291 let mut path_was_used = vec![false; entries.len()];
2292 let tasks = tasks.collect::<Vec<_>>().await;
            // Process the tasks in order. Because they were spawned for repositories in reverse
            // path order, more deeply nested repositories are seen first, and we always want to
            // assign a path to its innermost repository.
2295 for t in tasks {
2296 let Some((repo, paths)) = t else {
2297 continue;
2298 };
2299 let entry = paths_by_git_repo.entry(repo).or_default();
2300 for (repo_path, ix) in paths {
2301 if path_was_used[ix] {
2302 continue;
2303 }
2304 path_was_used[ix] = true;
2305 entry.push(repo_path);
2306 }
2307 }
2308
2309 paths_by_git_repo
2310 })
2311 }
2312}
2313
2314impl BufferGitState {
2315 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2316 Self {
2317 unstaged_diff: Default::default(),
2318 uncommitted_diff: Default::default(),
2319 recalculate_diff_task: Default::default(),
2320 language: Default::default(),
2321 language_registry: Default::default(),
2322 recalculating_tx: postage::watch::channel_with(false).0,
2323 hunk_staging_operation_count: 0,
2324 hunk_staging_operation_count_as_of_write: 0,
2325 head_text: Default::default(),
2326 index_text: Default::default(),
2327 head_changed: Default::default(),
2328 index_changed: Default::default(),
2329 language_changed: Default::default(),
2330 conflict_updated_futures: Default::default(),
2331 conflict_set: Default::default(),
2332 reparse_conflict_markers_task: Default::default(),
2333 }
2334 }
2335
2336 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2337 self.language = buffer.read(cx).language().cloned();
2338 self.language_changed = true;
2339 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2340 }
2341
2342 fn reparse_conflict_markers(
2343 &mut self,
2344 buffer: text::BufferSnapshot,
2345 cx: &mut Context<Self>,
2346 ) -> oneshot::Receiver<()> {
2347 let (tx, rx) = oneshot::channel();
2348
2349 let Some(conflict_set) = self
2350 .conflict_set
2351 .as_ref()
2352 .and_then(|conflict_set| conflict_set.upgrade())
2353 else {
2354 return rx;
2355 };
2356
2357 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2358 if conflict_set.has_conflict {
2359 Some(conflict_set.snapshot())
2360 } else {
2361 None
2362 }
2363 });
2364
2365 if let Some(old_snapshot) = old_snapshot {
2366 self.conflict_updated_futures.push(tx);
2367 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2368 let (snapshot, changed_range) = cx
2369 .background_spawn(async move {
2370 let new_snapshot = ConflictSet::parse(&buffer);
2371 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2372 (new_snapshot, changed_range)
2373 })
2374 .await;
2375 this.update(cx, |this, cx| {
2376 if let Some(conflict_set) = &this.conflict_set {
2377 conflict_set
2378 .update(cx, |conflict_set, cx| {
2379 conflict_set.set_snapshot(snapshot, changed_range, cx);
2380 })
2381 .ok();
2382 }
2383 let futures = std::mem::take(&mut this.conflict_updated_futures);
2384 for tx in futures {
2385 tx.send(()).ok();
2386 }
2387 })
2388 }))
2389 }
2390
2391 rx
2392 }
2393
2394 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2395 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2396 }
2397
2398 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2399 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2400 }
2401
2402 fn handle_base_texts_updated(
2403 &mut self,
2404 buffer: text::BufferSnapshot,
2405 message: proto::UpdateDiffBases,
2406 cx: &mut Context<Self>,
2407 ) {
2408 use proto::update_diff_bases::Mode;
2409
2410 let Some(mode) = Mode::from_i32(message.mode) else {
2411 return;
2412 };
2413
2414 let diff_bases_change = match mode {
2415 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2416 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2417 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2418 Mode::IndexAndHead => DiffBasesChange::SetEach {
2419 index: message.staged_text,
2420 head: message.committed_text,
2421 },
2422 };
2423
2424 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2425 }
2426
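    /// If a diff recalculation is currently in flight, returns a future that
    /// resolves once it finishes (or is cancelled); returns `None` when no
    /// recalculation is in progress.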
2427 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2428 if *self.recalculating_tx.borrow() {
2429 let mut rx = self.recalculating_tx.subscribe();
2430 return Some(async move {
2431 loop {
2432 let is_recalculating = rx.recv().await;
2433 if is_recalculating != Some(true) {
2434 break;
2435 }
2436 }
2437 });
2438 } else {
2439 None
2440 }
2441 }
2442
2443 fn diff_bases_changed(
2444 &mut self,
2445 buffer: text::BufferSnapshot,
2446 diff_bases_change: Option<DiffBasesChange>,
2447 cx: &mut Context<Self>,
2448 ) {
2449 match diff_bases_change {
2450 Some(DiffBasesChange::SetIndex(index)) => {
2451 self.index_text = index.map(|mut index| {
2452 text::LineEnding::normalize(&mut index);
2453 Arc::new(index)
2454 });
2455 self.index_changed = true;
2456 }
2457 Some(DiffBasesChange::SetHead(head)) => {
2458 self.head_text = head.map(|mut head| {
2459 text::LineEnding::normalize(&mut head);
2460 Arc::new(head)
2461 });
2462 self.head_changed = true;
2463 }
2464 Some(DiffBasesChange::SetBoth(text)) => {
2465 let text = text.map(|mut text| {
2466 text::LineEnding::normalize(&mut text);
2467 Arc::new(text)
2468 });
2469 self.head_text = text.clone();
2470 self.index_text = text;
2471 self.head_changed = true;
2472 self.index_changed = true;
2473 }
2474 Some(DiffBasesChange::SetEach { index, head }) => {
2475 self.index_text = index.map(|mut index| {
2476 text::LineEnding::normalize(&mut index);
2477 Arc::new(index)
2478 });
2479 self.index_changed = true;
2480 self.head_text = head.map(|mut head| {
2481 text::LineEnding::normalize(&mut head);
2482 Arc::new(head)
2483 });
2484 self.head_changed = true;
2485 }
2486 None => {}
2487 }
2488
2489 self.recalculate_diffs(buffer, cx)
2490 }
2491
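    /// Recomputes the unstaged and uncommitted diffs for `buffer` against the
    /// cached index and HEAD texts on a background task. The attempt is
    /// abandoned if further hunk staging operations arrive before it finishes,
    /// since a later recalculation will observe the settled state of the index.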
2492 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2493 *self.recalculating_tx.borrow_mut() = true;
2494
2495 let language = self.language.clone();
2496 let language_registry = self.language_registry.clone();
2497 let unstaged_diff = self.unstaged_diff();
2498 let uncommitted_diff = self.uncommitted_diff();
2499 let head = self.head_text.clone();
2500 let index = self.index_text.clone();
2501 let index_changed = self.index_changed;
2502 let head_changed = self.head_changed;
2503 let language_changed = self.language_changed;
2504 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2505 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2506 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2507 (None, None) => true,
2508 _ => false,
2509 };
2510 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2511 log::debug!(
2512 "start recalculating diffs for buffer {}",
2513 buffer.remote_id()
2514 );
2515
2516 let mut new_unstaged_diff = None;
2517 if let Some(unstaged_diff) = &unstaged_diff {
2518 new_unstaged_diff = Some(
2519 BufferDiff::update_diff(
2520 unstaged_diff.clone(),
2521 buffer.clone(),
2522 index,
2523 index_changed,
2524 language_changed,
2525 language.clone(),
2526 language_registry.clone(),
2527 cx,
2528 )
2529 .await?,
2530 );
2531 }
2532
2533 let mut new_uncommitted_diff = None;
2534 if let Some(uncommitted_diff) = &uncommitted_diff {
2535 new_uncommitted_diff = if index_matches_head {
2536 new_unstaged_diff.clone()
2537 } else {
2538 Some(
2539 BufferDiff::update_diff(
2540 uncommitted_diff.clone(),
2541 buffer.clone(),
2542 head,
2543 head_changed,
2544 language_changed,
2545 language.clone(),
2546 language_registry.clone(),
2547 cx,
2548 )
2549 .await?,
2550 )
2551 }
2552 }
2553
2554 let cancel = this.update(cx, |this, _| {
2555 // This checks whether all pending stage/unstage operations
2556 // have quiesced (i.e. both the corresponding write and the
2557 // read of that write have completed). If not, then we cancel
2558 // this recalculation attempt to avoid invalidating pending
2559 // state too quickly; another recalculation will come along
2560 // later and clear the pending state once the state of the index has settled.
2561 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2562 *this.recalculating_tx.borrow_mut() = false;
2563 true
2564 } else {
2565 false
2566 }
2567 })?;
2568 if cancel {
2569 log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {} ",
                        "due to subsequent hunk operations",
                    ),
2574 buffer.remote_id()
2575 );
2576 return Ok(());
2577 }
2578
2579 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2580 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2581 {
2582 unstaged_diff.update(cx, |diff, cx| {
2583 if language_changed {
2584 diff.language_changed(cx);
2585 }
2586 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2587 })?
2588 } else {
2589 None
2590 };
2591
2592 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2593 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2594 {
2595 uncommitted_diff.update(cx, |diff, cx| {
2596 if language_changed {
2597 diff.language_changed(cx);
2598 }
2599 diff.set_snapshot_with_secondary(
2600 new_uncommitted_diff,
2601 &buffer,
2602 unstaged_changed_range,
2603 true,
2604 cx,
2605 );
2606 })?;
2607 }
2608
2609 log::debug!(
2610 "finished recalculating diffs for buffer {}",
2611 buffer.remote_id()
2612 );
2613
2614 if let Some(this) = this.upgrade() {
2615 this.update(cx, |this, _| {
2616 this.index_changed = false;
2617 this.head_changed = false;
2618 this.language_changed = false;
2619 *this.recalculating_tx.borrow_mut() = false;
2620 })?;
2621 }
2622
2623 Ok(())
2624 }));
2625 }
2626}
2627
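/// Builds an askpass delegate that forwards credential prompts to the
/// downstream client as `proto::AskPassRequest` messages and relays the
/// response back to the requesting git operation.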
2628fn make_remote_delegate(
2629 this: Entity<GitStore>,
2630 project_id: u64,
2631 repository_id: RepositoryId,
2632 askpass_id: u64,
2633 cx: &mut AsyncApp,
2634) -> AskPassDelegate {
2635 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2636 this.update(cx, |this, cx| {
2637 let Some((client, _)) = this.downstream_client() else {
2638 return;
2639 };
2640 let response = client.request(proto::AskPassRequest {
2641 project_id,
2642 repository_id: repository_id.to_proto(),
2643 askpass_id,
2644 prompt,
2645 });
2646 cx.spawn(async move |_, _| {
2647 tx.send(response.await?.response).ok();
2648 anyhow::Ok(())
2649 })
2650 .detach_and_log_err(cx);
2651 })
2652 .log_err();
2653 })
2654}
2655
2656impl RepositoryId {
2657 pub fn to_proto(self) -> u64 {
2658 self.0
2659 }
2660
2661 pub fn from_proto(id: u64) -> Self {
2662 RepositoryId(id)
2663 }
2664}
2665
2666impl RepositorySnapshot {
2667 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2668 Self {
2669 id,
2670 statuses_by_path: Default::default(),
2671 work_directory_abs_path,
2672 branch: None,
2673 head_commit: None,
2674 scan_id: 0,
2675 merge: Default::default(),
2676 }
2677 }
2678
2679 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2680 proto::UpdateRepository {
2681 branch_summary: self.branch.as_ref().map(branch_to_proto),
2682 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2683 updated_statuses: self
2684 .statuses_by_path
2685 .iter()
2686 .map(|entry| entry.to_proto())
2687 .collect(),
2688 removed_statuses: Default::default(),
2689 current_merge_conflicts: self
2690 .merge
2691 .conflicted_paths
2692 .iter()
2693 .map(|repo_path| repo_path.to_proto())
2694 .collect(),
2695 project_id,
2696 id: self.id.to_proto(),
2697 abs_path: self.work_directory_abs_path.to_proto(),
2698 entry_ids: vec![self.id.to_proto()],
2699 scan_id: self.scan_id,
2700 is_last_update: true,
2701 }
2702 }
2703
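    /// Builds a `proto::UpdateRepository` message describing how this snapshot
    /// differs from `old`: the two sorted status lists are merged, new or
    /// changed entries are collected into `updated_statuses`, and entries that
    /// disappeared are collected into `removed_statuses`.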
2704 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2705 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2706 let mut removed_statuses: Vec<String> = Vec::new();
2707
2708 let mut new_statuses = self.statuses_by_path.iter().peekable();
2709 let mut old_statuses = old.statuses_by_path.iter().peekable();
2710
2711 let mut current_new_entry = new_statuses.next();
2712 let mut current_old_entry = old_statuses.next();
2713 loop {
2714 match (current_new_entry, current_old_entry) {
2715 (Some(new_entry), Some(old_entry)) => {
2716 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2717 Ordering::Less => {
2718 updated_statuses.push(new_entry.to_proto());
2719 current_new_entry = new_statuses.next();
2720 }
2721 Ordering::Equal => {
2722 if new_entry.status != old_entry.status {
2723 updated_statuses.push(new_entry.to_proto());
2724 }
2725 current_old_entry = old_statuses.next();
2726 current_new_entry = new_statuses.next();
2727 }
2728 Ordering::Greater => {
2729 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2730 current_old_entry = old_statuses.next();
2731 }
2732 }
2733 }
2734 (None, Some(old_entry)) => {
2735 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2736 current_old_entry = old_statuses.next();
2737 }
2738 (Some(new_entry), None) => {
2739 updated_statuses.push(new_entry.to_proto());
2740 current_new_entry = new_statuses.next();
2741 }
2742 (None, None) => break,
2743 }
2744 }
2745
2746 proto::UpdateRepository {
2747 branch_summary: self.branch.as_ref().map(branch_to_proto),
2748 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2749 updated_statuses,
2750 removed_statuses,
2751 current_merge_conflicts: self
2752 .merge
2753 .conflicted_paths
2754 .iter()
2755 .map(|path| path.as_ref().to_proto())
2756 .collect(),
2757 project_id,
2758 id: self.id.to_proto(),
2759 abs_path: self.work_directory_abs_path.to_proto(),
2760 entry_ids: vec![],
2761 scan_id: self.scan_id,
2762 is_last_update: true,
2763 }
2764 }
2765
2766 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2767 self.statuses_by_path.iter().cloned()
2768 }
2769
2770 pub fn status_summary(&self) -> GitSummary {
2771 self.statuses_by_path.summary().item_summary
2772 }
2773
2774 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2775 self.statuses_by_path
2776 .get(&PathKey(path.0.clone()), &())
2777 .cloned()
2778 }
2779
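    /// Converts an absolute path into a path relative to this repository's
    /// working directory, returning `None` if the path lies outside of it.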
2780 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2781 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path)
2782 }
2783
2784 #[inline]
2785 fn abs_path_to_repo_path_inner(
2786 work_directory_abs_path: &Path,
2787 abs_path: &Path,
2788 ) -> Option<RepoPath> {
2789 abs_path
2790 .strip_prefix(&work_directory_abs_path)
2791 .map(RepoPath::from)
2792 .ok()
2793 }
2794
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge.conflicted_paths.contains(repo_path)
    }

    pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
        let had_conflict_on_last_merge_head_change =
            self.merge.conflicted_paths.contains(repo_path);
        let has_conflict_currently = self
            .status_for_path(repo_path)
            .is_some_and(|entry| entry.status.is_conflicted());
        had_conflict_on_last_merge_head_change || has_conflict_currently
    }
2807
2808 /// This is the name that will be displayed in the repository selector for this repository.
2809 pub fn display_name(&self) -> SharedString {
2810 self.work_directory_abs_path
2811 .file_name()
2812 .unwrap_or_default()
2813 .to_string_lossy()
2814 .to_string()
2815 .into()
2816 }
2817}
2818
2819impl MergeDetails {
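    /// Reads the repository's merge-related state: the merge message, the
    /// MERGE/CHERRY_PICK/REBASE/REVERT/APPLY head revisions, and the set of
    /// conflicted paths. The boolean in the returned tuple reports whether the
    /// tracked merge heads were updated; as the comment below explains, that
    /// update is deferred while a merge is underway but no conflicts have been
    /// reported yet.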
2820 async fn load(
2821 backend: &Arc<dyn GitRepository>,
2822 status: &SumTree<StatusEntry>,
2823 prev_snapshot: &RepositorySnapshot,
2824 ) -> Result<(MergeDetails, bool)> {
2825 log::debug!("load merge details");
2826 let message = backend.merge_message().await;
2827 let heads = backend
2828 .revparse_batch(vec![
2829 "MERGE_HEAD".into(),
2830 "CHERRY_PICK_HEAD".into(),
2831 "REBASE_HEAD".into(),
2832 "REVERT_HEAD".into(),
2833 "APPLY_HEAD".into(),
2834 ])
2835 .await
2836 .log_err()
2837 .unwrap_or_default()
2838 .into_iter()
2839 .map(|opt| opt.map(SharedString::from))
2840 .collect::<Vec<_>>();
2841 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2842 let conflicted_paths = if merge_heads_changed {
2843 let current_conflicted_paths = TreeSet::from_ordered_entries(
2844 status
2845 .iter()
2846 .filter(|entry| entry.status.is_conflicted())
2847 .map(|entry| entry.repo_path.clone()),
2848 );
2849
2850 // It can happen that we run a scan while a lengthy merge is in progress
2851 // that will eventually result in conflicts, but before those conflicts
2852 // are reported by `git status`. Since for the moment we only care about
2853 // the merge heads state for the purposes of tracking conflicts, don't update
2854 // this state until we see some conflicts.
2855 if heads.iter().any(Option::is_some)
2856 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
2857 && current_conflicted_paths.is_empty()
2858 {
2859 log::debug!("not updating merge heads because no conflicts found");
2860 return Ok((
2861 MergeDetails {
2862 message: message.map(SharedString::from),
2863 ..prev_snapshot.merge.clone()
2864 },
2865 false,
2866 ));
2867 }
2868
2869 current_conflicted_paths
2870 } else {
2871 prev_snapshot.merge.conflicted_paths.clone()
2872 };
2873 let details = MergeDetails {
2874 conflicted_paths,
2875 message: message.map(SharedString::from),
2876 heads,
2877 };
2878 Ok((details, merge_heads_changed))
2879 }
2880}
2881
2882impl Repository {
2883 pub fn snapshot(&self) -> RepositorySnapshot {
2884 self.snapshot.clone()
2885 }
2886
2887 fn local(
2888 id: RepositoryId,
2889 work_directory_abs_path: Arc<Path>,
2890 dot_git_abs_path: Arc<Path>,
2891 repository_dir_abs_path: Arc<Path>,
2892 common_dir_abs_path: Arc<Path>,
2893 project_environment: WeakEntity<ProjectEnvironment>,
2894 fs: Arc<dyn Fs>,
2895 git_store: WeakEntity<GitStore>,
2896 cx: &mut Context<Self>,
2897 ) -> Self {
2898 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2899 Repository {
2900 this: cx.weak_entity(),
2901 git_store,
2902 snapshot,
2903 commit_message_buffer: None,
2904 askpass_delegates: Default::default(),
2905 paths_needing_status_update: Default::default(),
2906 latest_askpass_id: 0,
2907 job_sender: Repository::spawn_local_git_worker(
2908 work_directory_abs_path,
2909 dot_git_abs_path,
2910 repository_dir_abs_path,
2911 common_dir_abs_path,
2912 project_environment,
2913 fs,
2914 cx,
2915 ),
2916 job_id: 0,
2917 active_jobs: Default::default(),
2918 }
2919 }
2920
2921 fn remote(
2922 id: RepositoryId,
2923 work_directory_abs_path: Arc<Path>,
2924 project_id: ProjectId,
2925 client: AnyProtoClient,
2926 git_store: WeakEntity<GitStore>,
2927 cx: &mut Context<Self>,
2928 ) -> Self {
2929 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2930 Self {
2931 this: cx.weak_entity(),
2932 snapshot,
2933 commit_message_buffer: None,
2934 git_store,
2935 paths_needing_status_update: Default::default(),
2936 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2937 askpass_delegates: Default::default(),
2938 latest_askpass_id: 0,
2939 active_jobs: Default::default(),
2940 job_id: 0,
2941 }
2942 }
2943
2944 pub fn git_store(&self) -> Option<Entity<GitStore>> {
2945 self.git_store.upgrade()
2946 }
2947
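    /// Reloads the index and HEAD texts for every open buffer belonging to this
    /// repository and feeds any changes into the corresponding diff state,
    /// forwarding the new base texts to the downstream client when the project
    /// is shared. Only meaningful for local repositories.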
2948 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
2949 let this = cx.weak_entity();
2950 let git_store = self.git_store.clone();
2951 let _ = self.send_keyed_job(
2952 Some(GitJobKey::ReloadBufferDiffBases),
2953 None,
2954 |state, mut cx| async move {
2955 let RepositoryState::Local { backend, .. } = state else {
2956 log::error!("tried to recompute diffs for a non-local repository");
2957 return Ok(());
2958 };
2959
2960 let Some(this) = this.upgrade() else {
2961 return Ok(());
2962 };
2963
2964 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
2965 git_store.update(cx, |git_store, cx| {
2966 git_store
2967 .diffs
2968 .iter()
2969 .filter_map(|(buffer_id, diff_state)| {
2970 let buffer_store = git_store.buffer_store.read(cx);
2971 let buffer = buffer_store.get(*buffer_id)?;
2972 let file = File::from_dyn(buffer.read(cx).file())?;
2973 let abs_path =
2974 file.worktree.read(cx).absolutize(&file.path).ok()?;
2975 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
2976 log::debug!(
2977 "start reload diff bases for repo path {}",
2978 repo_path.0.display()
2979 );
2980 diff_state.update(cx, |diff_state, _| {
2981 let has_unstaged_diff = diff_state
2982 .unstaged_diff
2983 .as_ref()
2984 .is_some_and(|diff| diff.is_upgradable());
2985 let has_uncommitted_diff = diff_state
2986 .uncommitted_diff
2987 .as_ref()
2988 .is_some_and(|set| set.is_upgradable());
2989
2990 Some((
2991 buffer,
2992 repo_path,
2993 has_unstaged_diff.then(|| diff_state.index_text.clone()),
2994 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
2995 ))
2996 })
2997 })
2998 .collect::<Vec<_>>()
2999 })
3000 })??;
3001
3002 let buffer_diff_base_changes = cx
3003 .background_spawn(async move {
3004 let mut changes = Vec::new();
3005 for (buffer, repo_path, current_index_text, current_head_text) in
3006 &repo_diff_state_updates
3007 {
3008 let index_text = if current_index_text.is_some() {
3009 backend.load_index_text(repo_path.clone()).await
3010 } else {
3011 None
3012 };
3013 let head_text = if current_head_text.is_some() {
3014 backend.load_committed_text(repo_path.clone()).await
3015 } else {
3016 None
3017 };
3018
3019 let change =
3020 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3021 (Some(current_index), Some(current_head)) => {
3022 let index_changed =
3023 index_text.as_ref() != current_index.as_deref();
3024 let head_changed =
3025 head_text.as_ref() != current_head.as_deref();
3026 if index_changed && head_changed {
3027 if index_text == head_text {
3028 Some(DiffBasesChange::SetBoth(head_text))
3029 } else {
3030 Some(DiffBasesChange::SetEach {
3031 index: index_text,
3032 head: head_text,
3033 })
3034 }
3035 } else if index_changed {
3036 Some(DiffBasesChange::SetIndex(index_text))
3037 } else if head_changed {
3038 Some(DiffBasesChange::SetHead(head_text))
3039 } else {
3040 None
3041 }
3042 }
3043 (Some(current_index), None) => {
3044 let index_changed =
3045 index_text.as_ref() != current_index.as_deref();
3046 index_changed
3047 .then_some(DiffBasesChange::SetIndex(index_text))
3048 }
3049 (None, Some(current_head)) => {
3050 let head_changed =
3051 head_text.as_ref() != current_head.as_deref();
3052 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3053 }
3054 (None, None) => None,
3055 };
3056
3057 changes.push((buffer.clone(), change))
3058 }
3059 changes
3060 })
3061 .await;
3062
3063 git_store.update(&mut cx, |git_store, cx| {
3064 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3065 let buffer_snapshot = buffer.read(cx).text_snapshot();
3066 let buffer_id = buffer_snapshot.remote_id();
3067 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3068 continue;
3069 };
3070
3071 let downstream_client = git_store.downstream_client();
3072 diff_state.update(cx, |diff_state, cx| {
3073 use proto::update_diff_bases::Mode;
3074
3075 if let Some((diff_bases_change, (client, project_id))) =
3076 diff_bases_change.clone().zip(downstream_client)
3077 {
3078 let (staged_text, committed_text, mode) = match diff_bases_change {
3079 DiffBasesChange::SetIndex(index) => {
3080 (index, None, Mode::IndexOnly)
3081 }
3082 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3083 DiffBasesChange::SetEach { index, head } => {
3084 (index, head, Mode::IndexAndHead)
3085 }
3086 DiffBasesChange::SetBoth(text) => {
3087 (None, text, Mode::IndexMatchesHead)
3088 }
3089 };
3090 client
3091 .send(proto::UpdateDiffBases {
3092 project_id: project_id.to_proto(),
3093 buffer_id: buffer_id.to_proto(),
3094 staged_text,
3095 committed_text,
3096 mode: mode as i32,
3097 })
3098 .log_err();
3099 }
3100
3101 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3102 });
3103 }
3104 })
3105 },
3106 );
3107 }
3108
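    /// Enqueues a unit of work on this repository's job queue. The job closure
    /// receives the current `RepositoryState` (a local git backend or a remote
    /// RPC client) together with an `AsyncApp`, and its result is delivered
    /// through the returned receiver. When `status` is provided, it is recorded
    /// as an active job message for as long as the job runs.
    ///
    /// A rough usage sketch (illustrative only; `repository` stands for some
    /// `&mut Repository`, and listing branches is just an example job):
    ///
    /// ```ignore
    /// let rx = repository.send_job(Some("git branch".into()), |state, _cx| async move {
    ///     match state {
    ///         RepositoryState::Local { backend, .. } => backend.branches().await,
    ///         // A remote repository would go through the RPC client instead.
    ///         RepositoryState::Remote { .. } => anyhow::bail!("remote path elided in this sketch"),
    ///     }
    /// });
    /// ```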
3109 pub fn send_job<F, Fut, R>(
3110 &mut self,
3111 status: Option<SharedString>,
3112 job: F,
3113 ) -> oneshot::Receiver<R>
3114 where
3115 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3116 Fut: Future<Output = R> + 'static,
3117 R: Send + 'static,
3118 {
3119 self.send_keyed_job(None, status, job)
3120 }
3121
3122 fn send_keyed_job<F, Fut, R>(
3123 &mut self,
3124 key: Option<GitJobKey>,
3125 status: Option<SharedString>,
3126 job: F,
3127 ) -> oneshot::Receiver<R>
3128 where
3129 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3130 Fut: Future<Output = R> + 'static,
3131 R: Send + 'static,
3132 {
3133 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3134 let job_id = post_inc(&mut self.job_id);
3135 let this = self.this.clone();
3136 self.job_sender
3137 .unbounded_send(GitJob {
3138 key,
3139 job: Box::new(move |state, cx: &mut AsyncApp| {
3140 let job = job(state, cx.clone());
3141 cx.spawn(async move |cx| {
3142 if let Some(s) = status.clone() {
3143 this.update(cx, |this, cx| {
3144 this.active_jobs.insert(
3145 job_id,
3146 JobInfo {
3147 start: Instant::now(),
3148 message: s.clone(),
3149 },
3150 );
3151
3152 cx.notify();
3153 })
3154 .ok();
3155 }
3156 let result = job.await;
3157
3158 this.update(cx, |this, cx| {
3159 this.active_jobs.remove(&job_id);
3160 cx.notify();
3161 })
3162 .ok();
3163
3164 result_tx.send(result).ok();
3165 })
3166 }),
3167 })
3168 .ok();
3169 result_rx
3170 }
3171
3172 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3173 let Some(git_store) = self.git_store.upgrade() else {
3174 return;
3175 };
3176 let entity = cx.entity();
3177 git_store.update(cx, |git_store, cx| {
3178 let Some((&id, _)) = git_store
3179 .repositories
3180 .iter()
3181 .find(|(_, handle)| *handle == &entity)
3182 else {
3183 return;
3184 };
3185 git_store.active_repo_id = Some(id);
3186 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3187 });
3188 }
3189
3190 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3191 self.snapshot.status()
3192 }
3193
3194 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3195 let git_store = self.git_store.upgrade()?;
3196 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3197 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
3198 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3199 Some(ProjectPath {
3200 worktree_id: worktree.read(cx).id(),
3201 path: relative_path.into(),
3202 })
3203 }
3204
3205 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3206 let git_store = self.git_store.upgrade()?;
3207 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3208 let abs_path = worktree_store.absolutize(path, cx)?;
3209 self.snapshot.abs_path_to_repo_path(&abs_path)
3210 }
3211
3212 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3213 other
3214 .read(cx)
3215 .snapshot
3216 .work_directory_abs_path
3217 .starts_with(&self.snapshot.work_directory_abs_path)
3218 }
3219
3220 pub fn open_commit_buffer(
3221 &mut self,
3222 languages: Option<Arc<LanguageRegistry>>,
3223 buffer_store: Entity<BufferStore>,
3224 cx: &mut Context<Self>,
3225 ) -> Task<Result<Entity<Buffer>>> {
3226 let id = self.id;
3227 if let Some(buffer) = self.commit_message_buffer.clone() {
3228 return Task::ready(Ok(buffer));
3229 }
3230 let this = cx.weak_entity();
3231
3232 let rx = self.send_job(None, move |state, mut cx| async move {
3233 let Some(this) = this.upgrade() else {
3234 bail!("git store was dropped");
3235 };
3236 match state {
3237 RepositoryState::Local { .. } => {
3238 this.update(&mut cx, |_, cx| {
3239 Self::open_local_commit_buffer(languages, buffer_store, cx)
3240 })?
3241 .await
3242 }
3243 RepositoryState::Remote { project_id, client } => {
3244 let request = client.request(proto::OpenCommitMessageBuffer {
3245 project_id: project_id.0,
3246 repository_id: id.to_proto(),
3247 });
3248 let response = request.await.context("requesting to open commit buffer")?;
3249 let buffer_id = BufferId::new(response.buffer_id)?;
3250 let buffer = buffer_store
3251 .update(&mut cx, |buffer_store, cx| {
3252 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3253 })?
3254 .await?;
3255 if let Some(language_registry) = languages {
3256 let git_commit_language =
3257 language_registry.language_for_name("Git Commit").await?;
3258 buffer.update(&mut cx, |buffer, cx| {
3259 buffer.set_language(Some(git_commit_language), cx);
3260 })?;
3261 }
3262 this.update(&mut cx, |this, _| {
3263 this.commit_message_buffer = Some(buffer.clone());
3264 })?;
3265 Ok(buffer)
3266 }
3267 }
3268 });
3269
3270 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3271 }
3272
3273 fn open_local_commit_buffer(
3274 language_registry: Option<Arc<LanguageRegistry>>,
3275 buffer_store: Entity<BufferStore>,
3276 cx: &mut Context<Self>,
3277 ) -> Task<Result<Entity<Buffer>>> {
3278 cx.spawn(async move |repository, cx| {
3279 let buffer = buffer_store
3280 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
3281 .await?;
3282
3283 if let Some(language_registry) = language_registry {
3284 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3285 buffer.update(cx, |buffer, cx| {
3286 buffer.set_language(Some(git_commit_language), cx);
3287 })?;
3288 }
3289
3290 repository.update(cx, |repository, _| {
3291 repository.commit_message_buffer = Some(buffer.clone());
3292 })?;
3293 Ok(buffer)
3294 })
3295 }
3296
3297 pub fn checkout_files(
3298 &mut self,
3299 commit: &str,
3300 paths: Vec<RepoPath>,
3301 _cx: &mut App,
3302 ) -> oneshot::Receiver<Result<()>> {
3303 let commit = commit.to_string();
3304 let id = self.id;
3305
3306 self.send_job(
3307 Some(format!("git checkout {}", commit).into()),
3308 move |git_repo, _| async move {
3309 match git_repo {
3310 RepositoryState::Local {
3311 backend,
3312 environment,
3313 ..
3314 } => {
3315 backend
3316 .checkout_files(commit, paths, environment.clone())
3317 .await
3318 }
3319 RepositoryState::Remote { project_id, client } => {
3320 client
3321 .request(proto::GitCheckoutFiles {
3322 project_id: project_id.0,
3323 repository_id: id.to_proto(),
3324 commit,
3325 paths: paths
3326 .into_iter()
3327 .map(|p| p.to_string_lossy().to_string())
3328 .collect(),
3329 })
3330 .await?;
3331
3332 Ok(())
3333 }
3334 }
3335 },
3336 )
3337 }
3338
3339 pub fn reset(
3340 &mut self,
3341 commit: String,
3342 reset_mode: ResetMode,
3343 _cx: &mut App,
3344 ) -> oneshot::Receiver<Result<()>> {
3346 let id = self.id;
3347
3348 self.send_job(None, move |git_repo, _| async move {
3349 match git_repo {
3350 RepositoryState::Local {
3351 backend,
3352 environment,
3353 ..
3354 } => backend.reset(commit, reset_mode, environment).await,
3355 RepositoryState::Remote { project_id, client } => {
3356 client
3357 .request(proto::GitReset {
3358 project_id: project_id.0,
3359 repository_id: id.to_proto(),
3360 commit,
3361 mode: match reset_mode {
3362 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3363 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3364 },
3365 })
3366 .await?;
3367
3368 Ok(())
3369 }
3370 }
3371 })
3372 }
3373
3374 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3375 let id = self.id;
3376 self.send_job(None, move |git_repo, _cx| async move {
3377 match git_repo {
3378 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3379 RepositoryState::Remote { project_id, client } => {
3380 let resp = client
3381 .request(proto::GitShow {
3382 project_id: project_id.0,
3383 repository_id: id.to_proto(),
3384 commit,
3385 })
3386 .await?;
3387
3388 Ok(CommitDetails {
3389 sha: resp.sha.into(),
3390 message: resp.message.into(),
3391 commit_timestamp: resp.commit_timestamp,
3392 author_email: resp.author_email.into(),
3393 author_name: resp.author_name.into(),
3394 })
3395 }
3396 }
3397 })
3398 }
3399
3400 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3401 let id = self.id;
3402 self.send_job(None, move |git_repo, cx| async move {
3403 match git_repo {
3404 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3405 RepositoryState::Remote {
3406 client, project_id, ..
3407 } => {
3408 let response = client
3409 .request(proto::LoadCommitDiff {
3410 project_id: project_id.0,
3411 repository_id: id.to_proto(),
3412 commit,
3413 })
3414 .await?;
3415 Ok(CommitDiff {
3416 files: response
3417 .files
3418 .into_iter()
3419 .map(|file| CommitFile {
3420 path: Path::new(&file.path).into(),
3421 old_text: file.old_text,
3422 new_text: file.new_text,
3423 })
3424 .collect(),
3425 })
3426 }
3427 }
3428 })
3429 }
3430
3431 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3432 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3433 }
3434
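    /// Stages the given paths in this repository's index. Any open buffers for
    /// those paths whose files still exist on disk are saved first; the staging
    /// itself goes through the local git backend, or through a `proto::Stage`
    /// request when the repository is remote.
    ///
    /// A minimal sketch (illustrative only; assumes an `Entity<Repository>`
    /// handle named `repository`, an async context, and a hypothetical path):
    ///
    /// ```ignore
    /// let task = repository.update(cx, |repo, cx| {
    ///     repo.stage_entries(vec![RepoPath::from_str("src/lib.rs")], cx)
    /// });
    /// task.await?;
    /// ```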
3435 pub fn stage_entries(
3436 &self,
3437 entries: Vec<RepoPath>,
3438 cx: &mut Context<Self>,
3439 ) -> Task<anyhow::Result<()>> {
3440 if entries.is_empty() {
3441 return Task::ready(Ok(()));
3442 }
3443 let id = self.id;
3444
3445 let mut save_futures = Vec::new();
3446 if let Some(buffer_store) = self.buffer_store(cx) {
3447 buffer_store.update(cx, |buffer_store, cx| {
3448 for path in &entries {
3449 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3450 continue;
3451 };
3452 if let Some(buffer) = buffer_store.get_by_path(&project_path) {
3453 if buffer
3454 .read(cx)
3455 .file()
3456 .map_or(false, |file| file.disk_state().exists())
3457 {
3458 save_futures.push(buffer_store.save_buffer(buffer, cx));
3459 }
3460 }
3461 }
3462 })
3463 }
3464
3465 cx.spawn(async move |this, cx| {
3466 for save_future in save_futures {
3467 save_future.await?;
3468 }
3469
3470 this.update(cx, |this, _| {
3471 this.send_job(None, move |git_repo, _cx| async move {
3472 match git_repo {
3473 RepositoryState::Local {
3474 backend,
3475 environment,
3476 ..
3477 } => backend.stage_paths(entries, environment.clone()).await,
3478 RepositoryState::Remote { project_id, client } => {
3479 client
3480 .request(proto::Stage {
3481 project_id: project_id.0,
3482 repository_id: id.to_proto(),
3483 paths: entries
3484 .into_iter()
3485 .map(|repo_path| repo_path.as_ref().to_proto())
3486 .collect(),
3487 })
3488 .await
3489 .context("sending stage request")?;
3490
3491 Ok(())
3492 }
3493 }
3494 })
3495 })?
3496 .await??;
3497
3498 Ok(())
3499 })
3500 }
3501
3502 pub fn unstage_entries(
3503 &self,
3504 entries: Vec<RepoPath>,
3505 cx: &mut Context<Self>,
3506 ) -> Task<anyhow::Result<()>> {
3507 if entries.is_empty() {
3508 return Task::ready(Ok(()));
3509 }
3510 let id = self.id;
3511
3512 let mut save_futures = Vec::new();
3513 if let Some(buffer_store) = self.buffer_store(cx) {
3514 buffer_store.update(cx, |buffer_store, cx| {
3515 for path in &entries {
3516 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3517 continue;
3518 };
3519 if let Some(buffer) = buffer_store.get_by_path(&project_path) {
3520 if buffer
3521 .read(cx)
3522 .file()
3523 .map_or(false, |file| file.disk_state().exists())
3524 {
3525 save_futures.push(buffer_store.save_buffer(buffer, cx));
3526 }
3527 }
3528 }
3529 })
3530 }
3531
3532 cx.spawn(async move |this, cx| {
3533 for save_future in save_futures {
3534 save_future.await?;
3535 }
3536
3537 this.update(cx, |this, _| {
3538 this.send_job(None, move |git_repo, _cx| async move {
3539 match git_repo {
3540 RepositoryState::Local {
3541 backend,
3542 environment,
3543 ..
3544 } => backend.unstage_paths(entries, environment).await,
3545 RepositoryState::Remote { project_id, client } => {
3546 client
3547 .request(proto::Unstage {
3548 project_id: project_id.0,
3549 repository_id: id.to_proto(),
3550 paths: entries
3551 .into_iter()
3552 .map(|repo_path| repo_path.as_ref().to_proto())
3553 .collect(),
3554 })
3555 .await
3556 .context("sending unstage request")?;
3557
3558 Ok(())
3559 }
3560 }
3561 })
3562 })?
3563 .await??;
3564
3565 Ok(())
3566 })
3567 }
3568
3569 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3570 let to_stage = self
3571 .cached_status()
3572 .filter(|entry| !entry.status.staging().is_fully_staged())
3573 .map(|entry| entry.repo_path.clone())
3574 .collect();
3575 self.stage_entries(to_stage, cx)
3576 }
3577
3578 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3579 let to_unstage = self
3580 .cached_status()
3581 .filter(|entry| entry.status.staging().has_staged())
3582 .map(|entry| entry.repo_path.clone())
3583 .collect();
3584 self.unstage_entries(to_unstage, cx)
3585 }
3586
3587 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3588 let to_stash = self
3589 .cached_status()
3590 .map(|entry| entry.repo_path.clone())
3591 .collect();
3592
3593 self.stash_entries(to_stash, cx)
3594 }
3595
3596 pub fn stash_entries(
3597 &mut self,
3598 entries: Vec<RepoPath>,
3599 cx: &mut Context<Self>,
3600 ) -> Task<anyhow::Result<()>> {
3601 let id = self.id;
3602
3603 cx.spawn(async move |this, cx| {
3604 this.update(cx, |this, _| {
3605 this.send_job(None, move |git_repo, _cx| async move {
3606 match git_repo {
3607 RepositoryState::Local {
3608 backend,
3609 environment,
3610 ..
3611 } => backend.stash_paths(entries, environment).await,
3612 RepositoryState::Remote { project_id, client } => {
3613 client
3614 .request(proto::Stash {
3615 project_id: project_id.0,
3616 repository_id: id.to_proto(),
3617 paths: entries
3618 .into_iter()
3619 .map(|repo_path| repo_path.as_ref().to_proto())
3620 .collect(),
3621 })
3622 .await
3623 .context("sending stash request")?;
3624 Ok(())
3625 }
3626 }
3627 })
3628 })?
3629 .await??;
3630 Ok(())
3631 })
3632 }
3633
3634 pub fn stash_pop(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3635 let id = self.id;
3636 cx.spawn(async move |this, cx| {
3637 this.update(cx, |this, _| {
3638 this.send_job(None, move |git_repo, _cx| async move {
3639 match git_repo {
3640 RepositoryState::Local {
3641 backend,
3642 environment,
3643 ..
3644 } => backend.stash_pop(environment).await,
3645 RepositoryState::Remote { project_id, client } => {
3646 client
3647 .request(proto::StashPop {
3648 project_id: project_id.0,
3649 repository_id: id.to_proto(),
3650 })
3651 .await
3652 .context("sending stash pop request")?;
3653 Ok(())
3654 }
3655 }
3656 })
3657 })?
3658 .await??;
3659 Ok(())
3660 })
3661 }
3662
3663 pub fn commit(
3664 &mut self,
3665 message: SharedString,
3666 name_and_email: Option<(SharedString, SharedString)>,
3667 options: CommitOptions,
3668 _cx: &mut App,
3669 ) -> oneshot::Receiver<Result<()>> {
3670 let id = self.id;
3671
3672 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3673 match git_repo {
3674 RepositoryState::Local {
3675 backend,
3676 environment,
3677 ..
3678 } => {
3679 backend
3680 .commit(message, name_and_email, options, environment)
3681 .await
3682 }
3683 RepositoryState::Remote { project_id, client } => {
3684 let (name, email) = name_and_email.unzip();
3685 client
3686 .request(proto::Commit {
3687 project_id: project_id.0,
3688 repository_id: id.to_proto(),
3689 message: String::from(message),
3690 name: name.map(String::from),
3691 email: email.map(String::from),
3692 options: Some(proto::commit::CommitOptions {
3693 amend: options.amend,
3694 signoff: options.signoff,
3695 }),
3696 })
3697 .await
3698 .context("sending commit request")?;
3699
3700 Ok(())
3701 }
3702 }
3703 })
3704 }
3705
3706 pub fn fetch(
3707 &mut self,
3708 fetch_options: FetchOptions,
3709 askpass: AskPassDelegate,
3710 _cx: &mut App,
3711 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3712 let askpass_delegates = self.askpass_delegates.clone();
3713 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3714 let id = self.id;
3715
3716 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3717 match git_repo {
3718 RepositoryState::Local {
3719 backend,
3720 environment,
3721 ..
3722 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3723 RepositoryState::Remote { project_id, client } => {
3724 askpass_delegates.lock().insert(askpass_id, askpass);
3725 let _defer = util::defer(|| {
3726 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3727 debug_assert!(askpass_delegate.is_some());
3728 });
3729
3730 let response = client
3731 .request(proto::Fetch {
3732 project_id: project_id.0,
3733 repository_id: id.to_proto(),
3734 askpass_id,
3735 remote: fetch_options.to_proto(),
3736 })
3737 .await
3738 .context("sending fetch request")?;
3739
3740 Ok(RemoteCommandOutput {
3741 stdout: response.stdout,
3742 stderr: response.stderr,
3743 })
3744 }
3745 }
3746 })
3747 }
3748
3749 pub fn push(
3750 &mut self,
3751 branch: SharedString,
3752 remote: SharedString,
3753 options: Option<PushOptions>,
3754 askpass: AskPassDelegate,
3755 cx: &mut Context<Self>,
3756 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3757 let askpass_delegates = self.askpass_delegates.clone();
3758 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3759 let id = self.id;
3760
3761 let args = options
3762 .map(|option| match option {
3763 PushOptions::SetUpstream => " --set-upstream",
3764 PushOptions::Force => " --force-with-lease",
3765 })
3766 .unwrap_or("");
3767
3768 let updates_tx = self
3769 .git_store()
3770 .and_then(|git_store| match &git_store.read(cx).state {
3771 GitStoreState::Local { downstream, .. } => downstream
3772 .as_ref()
3773 .map(|downstream| downstream.updates_tx.clone()),
3774 _ => None,
3775 });
3776
3777 let this = cx.weak_entity();
3778 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
3780 move |git_repo, mut cx| async move {
3781 match git_repo {
3782 RepositoryState::Local {
3783 backend,
3784 environment,
3785 ..
3786 } => {
3787 let result = backend
3788 .push(
3789 branch.to_string(),
3790 remote.to_string(),
3791 options,
3792 askpass,
3793 environment.clone(),
3794 cx.clone(),
3795 )
3796 .await;
3797 if result.is_ok() {
3798 let branches = backend.branches().await?;
3799 let branch = branches.into_iter().find(|branch| branch.is_head);
3800 log::info!("head branch after scan is {branch:?}");
3801 let snapshot = this.update(&mut cx, |this, cx| {
3802 this.snapshot.branch = branch;
3803 let snapshot = this.snapshot.clone();
3804 cx.emit(RepositoryEvent::Updated {
3805 full_scan: false,
3806 new_instance: false,
3807 });
3808 snapshot
3809 })?;
3810 if let Some(updates_tx) = updates_tx {
3811 updates_tx
3812 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3813 .ok();
3814 }
3815 }
3816 result
3817 }
3818 RepositoryState::Remote { project_id, client } => {
3819 askpass_delegates.lock().insert(askpass_id, askpass);
3820 let _defer = util::defer(|| {
3821 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3822 debug_assert!(askpass_delegate.is_some());
3823 });
3824 let response = client
3825 .request(proto::Push {
3826 project_id: project_id.0,
3827 repository_id: id.to_proto(),
3828 askpass_id,
3829 branch_name: branch.to_string(),
3830 remote_name: remote.to_string(),
3831 options: options.map(|options| match options {
3832 PushOptions::Force => proto::push::PushOptions::Force,
3833 PushOptions::SetUpstream => {
3834 proto::push::PushOptions::SetUpstream
3835 }
3836 }
3837 as i32),
3838 })
3839 .await
3840 .context("sending push request")?;
3841
3842 Ok(RemoteCommandOutput {
3843 stdout: response.stdout,
3844 stderr: response.stderr,
3845 })
3846 }
3847 }
3848 },
3849 )
3850 }

    pub fn pull(
        &mut self,
        branch: SharedString,
        remote: SharedString,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(
            Some(format!("git pull {} {}", remote, branch).into()),
            move |git_repo, cx| async move {
                match git_repo {
                    RepositoryState::Local {
                        backend,
                        environment,
                        ..
                    } => {
                        backend
                            .pull(
                                branch.to_string(),
                                remote.to_string(),
                                askpass,
                                environment.clone(),
                                cx,
                            )
                            .await
                    }
                    RepositoryState::Remote { project_id, client } => {
                        askpass_delegates.lock().insert(askpass_id, askpass);
                        let _defer = util::defer(|| {
                            let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                            debug_assert!(askpass_delegate.is_some());
                        });
                        let response = client
                            .request(proto::Pull {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                askpass_id,
                                branch_name: branch.to_string(),
                                remote_name: remote.to_string(),
                            })
                            .await
                            .context("sending pull request")?;

                        Ok(RemoteCommandOutput {
                            stdout: response.stdout,
                            stderr: response.stderr,
                        })
                    }
                }
            },
        )
    }

    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(path.clone())),
            None,
            move |git_repo, mut cx| async move {
                log::debug!("start updating index text for buffer {}", path.display());
                match git_repo {
                    RepositoryState::Local {
                        backend,
                        environment,
                        ..
                    } => {
                        backend
                            .set_index_text(path.clone(), content, environment.clone())
                            .await?;
                    }
                    RepositoryState::Remote { project_id, client } => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.as_ref().to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!("finish updating index text for buffer {}", path.display());

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store.update(&mut cx, |git_store, cx| {
                        let buffer_id = git_store
                            .buffer_store
                            .read(cx)
                            .get_by_path(&project_path?)?
                            .read(cx)
                            .remote_id();
                        let diff_state = git_store.diffs.get(&buffer_id)?;
                        diff_state.update(cx, |diff_state, _| {
                            diff_state.hunk_staging_operation_count_as_of_write =
                                hunk_staging_operation_count;
                        });
                        Some(())
                    })?;
                }
                Ok(())
            },
        )
    }
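    // A hypothetical sketch of how this job could be used to stage a file's
    // current buffer contents (names and call site are illustrative only):
    //
    //     let text = buffer.read(cx).text();
    //     let rx = self.spawn_set_index_text_job(repo_path, Some(text), None, cx);
    //     // `rx` resolves once the index entry has been rewritten (or the RPC
    //     // round-trip has completed for remote repositories).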

    pub fn get_remotes(
        &mut self,
        branch_name: Option<String>,
    ) -> oneshot::Receiver<Result<Vec<Remote>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::GetRemotes {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    let remotes = response
                        .remotes
                        .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
                        .collect();

                    Ok(remotes)
                }
            }
        })
    }

    pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local { backend, .. } => backend.branches().await,
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::GitGetBranches {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let branches = response
                        .branches
                        .into_iter()
                        .map(|branch| proto_to_branch(&branch))
                        .collect();

                    Ok(branches)
                }
            }
        })
    }

    pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local { backend, .. } => backend.default_branch().await,
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::GetDefaultBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    anyhow::Ok(response.branch.map(SharedString::from))
                }
            }
        })
    }

    pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::GitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            diff_type: match diff_type {
                                DiffType::HeadToIndex => {
                                    proto::git_diff::DiffType::HeadToIndex.into()
                                }
                                DiffType::HeadToWorktree => {
                                    proto::git_diff::DiffType::HeadToWorktree.into()
                                }
                            },
                        })
                        .await?;

                    Ok(response.diff)
                }
            }
        })
    }

    pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git switch -c {branch_name}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local { backend, .. } => {
                        backend.create_branch(branch_name).await
                    }
                    RepositoryState::Remote { project_id, client } => {
                        client
                            .request(proto::GitCreateBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                branch_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }

    pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git switch {branch_name}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local { backend, .. } => {
                        backend.change_branch(branch_name).await
                    }
                    RepositoryState::Remote { project_id, client } => {
                        client
                            .request(proto::GitChangeBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                branch_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }

    pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::CheckForPushedCommits {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let branches = response.pushed_to.into_iter().map(Into::into).collect();

                    Ok(branches)
                }
            }
        })
    }

    pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
        self.send_job(None, |repo, _cx| async move {
            match repo {
                RepositoryState::Local { backend, .. } => backend.checkpoint().await,
                RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
            }
        })
    }

    pub fn restore_checkpoint(
        &mut self,
        checkpoint: GitRepositoryCheckpoint,
    ) -> oneshot::Receiver<Result<()>> {
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local { backend, .. } => {
                    backend.restore_checkpoint(checkpoint).await
                }
                RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
            }
        })
    }

    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        is_new: bool,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        let conflicted_paths = TreeSet::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .map(|path| RepoPath(Path::new(&path).into())),
        );
        self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
        self.snapshot.head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);

        self.snapshot.merge.conflicted_paths = conflicted_paths;

        let edits = update
            .removed_statuses
            .into_iter()
            .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        self.snapshot.statuses_by_path.edit(edits, &());
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        cx.emit(RepositoryEvent::Updated {
            full_scan: true,
            new_instance: is_new,
        });
        Ok(())
    }

    pub fn compare_checkpoints(
        &mut self,
        left: GitRepositoryCheckpoint,
        right: GitRepositoryCheckpoint,
    ) -> oneshot::Receiver<Result<bool>> {
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local { backend, .. } => {
                    backend.compare_checkpoints(left, right).await
                }
                RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
            }
        })
    }

    pub fn diff_checkpoints(
        &mut self,
        base_checkpoint: GitRepositoryCheckpoint,
        target_checkpoint: GitRepositoryCheckpoint,
    ) -> oneshot::Receiver<Result<String>> {
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local { backend, .. } => {
                    backend
                        .diff_checkpoints(base_checkpoint, target_checkpoint)
                        .await
                }
                RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
            }
        })
    }

    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local { backend, .. } = state else {
                    bail!("not a local repository")
                };
                let (snapshot, events) = this
                    .read_with(&mut cx, |this, _| {
                        compute_snapshot(
                            this.id,
                            this.work_directory_abs_path.clone(),
                            this.snapshot.clone(),
                            backend.clone(),
                        )
                    })?
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.snapshot = snapshot.clone();
                    for event in events {
                        cx.emit(event);
                    }
                })?;
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }

    fn spawn_local_git_worker(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        _repository_dir_abs_path: Arc<Path>,
        _common_dir_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let environment = project_environment
                .upgrade()
                .context("missing project environment")?
                .update(cx, |project_environment, cx| {
                    project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
                })?
                .await
                .unwrap_or_else(|| {
                    log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                    HashMap::default()
                });
            let backend = cx
                .background_spawn(async move {
                    fs.open_repo(&dot_git_abs_path)
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                })
                .await?;

            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    backend.clone(),
                );
            }

            let state = RepositoryState::Local {
                backend,
                environment: Arc::new(environment),
            };
            let mut jobs = VecDeque::new();
            loop {
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    if let Some(current_key) = &job.key {
                        // If a newer job with the same key is already queued, skip this
                        // one; only the most recent keyed job needs to run.
                        if jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                        {
                            continue;
                        }
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }

    fn spawn_remote_git_worker(
        project_id: ProjectId,
        client: AnyProtoClient,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote { project_id, client };
            let mut jobs = VecDeque::new();
            loop {
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    if let Some(current_key) = &job.key {
                        if jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                        {
                            continue;
                        }
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }

    fn load_staged_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<Option<String>>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local { backend, .. } => {
                    anyhow::Ok(backend.load_index_text(repo_path).await)
                }
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::OpenUnstagedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    Ok(response.staged_text)
                }
            }
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local { backend, .. } => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote { project_id, client } => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        self.paths_needing_status_update.extend(paths);

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local { backend, .. } = state else {
                    bail!("not a local repository")
                };

                let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
                let statuses = backend.status(&paths).await?;

                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());

                        for (repo_path, status) in &*statuses.entries {
                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left) {
                                if cursor.item().is_some_and(|entry| entry.status == *status) {
                                    continue;
                                }
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                            }));
                        }
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
                            }
                        }
                        changed_path_statuses
                    })
                    .await;

                this.update(&mut cx, |this, cx| {
                    if !changed_path_statuses.is_empty() {
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, &());
                        this.snapshot.scan_id += 1;
                        if let Some(updates_tx) = updates_tx {
                            updates_tx
                                .unbounded_send(DownstreamUpdate::UpdateRepository(
                                    this.snapshot.clone(),
                                ))
                                .ok();
                        }
                    }
                    cx.emit(RepositoryEvent::Updated {
                        full_scan: false,
                        new_instance: false,
                    });
                })
            },
        );
    }

    /// Returns the currently running git command, if any, along with when it started.
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }

    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
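    // A hypothetical sketch of using `barrier` to wait for all previously queued
    // git jobs to finish (illustrative only, not a real call site):
    //
    //     let barrier = repo.update(cx, |repo, _| repo.barrier());
    //     // ...then, on some async executor:
    //     barrier.await.ok();
    //     // Every job submitted before the barrier has now been processed.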
}

fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams {
            sha: &cargo_vcs_info.git.sha1,
            path: &path.to_string_lossy(),
            selection: Some(selection),
        },
    );
    Ok(permalink)
}
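// For reference, a minimal `.cargo_vcs_info.json` of the shape the structs above
// deserialize (illustrative values only):
//
//     {
//       "git": { "sha1": "<full commit sha>" },
//       "path_in_vcs": "crates/example"
//     }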

fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author,
            author_mail: entry.author_mail,
            author_time: entry.author_time,
            author_tz: entry.author_tz,
            committer: entry.committer_name,
            committer_mail: entry.committer_email,
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz,
            summary: entry.summary,
            previous: entry.previous,
            filename: entry.filename,
        })
        .collect::<Vec<_>>();

    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse {
            entries,
            messages,
            remote_url: blame.remote_url,
        }),
    }
}

fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame {
        entries,
        messages,
        remote_url: response.remote_url,
    })
}

fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        ref_name: branch.ref_name.to_string(),
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
            }),
    }
}

fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        ref_name: proto.ref_name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                has_parent: true,
            }
        }),
    }
}

fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
    proto::GitCommitDetails {
        sha: commit.sha.to_string(),
        message: commit.message.to_string(),
        commit_timestamp: commit.commit_timestamp,
        author_email: commit.author_email.to_string(),
        author_name: commit.author_name.to_string(),
    }
}

fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
    CommitDetails {
        sha: proto.sha.clone().into(),
        message: proto.message.clone().into(),
        commit_timestamp: proto.commit_timestamp,
        author_email: proto.author_email.clone().into(),
        author_name: proto.author_name.clone().into(),
    }
}

async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    let statuses = backend
        .status(std::slice::from_ref(&WORK_DIRECTORY_REPO_PATH))
        .await?;
    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        &(),
    );
    let (merge_details, merge_heads_changed) =
        MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
    log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");

    if merge_heads_changed
        || branch != prev_snapshot.branch
        || statuses_by_path != prev_snapshot.statuses_by_path
    {
        events.push(RepositoryEvent::Updated {
            full_scan: true,
            new_instance: false,
        });
    }

    // Cache merge conflict paths so they don't change from staging/unstaging,
    // until the merge heads change (at commit time, etc.).
    if merge_heads_changed {
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    // Useful when `branch` is `None`, e.g. in a detached HEAD state.
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
    };

    Ok((snapshot, events))
}

fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
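// For example, a bare `simple_status` of `proto::GitStatus::Modified` (with no
// `variant` payload) deserializes to:
//
//     FileStatus::Tracked(TrackedStatus {
//         worktree_status: StatusCode::Modified,
//         index_status: StatusCode::Unmodified,
//     })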

fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}