1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::AskPassDelegate;
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 status::{
32 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
33 },
34};
35use gpui::{
36 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
37 WeakEntity,
38};
39use language::{
40 Buffer, BufferEvent, Language, LanguageRegistry,
41 proto::{deserialize_version, serialize_version},
42};
43use parking_lot::Mutex;
44use postage::stream::Stream as _;
45use rpc::{
46 AnyProtoClient, TypedEnvelope,
47 proto::{self, FromProto, ToProto, git_reset, split_repository_update},
48};
49use serde::Deserialize;
50use std::{
51 cmp::Ordering,
52 collections::{BTreeSet, VecDeque},
53 future::Future,
54 mem,
55 ops::Range,
56 path::{Path, PathBuf},
57 sync::{
58 Arc,
59 atomic::{self, AtomicU64},
60 },
61 time::Instant,
62};
63use sum_tree::{Edit, SumTree, TreeSet};
64use text::{Bias, BufferId};
65use util::{ResultExt, debug_panic, paths::SanitizedPath, post_inc};
66use worktree::{
67 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
68 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
69};
70
71pub struct GitStore {
72 state: GitStoreState,
73 buffer_store: Entity<BufferStore>,
74 worktree_store: Entity<WorktreeStore>,
75 repositories: HashMap<RepositoryId, Entity<Repository>>,
76 active_repo_id: Option<RepositoryId>,
77 #[allow(clippy::type_complexity)]
78 loading_diffs:
79 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
80 diffs: HashMap<BufferId, Entity<BufferGitState>>,
81 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
82 _subscriptions: Vec<Subscription>,
83}
84
85#[derive(Default)]
86struct SharedDiffs {
87 unstaged: Option<Entity<BufferDiff>>,
88 uncommitted: Option<Entity<BufferDiff>>,
89}
90
91struct BufferGitState {
92 unstaged_diff: Option<WeakEntity<BufferDiff>>,
93 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
94 conflict_set: Option<WeakEntity<ConflictSet>>,
95 recalculate_diff_task: Option<Task<Result<()>>>,
96 reparse_conflict_markers_task: Option<Task<Result<()>>>,
97 language: Option<Arc<Language>>,
98 language_registry: Option<Arc<LanguageRegistry>>,
99 conflict_updated_futures: Vec<oneshot::Sender<()>>,
100 recalculating_tx: postage::watch::Sender<bool>,
101
102 /// These operation counts are used to ensure that head and index text
103 /// values read from the git repository are up-to-date with any hunk staging
104 /// operations that have been performed on the BufferDiff.
105 ///
106 /// The operation count is incremented immediately when the user initiates a
107 /// hunk stage/unstage operation. Then, upon finishing writing the new index
108 /// text do disk, the `operation count as of write` is updated to reflect
109 /// the operation count that prompted the write.
110 hunk_staging_operation_count: usize,
111 hunk_staging_operation_count_as_of_write: usize,
112
113 head_text: Option<Arc<String>>,
114 index_text: Option<Arc<String>>,
115 head_changed: bool,
116 index_changed: bool,
117 language_changed: bool,
118}
119
120#[derive(Clone, Debug)]
121enum DiffBasesChange {
122 SetIndex(Option<String>),
123 SetHead(Option<String>),
124 SetEach {
125 index: Option<String>,
126 head: Option<String>,
127 },
128 SetBoth(Option<String>),
129}
130
131#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
132enum DiffKind {
133 Unstaged,
134 Uncommitted,
135}
136
137enum GitStoreState {
138 Local {
139 next_repository_id: Arc<AtomicU64>,
140 downstream: Option<LocalDownstreamState>,
141 project_environment: Entity<ProjectEnvironment>,
142 fs: Arc<dyn Fs>,
143 },
144 Remote {
145 upstream_client: AnyProtoClient,
146 upstream_project_id: u64,
147 downstream: Option<(AnyProtoClient, ProjectId)>,
148 },
149}
150
151enum DownstreamUpdate {
152 UpdateRepository(RepositorySnapshot),
153 RemoveRepository(RepositoryId),
154}
155
156struct LocalDownstreamState {
157 client: AnyProtoClient,
158 project_id: ProjectId,
159 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
160 _task: Task<Result<()>>,
161}
162
163#[derive(Clone, Debug)]
164pub struct GitStoreCheckpoint {
165 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
166}
167
168#[derive(Clone, Debug, PartialEq, Eq)]
169pub struct StatusEntry {
170 pub repo_path: RepoPath,
171 pub status: FileStatus,
172}
173
174impl StatusEntry {
175 fn to_proto(&self) -> proto::StatusEntry {
176 let simple_status = match self.status {
177 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
178 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
179 FileStatus::Tracked(TrackedStatus {
180 index_status,
181 worktree_status,
182 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
183 worktree_status
184 } else {
185 index_status
186 }),
187 };
188
189 proto::StatusEntry {
190 repo_path: self.repo_path.as_ref().to_proto(),
191 simple_status,
192 status: Some(status_to_proto(self.status)),
193 }
194 }
195}
196
197impl TryFrom<proto::StatusEntry> for StatusEntry {
198 type Error = anyhow::Error;
199
200 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
201 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
202 let status = status_from_proto(value.simple_status, value.status)?;
203 Ok(Self { repo_path, status })
204 }
205}
206
207impl sum_tree::Item for StatusEntry {
208 type Summary = PathSummary<GitSummary>;
209
210 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
211 PathSummary {
212 max_path: self.repo_path.0.clone(),
213 item_summary: self.status.summary(),
214 }
215 }
216}
217
218impl sum_tree::KeyedItem for StatusEntry {
219 type Key = PathKey;
220
221 fn key(&self) -> Self::Key {
222 PathKey(self.repo_path.0.clone())
223 }
224}
225
226#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
227pub struct RepositoryId(pub u64);
228
229#[derive(Clone, Debug, Default, PartialEq, Eq)]
230pub struct MergeDetails {
231 pub conflicted_paths: TreeSet<RepoPath>,
232 pub message: Option<SharedString>,
233 pub heads: Vec<Option<SharedString>>,
234}
235
236#[derive(Clone, Debug, PartialEq, Eq)]
237pub struct RepositorySnapshot {
238 pub id: RepositoryId,
239 pub statuses_by_path: SumTree<StatusEntry>,
240 pub work_directory_abs_path: Arc<Path>,
241 pub branch: Option<Branch>,
242 pub head_commit: Option<CommitDetails>,
243 pub scan_id: u64,
244 pub merge: MergeDetails,
245 pub remote_origin_url: Option<String>,
246 pub remote_upstream_url: Option<String>,
247}
248
249type JobId = u64;
250
251#[derive(Clone, Debug, PartialEq, Eq)]
252pub struct JobInfo {
253 pub start: Instant,
254 pub message: SharedString,
255}
256
257pub struct Repository {
258 this: WeakEntity<Self>,
259 snapshot: RepositorySnapshot,
260 commit_message_buffer: Option<Entity<Buffer>>,
261 git_store: WeakEntity<GitStore>,
262 // For a local repository, holds paths that have had worktree events since the last status scan completed,
263 // and that should be examined during the next status scan.
264 paths_needing_status_update: BTreeSet<RepoPath>,
265 job_sender: mpsc::UnboundedSender<GitJob>,
266 active_jobs: HashMap<JobId, JobInfo>,
267 job_id: JobId,
268 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
269 latest_askpass_id: u64,
270}
271
272impl std::ops::Deref for Repository {
273 type Target = RepositorySnapshot;
274
275 fn deref(&self) -> &Self::Target {
276 &self.snapshot
277 }
278}
279
280#[derive(Clone)]
281pub enum RepositoryState {
282 Local {
283 backend: Arc<dyn GitRepository>,
284 environment: Arc<HashMap<String, String>>,
285 },
286 Remote {
287 project_id: ProjectId,
288 client: AnyProtoClient,
289 },
290}
291
292#[derive(Clone, Debug)]
293pub enum RepositoryEvent {
294 Updated { full_scan: bool, new_instance: bool },
295 MergeHeadsChanged,
296}
297
298#[derive(Clone, Debug)]
299pub struct JobsUpdated;
300
301#[derive(Debug)]
302pub enum GitStoreEvent {
303 ActiveRepositoryChanged(Option<RepositoryId>),
304 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
305 RepositoryAdded(RepositoryId),
306 RepositoryRemoved(RepositoryId),
307 IndexWriteError(anyhow::Error),
308 JobsUpdated,
309 ConflictsUpdated,
310}
311
312impl EventEmitter<RepositoryEvent> for Repository {}
313impl EventEmitter<JobsUpdated> for Repository {}
314impl EventEmitter<GitStoreEvent> for GitStore {}
315
316pub struct GitJob {
317 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
318 key: Option<GitJobKey>,
319}
320
321#[derive(PartialEq, Eq)]
322enum GitJobKey {
323 WriteIndex(RepoPath),
324 ReloadBufferDiffBases,
325 RefreshStatuses,
326 ReloadGitState,
327}
328
329impl GitStore {
330 pub fn local(
331 worktree_store: &Entity<WorktreeStore>,
332 buffer_store: Entity<BufferStore>,
333 environment: Entity<ProjectEnvironment>,
334 fs: Arc<dyn Fs>,
335 cx: &mut Context<Self>,
336 ) -> Self {
337 Self::new(
338 worktree_store.clone(),
339 buffer_store,
340 GitStoreState::Local {
341 next_repository_id: Arc::new(AtomicU64::new(1)),
342 downstream: None,
343 project_environment: environment,
344 fs,
345 },
346 cx,
347 )
348 }
349
350 pub fn remote(
351 worktree_store: &Entity<WorktreeStore>,
352 buffer_store: Entity<BufferStore>,
353 upstream_client: AnyProtoClient,
354 project_id: u64,
355 cx: &mut Context<Self>,
356 ) -> Self {
357 Self::new(
358 worktree_store.clone(),
359 buffer_store,
360 GitStoreState::Remote {
361 upstream_client,
362 upstream_project_id: project_id,
363 downstream: None,
364 },
365 cx,
366 )
367 }
368
369 fn new(
370 worktree_store: Entity<WorktreeStore>,
371 buffer_store: Entity<BufferStore>,
372 state: GitStoreState,
373 cx: &mut Context<Self>,
374 ) -> Self {
375 let _subscriptions = vec![
376 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
377 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
378 ];
379
380 GitStore {
381 state,
382 buffer_store,
383 worktree_store,
384 repositories: HashMap::default(),
385 active_repo_id: None,
386 _subscriptions,
387 loading_diffs: HashMap::default(),
388 shared_diffs: HashMap::default(),
389 diffs: HashMap::default(),
390 }
391 }
392
393 pub fn init(client: &AnyProtoClient) {
394 client.add_entity_request_handler(Self::handle_get_remotes);
395 client.add_entity_request_handler(Self::handle_get_branches);
396 client.add_entity_request_handler(Self::handle_get_default_branch);
397 client.add_entity_request_handler(Self::handle_change_branch);
398 client.add_entity_request_handler(Self::handle_create_branch);
399 client.add_entity_request_handler(Self::handle_git_init);
400 client.add_entity_request_handler(Self::handle_push);
401 client.add_entity_request_handler(Self::handle_pull);
402 client.add_entity_request_handler(Self::handle_fetch);
403 client.add_entity_request_handler(Self::handle_stage);
404 client.add_entity_request_handler(Self::handle_unstage);
405 client.add_entity_request_handler(Self::handle_stash);
406 client.add_entity_request_handler(Self::handle_stash_pop);
407 client.add_entity_request_handler(Self::handle_commit);
408 client.add_entity_request_handler(Self::handle_reset);
409 client.add_entity_request_handler(Self::handle_show);
410 client.add_entity_request_handler(Self::handle_load_commit_diff);
411 client.add_entity_request_handler(Self::handle_checkout_files);
412 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
413 client.add_entity_request_handler(Self::handle_set_index_text);
414 client.add_entity_request_handler(Self::handle_askpass);
415 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
416 client.add_entity_request_handler(Self::handle_git_diff);
417 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
418 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
419 client.add_entity_message_handler(Self::handle_update_diff_bases);
420 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
421 client.add_entity_request_handler(Self::handle_blame_buffer);
422 client.add_entity_message_handler(Self::handle_update_repository);
423 client.add_entity_message_handler(Self::handle_remove_repository);
424 client.add_entity_request_handler(Self::handle_git_clone);
425 }
426
427 pub fn is_local(&self) -> bool {
428 matches!(self.state, GitStoreState::Local { .. })
429 }
430
431 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
432 match &mut self.state {
433 GitStoreState::Remote {
434 downstream: downstream_client,
435 ..
436 } => {
437 for repo in self.repositories.values() {
438 let update = repo.read(cx).snapshot.initial_update(project_id);
439 for update in split_repository_update(update) {
440 client.send(update).log_err();
441 }
442 }
443 *downstream_client = Some((client, ProjectId(project_id)));
444 }
445 GitStoreState::Local {
446 downstream: downstream_client,
447 ..
448 } => {
449 let mut snapshots = HashMap::default();
450 let (updates_tx, mut updates_rx) = mpsc::unbounded();
451 for repo in self.repositories.values() {
452 updates_tx
453 .unbounded_send(DownstreamUpdate::UpdateRepository(
454 repo.read(cx).snapshot.clone(),
455 ))
456 .ok();
457 }
458 *downstream_client = Some(LocalDownstreamState {
459 client: client.clone(),
460 project_id: ProjectId(project_id),
461 updates_tx,
462 _task: cx.spawn(async move |this, cx| {
463 cx.background_spawn(async move {
464 while let Some(update) = updates_rx.next().await {
465 match update {
466 DownstreamUpdate::UpdateRepository(snapshot) => {
467 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
468 {
469 let update =
470 snapshot.build_update(old_snapshot, project_id);
471 *old_snapshot = snapshot;
472 for update in split_repository_update(update) {
473 client.send(update)?;
474 }
475 } else {
476 let update = snapshot.initial_update(project_id);
477 for update in split_repository_update(update) {
478 client.send(update)?;
479 }
480 snapshots.insert(snapshot.id, snapshot);
481 }
482 }
483 DownstreamUpdate::RemoveRepository(id) => {
484 client.send(proto::RemoveRepository {
485 project_id,
486 id: id.to_proto(),
487 })?;
488 }
489 }
490 }
491 anyhow::Ok(())
492 })
493 .await
494 .ok();
495 this.update(cx, |this, _| {
496 if let GitStoreState::Local {
497 downstream: downstream_client,
498 ..
499 } = &mut this.state
500 {
501 downstream_client.take();
502 } else {
503 unreachable!("unshared called on remote store");
504 }
505 })
506 }),
507 });
508 }
509 }
510 }
511
512 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
513 match &mut self.state {
514 GitStoreState::Local {
515 downstream: downstream_client,
516 ..
517 } => {
518 downstream_client.take();
519 }
520 GitStoreState::Remote {
521 downstream: downstream_client,
522 ..
523 } => {
524 downstream_client.take();
525 }
526 }
527 self.shared_diffs.clear();
528 }
529
530 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
531 self.shared_diffs.remove(peer_id);
532 }
533
534 pub fn active_repository(&self) -> Option<Entity<Repository>> {
535 self.active_repo_id
536 .as_ref()
537 .map(|id| self.repositories[id].clone())
538 }
539
540 pub fn open_unstaged_diff(
541 &mut self,
542 buffer: Entity<Buffer>,
543 cx: &mut Context<Self>,
544 ) -> Task<Result<Entity<BufferDiff>>> {
545 let buffer_id = buffer.read(cx).remote_id();
546 if let Some(diff_state) = self.diffs.get(&buffer_id)
547 && let Some(unstaged_diff) = diff_state
548 .read(cx)
549 .unstaged_diff
550 .as_ref()
551 .and_then(|weak| weak.upgrade())
552 {
553 if let Some(task) =
554 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
555 {
556 return cx.background_executor().spawn(async move {
557 task.await;
558 Ok(unstaged_diff)
559 });
560 }
561 return Task::ready(Ok(unstaged_diff));
562 }
563
564 let Some((repo, repo_path)) =
565 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
566 else {
567 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
568 };
569
570 let task = self
571 .loading_diffs
572 .entry((buffer_id, DiffKind::Unstaged))
573 .or_insert_with(|| {
574 let staged_text = repo.update(cx, |repo, cx| {
575 repo.load_staged_text(buffer_id, repo_path, cx)
576 });
577 cx.spawn(async move |this, cx| {
578 Self::open_diff_internal(
579 this,
580 DiffKind::Unstaged,
581 staged_text.await.map(DiffBasesChange::SetIndex),
582 buffer,
583 cx,
584 )
585 .await
586 .map_err(Arc::new)
587 })
588 .shared()
589 })
590 .clone();
591
592 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
593 }
594
595 pub fn open_uncommitted_diff(
596 &mut self,
597 buffer: Entity<Buffer>,
598 cx: &mut Context<Self>,
599 ) -> Task<Result<Entity<BufferDiff>>> {
600 let buffer_id = buffer.read(cx).remote_id();
601
602 if let Some(diff_state) = self.diffs.get(&buffer_id)
603 && let Some(uncommitted_diff) = diff_state
604 .read(cx)
605 .uncommitted_diff
606 .as_ref()
607 .and_then(|weak| weak.upgrade())
608 {
609 if let Some(task) =
610 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
611 {
612 return cx.background_executor().spawn(async move {
613 task.await;
614 Ok(uncommitted_diff)
615 });
616 }
617 return Task::ready(Ok(uncommitted_diff));
618 }
619
620 let Some((repo, repo_path)) =
621 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
622 else {
623 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
624 };
625
626 let task = self
627 .loading_diffs
628 .entry((buffer_id, DiffKind::Uncommitted))
629 .or_insert_with(|| {
630 let changes = repo.update(cx, |repo, cx| {
631 repo.load_committed_text(buffer_id, repo_path, cx)
632 });
633
634 cx.spawn(async move |this, cx| {
635 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
636 .await
637 .map_err(Arc::new)
638 })
639 .shared()
640 })
641 .clone();
642
643 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
644 }
645
646 async fn open_diff_internal(
647 this: WeakEntity<Self>,
648 kind: DiffKind,
649 texts: Result<DiffBasesChange>,
650 buffer_entity: Entity<Buffer>,
651 cx: &mut AsyncApp,
652 ) -> Result<Entity<BufferDiff>> {
653 let diff_bases_change = match texts {
654 Err(e) => {
655 this.update(cx, |this, cx| {
656 let buffer = buffer_entity.read(cx);
657 let buffer_id = buffer.remote_id();
658 this.loading_diffs.remove(&(buffer_id, kind));
659 })?;
660 return Err(e);
661 }
662 Ok(change) => change,
663 };
664
665 this.update(cx, |this, cx| {
666 let buffer = buffer_entity.read(cx);
667 let buffer_id = buffer.remote_id();
668 let language = buffer.language().cloned();
669 let language_registry = buffer.language_registry();
670 let text_snapshot = buffer.text_snapshot();
671 this.loading_diffs.remove(&(buffer_id, kind));
672
673 let git_store = cx.weak_entity();
674 let diff_state = this
675 .diffs
676 .entry(buffer_id)
677 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
678
679 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
680
681 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
682 diff_state.update(cx, |diff_state, cx| {
683 diff_state.language = language;
684 diff_state.language_registry = language_registry;
685
686 match kind {
687 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
688 DiffKind::Uncommitted => {
689 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
690 diff
691 } else {
692 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
693 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
694 unstaged_diff
695 };
696
697 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
698 diff_state.uncommitted_diff = Some(diff.downgrade())
699 }
700 }
701
702 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
703 let rx = diff_state.wait_for_recalculation();
704
705 anyhow::Ok(async move {
706 if let Some(rx) = rx {
707 rx.await;
708 }
709 Ok(diff)
710 })
711 })
712 })??
713 .await
714 }
715
716 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
717 let diff_state = self.diffs.get(&buffer_id)?;
718 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
719 }
720
721 pub fn get_uncommitted_diff(
722 &self,
723 buffer_id: BufferId,
724 cx: &App,
725 ) -> Option<Entity<BufferDiff>> {
726 let diff_state = self.diffs.get(&buffer_id)?;
727 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
728 }
729
730 pub fn open_conflict_set(
731 &mut self,
732 buffer: Entity<Buffer>,
733 cx: &mut Context<Self>,
734 ) -> Entity<ConflictSet> {
735 log::debug!("open conflict set");
736 let buffer_id = buffer.read(cx).remote_id();
737
738 if let Some(git_state) = self.diffs.get(&buffer_id)
739 && let Some(conflict_set) = git_state
740 .read(cx)
741 .conflict_set
742 .as_ref()
743 .and_then(|weak| weak.upgrade())
744 {
745 let conflict_set = conflict_set;
746 let buffer_snapshot = buffer.read(cx).text_snapshot();
747
748 git_state.update(cx, |state, cx| {
749 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
750 });
751
752 return conflict_set;
753 }
754
755 let is_unmerged = self
756 .repository_and_path_for_buffer_id(buffer_id, cx)
757 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
758 let git_store = cx.weak_entity();
759 let buffer_git_state = self
760 .diffs
761 .entry(buffer_id)
762 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
763 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
764
765 self._subscriptions
766 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
767 cx.emit(GitStoreEvent::ConflictsUpdated);
768 }));
769
770 buffer_git_state.update(cx, |state, cx| {
771 state.conflict_set = Some(conflict_set.downgrade());
772 let buffer_snapshot = buffer.read(cx).text_snapshot();
773 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
774 });
775
776 conflict_set
777 }
778
779 pub fn project_path_git_status(
780 &self,
781 project_path: &ProjectPath,
782 cx: &App,
783 ) -> Option<FileStatus> {
784 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
785 Some(repo.read(cx).status_for_path(&repo_path)?.status)
786 }
787
788 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
789 let mut work_directory_abs_paths = Vec::new();
790 let mut checkpoints = Vec::new();
791 for repository in self.repositories.values() {
792 repository.update(cx, |repository, _| {
793 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
794 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
795 });
796 }
797
798 cx.background_executor().spawn(async move {
799 let checkpoints = future::try_join_all(checkpoints).await?;
800 Ok(GitStoreCheckpoint {
801 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
802 .into_iter()
803 .zip(checkpoints)
804 .collect(),
805 })
806 })
807 }
808
809 pub fn restore_checkpoint(
810 &self,
811 checkpoint: GitStoreCheckpoint,
812 cx: &mut App,
813 ) -> Task<Result<()>> {
814 let repositories_by_work_dir_abs_path = self
815 .repositories
816 .values()
817 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
818 .collect::<HashMap<_, _>>();
819
820 let mut tasks = Vec::new();
821 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
822 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
823 let restore = repository.update(cx, |repository, _| {
824 repository.restore_checkpoint(checkpoint)
825 });
826 tasks.push(async move { restore.await? });
827 }
828 }
829 cx.background_spawn(async move {
830 future::try_join_all(tasks).await?;
831 Ok(())
832 })
833 }
834
835 /// Compares two checkpoints, returning true if they are equal.
836 pub fn compare_checkpoints(
837 &self,
838 left: GitStoreCheckpoint,
839 mut right: GitStoreCheckpoint,
840 cx: &mut App,
841 ) -> Task<Result<bool>> {
842 let repositories_by_work_dir_abs_path = self
843 .repositories
844 .values()
845 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
846 .collect::<HashMap<_, _>>();
847
848 let mut tasks = Vec::new();
849 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
850 if let Some(right_checkpoint) = right
851 .checkpoints_by_work_dir_abs_path
852 .remove(&work_dir_abs_path)
853 {
854 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
855 {
856 let compare = repository.update(cx, |repository, _| {
857 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
858 });
859
860 tasks.push(async move { compare.await? });
861 }
862 } else {
863 return Task::ready(Ok(false));
864 }
865 }
866 cx.background_spawn(async move {
867 Ok(future::try_join_all(tasks)
868 .await?
869 .into_iter()
870 .all(|result| result))
871 })
872 }
873
874 /// Blames a buffer.
875 pub fn blame_buffer(
876 &self,
877 buffer: &Entity<Buffer>,
878 version: Option<clock::Global>,
879 cx: &mut App,
880 ) -> Task<Result<Option<Blame>>> {
881 let buffer = buffer.read(cx);
882 let Some((repo, repo_path)) =
883 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
884 else {
885 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
886 };
887 let content = match &version {
888 Some(version) => buffer.rope_for_version(version),
889 None => buffer.as_rope().clone(),
890 };
891 let version = version.unwrap_or(buffer.version());
892 let buffer_id = buffer.remote_id();
893
894 let rx = repo.update(cx, |repo, _| {
895 repo.send_job(None, move |state, _| async move {
896 match state {
897 RepositoryState::Local { backend, .. } => backend
898 .blame(repo_path.clone(), content)
899 .await
900 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
901 .map(Some),
902 RepositoryState::Remote { project_id, client } => {
903 let response = client
904 .request(proto::BlameBuffer {
905 project_id: project_id.to_proto(),
906 buffer_id: buffer_id.into(),
907 version: serialize_version(&version),
908 })
909 .await?;
910 Ok(deserialize_blame_buffer_response(response))
911 }
912 }
913 })
914 });
915
916 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
917 }
918
919 pub fn get_permalink_to_line(
920 &self,
921 buffer: &Entity<Buffer>,
922 selection: Range<u32>,
923 cx: &mut App,
924 ) -> Task<Result<url::Url>> {
925 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
926 return Task::ready(Err(anyhow!("buffer has no file")));
927 };
928
929 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
930 &(file.worktree.read(cx).id(), file.path.clone()).into(),
931 cx,
932 ) else {
933 // If we're not in a Git repo, check whether this is a Rust source
934 // file in the Cargo registry (presumably opened with go-to-definition
935 // from a normal Rust file). If so, we can put together a permalink
936 // using crate metadata.
937 if buffer
938 .read(cx)
939 .language()
940 .is_none_or(|lang| lang.name() != "Rust".into())
941 {
942 return Task::ready(Err(anyhow!("no permalink available")));
943 }
944 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
945 return Task::ready(Err(anyhow!("no permalink available")));
946 };
947 return cx.spawn(async move |cx| {
948 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
949 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
950 .context("no permalink available")
951 });
952
953 // TODO remote case
954 };
955
956 let buffer_id = buffer.read(cx).remote_id();
957 let branch = repo.read(cx).branch.clone();
958 let remote = branch
959 .as_ref()
960 .and_then(|b| b.upstream.as_ref())
961 .and_then(|b| b.remote_name())
962 .unwrap_or("origin")
963 .to_string();
964
965 let rx = repo.update(cx, |repo, _| {
966 repo.send_job(None, move |state, cx| async move {
967 match state {
968 RepositoryState::Local { backend, .. } => {
969 let origin_url = backend
970 .remote_url(&remote)
971 .with_context(|| format!("remote \"{remote}\" not found"))?;
972
973 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
974
975 let provider_registry =
976 cx.update(GitHostingProviderRegistry::default_global)?;
977
978 let (provider, remote) =
979 parse_git_remote_url(provider_registry, &origin_url)
980 .context("parsing Git remote URL")?;
981
982 let path = repo_path.to_str().with_context(|| {
983 format!("converting repo path {repo_path:?} to string")
984 })?;
985
986 Ok(provider.build_permalink(
987 remote,
988 BuildPermalinkParams {
989 sha: &sha,
990 path,
991 selection: Some(selection),
992 },
993 ))
994 }
995 RepositoryState::Remote { project_id, client } => {
996 let response = client
997 .request(proto::GetPermalinkToLine {
998 project_id: project_id.to_proto(),
999 buffer_id: buffer_id.into(),
1000 selection: Some(proto::Range {
1001 start: selection.start as u64,
1002 end: selection.end as u64,
1003 }),
1004 })
1005 .await?;
1006
1007 url::Url::parse(&response.permalink).context("failed to parse permalink")
1008 }
1009 }
1010 })
1011 });
1012 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1013 }
1014
1015 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1016 match &self.state {
1017 GitStoreState::Local {
1018 downstream: downstream_client,
1019 ..
1020 } => downstream_client
1021 .as_ref()
1022 .map(|state| (state.client.clone(), state.project_id)),
1023 GitStoreState::Remote {
1024 downstream: downstream_client,
1025 ..
1026 } => downstream_client.clone(),
1027 }
1028 }
1029
1030 fn upstream_client(&self) -> Option<AnyProtoClient> {
1031 match &self.state {
1032 GitStoreState::Local { .. } => None,
1033 GitStoreState::Remote {
1034 upstream_client, ..
1035 } => Some(upstream_client.clone()),
1036 }
1037 }
1038
1039 fn on_worktree_store_event(
1040 &mut self,
1041 worktree_store: Entity<WorktreeStore>,
1042 event: &WorktreeStoreEvent,
1043 cx: &mut Context<Self>,
1044 ) {
1045 let GitStoreState::Local {
1046 project_environment,
1047 downstream,
1048 next_repository_id,
1049 fs,
1050 } = &self.state
1051 else {
1052 return;
1053 };
1054
1055 match event {
1056 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1057 if let Some(worktree) = self
1058 .worktree_store
1059 .read(cx)
1060 .worktree_for_id(*worktree_id, cx)
1061 {
1062 let paths_by_git_repo =
1063 self.process_updated_entries(&worktree, updated_entries, cx);
1064 let downstream = downstream
1065 .as_ref()
1066 .map(|downstream| downstream.updates_tx.clone());
1067 cx.spawn(async move |_, cx| {
1068 let paths_by_git_repo = paths_by_git_repo.await;
1069 for (repo, paths) in paths_by_git_repo {
1070 repo.update(cx, |repo, cx| {
1071 repo.paths_changed(paths, downstream.clone(), cx);
1072 })
1073 .ok();
1074 }
1075 })
1076 .detach();
1077 }
1078 }
1079 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1080 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1081 else {
1082 return;
1083 };
1084 if !worktree.read(cx).is_visible() {
1085 log::debug!(
1086 "not adding repositories for local worktree {:?} because it's not visible",
1087 worktree.read(cx).abs_path()
1088 );
1089 return;
1090 }
1091 self.update_repositories_from_worktree(
1092 project_environment.clone(),
1093 next_repository_id.clone(),
1094 downstream
1095 .as_ref()
1096 .map(|downstream| downstream.updates_tx.clone()),
1097 changed_repos.clone(),
1098 fs.clone(),
1099 cx,
1100 );
1101 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1102 }
1103 _ => {}
1104 }
1105 }
1106
1107 fn on_repository_event(
1108 &mut self,
1109 repo: Entity<Repository>,
1110 event: &RepositoryEvent,
1111 cx: &mut Context<Self>,
1112 ) {
1113 let id = repo.read(cx).id;
1114 let repo_snapshot = repo.read(cx).snapshot.clone();
1115 for (buffer_id, diff) in self.diffs.iter() {
1116 if let Some((buffer_repo, repo_path)) =
1117 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1118 && buffer_repo == repo
1119 {
1120 diff.update(cx, |diff, cx| {
1121 if let Some(conflict_set) = &diff.conflict_set {
1122 let conflict_status_changed =
1123 conflict_set.update(cx, |conflict_set, cx| {
1124 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1125 conflict_set.set_has_conflict(has_conflict, cx)
1126 })?;
1127 if conflict_status_changed {
1128 let buffer_store = self.buffer_store.read(cx);
1129 if let Some(buffer) = buffer_store.get(*buffer_id) {
1130 let _ = diff
1131 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1132 }
1133 }
1134 }
1135 anyhow::Ok(())
1136 })
1137 .ok();
1138 }
1139 }
1140 cx.emit(GitStoreEvent::RepositoryUpdated(
1141 id,
1142 event.clone(),
1143 self.active_repo_id == Some(id),
1144 ))
1145 }
1146
1147 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1148 cx.emit(GitStoreEvent::JobsUpdated)
1149 }
1150
1151 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1152 fn update_repositories_from_worktree(
1153 &mut self,
1154 project_environment: Entity<ProjectEnvironment>,
1155 next_repository_id: Arc<AtomicU64>,
1156 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1157 updated_git_repositories: UpdatedGitRepositoriesSet,
1158 fs: Arc<dyn Fs>,
1159 cx: &mut Context<Self>,
1160 ) {
1161 let mut removed_ids = Vec::new();
1162 for update in updated_git_repositories.iter() {
1163 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1164 let existing_work_directory_abs_path =
1165 repo.read(cx).work_directory_abs_path.clone();
1166 Some(&existing_work_directory_abs_path)
1167 == update.old_work_directory_abs_path.as_ref()
1168 || Some(&existing_work_directory_abs_path)
1169 == update.new_work_directory_abs_path.as_ref()
1170 }) {
1171 if let Some(new_work_directory_abs_path) =
1172 update.new_work_directory_abs_path.clone()
1173 {
1174 existing.update(cx, |existing, cx| {
1175 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1176 existing.schedule_scan(updates_tx.clone(), cx);
1177 });
1178 } else {
1179 removed_ids.push(*id);
1180 }
1181 } else if let UpdatedGitRepository {
1182 new_work_directory_abs_path: Some(work_directory_abs_path),
1183 dot_git_abs_path: Some(dot_git_abs_path),
1184 repository_dir_abs_path: Some(repository_dir_abs_path),
1185 common_dir_abs_path: Some(common_dir_abs_path),
1186 ..
1187 } = update
1188 {
1189 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1190 let git_store = cx.weak_entity();
1191 let repo = cx.new(|cx| {
1192 let mut repo = Repository::local(
1193 id,
1194 work_directory_abs_path.clone(),
1195 dot_git_abs_path.clone(),
1196 repository_dir_abs_path.clone(),
1197 common_dir_abs_path.clone(),
1198 project_environment.downgrade(),
1199 fs.clone(),
1200 git_store,
1201 cx,
1202 );
1203 repo.schedule_scan(updates_tx.clone(), cx);
1204 repo
1205 });
1206 self._subscriptions
1207 .push(cx.subscribe(&repo, Self::on_repository_event));
1208 self._subscriptions
1209 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1210 self.repositories.insert(id, repo);
1211 cx.emit(GitStoreEvent::RepositoryAdded(id));
1212 self.active_repo_id.get_or_insert_with(|| {
1213 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1214 id
1215 });
1216 }
1217 }
1218
1219 for id in removed_ids {
1220 if self.active_repo_id == Some(id) {
1221 self.active_repo_id = None;
1222 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1223 }
1224 self.repositories.remove(&id);
1225 if let Some(updates_tx) = updates_tx.as_ref() {
1226 updates_tx
1227 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1228 .ok();
1229 }
1230 }
1231 }
1232
1233 fn on_buffer_store_event(
1234 &mut self,
1235 _: Entity<BufferStore>,
1236 event: &BufferStoreEvent,
1237 cx: &mut Context<Self>,
1238 ) {
1239 match event {
1240 BufferStoreEvent::BufferAdded(buffer) => {
1241 cx.subscribe(buffer, |this, buffer, event, cx| {
1242 if let BufferEvent::LanguageChanged = event {
1243 let buffer_id = buffer.read(cx).remote_id();
1244 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1245 diff_state.update(cx, |diff_state, cx| {
1246 diff_state.buffer_language_changed(buffer, cx);
1247 });
1248 }
1249 }
1250 })
1251 .detach();
1252 }
1253 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1254 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1255 diffs.remove(buffer_id);
1256 }
1257 }
1258 BufferStoreEvent::BufferDropped(buffer_id) => {
1259 self.diffs.remove(buffer_id);
1260 for diffs in self.shared_diffs.values_mut() {
1261 diffs.remove(buffer_id);
1262 }
1263 }
1264
1265 _ => {}
1266 }
1267 }
1268
1269 pub fn recalculate_buffer_diffs(
1270 &mut self,
1271 buffers: Vec<Entity<Buffer>>,
1272 cx: &mut Context<Self>,
1273 ) -> impl Future<Output = ()> + use<> {
1274 let mut futures = Vec::new();
1275 for buffer in buffers {
1276 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1277 let buffer = buffer.read(cx).text_snapshot();
1278 diff_state.update(cx, |diff_state, cx| {
1279 diff_state.recalculate_diffs(buffer.clone(), cx);
1280 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1281 });
1282 futures.push(diff_state.update(cx, |diff_state, cx| {
1283 diff_state
1284 .reparse_conflict_markers(buffer, cx)
1285 .map(|_| {})
1286 .boxed()
1287 }));
1288 }
1289 }
1290 async move {
1291 futures::future::join_all(futures).await;
1292 }
1293 }
1294
1295 fn on_buffer_diff_event(
1296 &mut self,
1297 diff: Entity<buffer_diff::BufferDiff>,
1298 event: &BufferDiffEvent,
1299 cx: &mut Context<Self>,
1300 ) {
1301 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1302 let buffer_id = diff.read(cx).buffer_id;
1303 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1304 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1305 diff_state.hunk_staging_operation_count += 1;
1306 diff_state.hunk_staging_operation_count
1307 });
1308 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1309 let recv = repo.update(cx, |repo, cx| {
1310 log::debug!("hunks changed for {}", path.display());
1311 repo.spawn_set_index_text_job(
1312 path,
1313 new_index_text.as_ref().map(|rope| rope.to_string()),
1314 Some(hunk_staging_operation_count),
1315 cx,
1316 )
1317 });
1318 let diff = diff.downgrade();
1319 cx.spawn(async move |this, cx| {
1320 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1321 diff.update(cx, |diff, cx| {
1322 diff.clear_pending_hunks(cx);
1323 })
1324 .ok();
1325 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1326 .ok();
1327 }
1328 })
1329 .detach();
1330 }
1331 }
1332 }
1333 }
1334
1335 fn local_worktree_git_repos_changed(
1336 &mut self,
1337 worktree: Entity<Worktree>,
1338 changed_repos: &UpdatedGitRepositoriesSet,
1339 cx: &mut Context<Self>,
1340 ) {
1341 log::debug!("local worktree repos changed");
1342 debug_assert!(worktree.read(cx).is_local());
1343
1344 for repository in self.repositories.values() {
1345 repository.update(cx, |repository, cx| {
1346 let repo_abs_path = &repository.work_directory_abs_path;
1347 if changed_repos.iter().any(|update| {
1348 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1349 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1350 }) {
1351 repository.reload_buffer_diff_bases(cx);
1352 }
1353 });
1354 }
1355 }
1356
1357 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1358 &self.repositories
1359 }
1360
1361 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1362 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1363 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1364 Some(status.status)
1365 }
1366
1367 pub fn repository_and_path_for_buffer_id(
1368 &self,
1369 buffer_id: BufferId,
1370 cx: &App,
1371 ) -> Option<(Entity<Repository>, RepoPath)> {
1372 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1373 let project_path = buffer.read(cx).project_path(cx)?;
1374 self.repository_and_path_for_project_path(&project_path, cx)
1375 }
1376
1377 pub fn repository_and_path_for_project_path(
1378 &self,
1379 path: &ProjectPath,
1380 cx: &App,
1381 ) -> Option<(Entity<Repository>, RepoPath)> {
1382 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1383 self.repositories
1384 .values()
1385 .filter_map(|repo| {
1386 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1387 Some((repo.clone(), repo_path))
1388 })
1389 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1390 }
1391
1392 pub fn git_init(
1393 &self,
1394 path: Arc<Path>,
1395 fallback_branch_name: String,
1396 cx: &App,
1397 ) -> Task<Result<()>> {
1398 match &self.state {
1399 GitStoreState::Local { fs, .. } => {
1400 let fs = fs.clone();
1401 cx.background_executor()
1402 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1403 }
1404 GitStoreState::Remote {
1405 upstream_client,
1406 upstream_project_id: project_id,
1407 ..
1408 } => {
1409 let client = upstream_client.clone();
1410 let project_id = *project_id;
1411 cx.background_executor().spawn(async move {
1412 client
1413 .request(proto::GitInit {
1414 project_id: project_id,
1415 abs_path: path.to_string_lossy().to_string(),
1416 fallback_branch_name,
1417 })
1418 .await?;
1419 Ok(())
1420 })
1421 }
1422 }
1423 }
1424
1425 pub fn git_clone(
1426 &self,
1427 repo: String,
1428 path: impl Into<Arc<std::path::Path>>,
1429 cx: &App,
1430 ) -> Task<Result<()>> {
1431 let path = path.into();
1432 match &self.state {
1433 GitStoreState::Local { fs, .. } => {
1434 let fs = fs.clone();
1435 cx.background_executor()
1436 .spawn(async move { fs.git_clone(&repo, &path).await })
1437 }
1438 GitStoreState::Remote {
1439 upstream_client,
1440 upstream_project_id,
1441 ..
1442 } => {
1443 if upstream_client.is_via_collab() {
1444 return Task::ready(Err(anyhow!(
1445 "Git Clone isn't supported for project guests"
1446 )));
1447 }
1448 let request = upstream_client.request(proto::GitClone {
1449 project_id: *upstream_project_id,
1450 abs_path: path.to_string_lossy().to_string(),
1451 remote_repo: repo,
1452 });
1453
1454 cx.background_spawn(async move {
1455 let result = request.await?;
1456
1457 match result.success {
1458 true => Ok(()),
1459 false => Err(anyhow!("Git Clone failed")),
1460 }
1461 })
1462 }
1463 }
1464 }
1465
1466 async fn handle_update_repository(
1467 this: Entity<Self>,
1468 envelope: TypedEnvelope<proto::UpdateRepository>,
1469 mut cx: AsyncApp,
1470 ) -> Result<()> {
1471 this.update(&mut cx, |this, cx| {
1472 let mut update = envelope.payload;
1473
1474 let id = RepositoryId::from_proto(update.id);
1475 let client = this.upstream_client().context("no upstream client")?;
1476
1477 let mut is_new = false;
1478 let repo = this.repositories.entry(id).or_insert_with(|| {
1479 is_new = true;
1480 let git_store = cx.weak_entity();
1481 cx.new(|cx| {
1482 Repository::remote(
1483 id,
1484 Path::new(&update.abs_path).into(),
1485 ProjectId(update.project_id),
1486 client,
1487 git_store,
1488 cx,
1489 )
1490 })
1491 });
1492 if is_new {
1493 this._subscriptions
1494 .push(cx.subscribe(repo, Self::on_repository_event))
1495 }
1496
1497 repo.update(cx, {
1498 let update = update.clone();
1499 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1500 })?;
1501
1502 this.active_repo_id.get_or_insert_with(|| {
1503 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1504 id
1505 });
1506
1507 if let Some((client, project_id)) = this.downstream_client() {
1508 update.project_id = project_id.to_proto();
1509 client.send(update).log_err();
1510 }
1511 Ok(())
1512 })?
1513 }
1514
1515 async fn handle_remove_repository(
1516 this: Entity<Self>,
1517 envelope: TypedEnvelope<proto::RemoveRepository>,
1518 mut cx: AsyncApp,
1519 ) -> Result<()> {
1520 this.update(&mut cx, |this, cx| {
1521 let mut update = envelope.payload;
1522 let id = RepositoryId::from_proto(update.id);
1523 this.repositories.remove(&id);
1524 if let Some((client, project_id)) = this.downstream_client() {
1525 update.project_id = project_id.to_proto();
1526 client.send(update).log_err();
1527 }
1528 if this.active_repo_id == Some(id) {
1529 this.active_repo_id = None;
1530 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1531 }
1532 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1533 })
1534 }
1535
1536 async fn handle_git_init(
1537 this: Entity<Self>,
1538 envelope: TypedEnvelope<proto::GitInit>,
1539 cx: AsyncApp,
1540 ) -> Result<proto::Ack> {
1541 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1542 let name = envelope.payload.fallback_branch_name;
1543 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1544 .await?;
1545
1546 Ok(proto::Ack {})
1547 }
1548
1549 async fn handle_git_clone(
1550 this: Entity<Self>,
1551 envelope: TypedEnvelope<proto::GitClone>,
1552 cx: AsyncApp,
1553 ) -> Result<proto::GitCloneResponse> {
1554 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1555 let repo_name = envelope.payload.remote_repo;
1556 let result = cx
1557 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1558 .await;
1559
1560 Ok(proto::GitCloneResponse {
1561 success: result.is_ok(),
1562 })
1563 }
1564
1565 async fn handle_fetch(
1566 this: Entity<Self>,
1567 envelope: TypedEnvelope<proto::Fetch>,
1568 mut cx: AsyncApp,
1569 ) -> Result<proto::RemoteMessageResponse> {
1570 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1571 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1572 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1573 let askpass_id = envelope.payload.askpass_id;
1574
1575 let askpass = make_remote_delegate(
1576 this,
1577 envelope.payload.project_id,
1578 repository_id,
1579 askpass_id,
1580 &mut cx,
1581 );
1582
1583 let remote_output = repository_handle
1584 .update(&mut cx, |repository_handle, cx| {
1585 repository_handle.fetch(fetch_options, askpass, cx)
1586 })?
1587 .await??;
1588
1589 Ok(proto::RemoteMessageResponse {
1590 stdout: remote_output.stdout,
1591 stderr: remote_output.stderr,
1592 })
1593 }
1594
1595 async fn handle_push(
1596 this: Entity<Self>,
1597 envelope: TypedEnvelope<proto::Push>,
1598 mut cx: AsyncApp,
1599 ) -> Result<proto::RemoteMessageResponse> {
1600 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1601 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1602
1603 let askpass_id = envelope.payload.askpass_id;
1604 let askpass = make_remote_delegate(
1605 this,
1606 envelope.payload.project_id,
1607 repository_id,
1608 askpass_id,
1609 &mut cx,
1610 );
1611
1612 let options = envelope
1613 .payload
1614 .options
1615 .as_ref()
1616 .map(|_| match envelope.payload.options() {
1617 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1618 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1619 });
1620
1621 let branch_name = envelope.payload.branch_name.into();
1622 let remote_name = envelope.payload.remote_name.into();
1623
1624 let remote_output = repository_handle
1625 .update(&mut cx, |repository_handle, cx| {
1626 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1627 })?
1628 .await??;
1629 Ok(proto::RemoteMessageResponse {
1630 stdout: remote_output.stdout,
1631 stderr: remote_output.stderr,
1632 })
1633 }
1634
1635 async fn handle_pull(
1636 this: Entity<Self>,
1637 envelope: TypedEnvelope<proto::Pull>,
1638 mut cx: AsyncApp,
1639 ) -> Result<proto::RemoteMessageResponse> {
1640 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1641 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1642 let askpass_id = envelope.payload.askpass_id;
1643 let askpass = make_remote_delegate(
1644 this,
1645 envelope.payload.project_id,
1646 repository_id,
1647 askpass_id,
1648 &mut cx,
1649 );
1650
1651 let branch_name = envelope.payload.branch_name.into();
1652 let remote_name = envelope.payload.remote_name.into();
1653
1654 let remote_message = repository_handle
1655 .update(&mut cx, |repository_handle, cx| {
1656 repository_handle.pull(branch_name, remote_name, askpass, cx)
1657 })?
1658 .await??;
1659
1660 Ok(proto::RemoteMessageResponse {
1661 stdout: remote_message.stdout,
1662 stderr: remote_message.stderr,
1663 })
1664 }
1665
1666 async fn handle_stage(
1667 this: Entity<Self>,
1668 envelope: TypedEnvelope<proto::Stage>,
1669 mut cx: AsyncApp,
1670 ) -> Result<proto::Ack> {
1671 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1672 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1673
1674 let entries = envelope
1675 .payload
1676 .paths
1677 .into_iter()
1678 .map(PathBuf::from)
1679 .map(RepoPath::new)
1680 .collect();
1681
1682 repository_handle
1683 .update(&mut cx, |repository_handle, cx| {
1684 repository_handle.stage_entries(entries, cx)
1685 })?
1686 .await?;
1687 Ok(proto::Ack {})
1688 }
1689
1690 async fn handle_unstage(
1691 this: Entity<Self>,
1692 envelope: TypedEnvelope<proto::Unstage>,
1693 mut cx: AsyncApp,
1694 ) -> Result<proto::Ack> {
1695 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1696 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1697
1698 let entries = envelope
1699 .payload
1700 .paths
1701 .into_iter()
1702 .map(PathBuf::from)
1703 .map(RepoPath::new)
1704 .collect();
1705
1706 repository_handle
1707 .update(&mut cx, |repository_handle, cx| {
1708 repository_handle.unstage_entries(entries, cx)
1709 })?
1710 .await?;
1711
1712 Ok(proto::Ack {})
1713 }
1714
1715 async fn handle_stash(
1716 this: Entity<Self>,
1717 envelope: TypedEnvelope<proto::Stash>,
1718 mut cx: AsyncApp,
1719 ) -> Result<proto::Ack> {
1720 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1721 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1722
1723 let entries = envelope
1724 .payload
1725 .paths
1726 .into_iter()
1727 .map(PathBuf::from)
1728 .map(RepoPath::new)
1729 .collect();
1730
1731 repository_handle
1732 .update(&mut cx, |repository_handle, cx| {
1733 repository_handle.stash_entries(entries, cx)
1734 })?
1735 .await?;
1736
1737 Ok(proto::Ack {})
1738 }
1739
1740 async fn handle_stash_pop(
1741 this: Entity<Self>,
1742 envelope: TypedEnvelope<proto::StashPop>,
1743 mut cx: AsyncApp,
1744 ) -> Result<proto::Ack> {
1745 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1746 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1747
1748 repository_handle
1749 .update(&mut cx, |repository_handle, cx| {
1750 repository_handle.stash_pop(cx)
1751 })?
1752 .await?;
1753
1754 Ok(proto::Ack {})
1755 }
1756
1757 async fn handle_set_index_text(
1758 this: Entity<Self>,
1759 envelope: TypedEnvelope<proto::SetIndexText>,
1760 mut cx: AsyncApp,
1761 ) -> Result<proto::Ack> {
1762 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1763 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1764 let repo_path = RepoPath::from_str(&envelope.payload.path);
1765
1766 repository_handle
1767 .update(&mut cx, |repository_handle, cx| {
1768 repository_handle.spawn_set_index_text_job(
1769 repo_path,
1770 envelope.payload.text,
1771 None,
1772 cx,
1773 )
1774 })?
1775 .await??;
1776 Ok(proto::Ack {})
1777 }
1778
1779 async fn handle_commit(
1780 this: Entity<Self>,
1781 envelope: TypedEnvelope<proto::Commit>,
1782 mut cx: AsyncApp,
1783 ) -> Result<proto::Ack> {
1784 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1785 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1786
1787 let message = SharedString::from(envelope.payload.message);
1788 let name = envelope.payload.name.map(SharedString::from);
1789 let email = envelope.payload.email.map(SharedString::from);
1790 let options = envelope.payload.options.unwrap_or_default();
1791
1792 repository_handle
1793 .update(&mut cx, |repository_handle, cx| {
1794 repository_handle.commit(
1795 message,
1796 name.zip(email),
1797 CommitOptions {
1798 amend: options.amend,
1799 signoff: options.signoff,
1800 },
1801 cx,
1802 )
1803 })?
1804 .await??;
1805 Ok(proto::Ack {})
1806 }
1807
1808 async fn handle_get_remotes(
1809 this: Entity<Self>,
1810 envelope: TypedEnvelope<proto::GetRemotes>,
1811 mut cx: AsyncApp,
1812 ) -> Result<proto::GetRemotesResponse> {
1813 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1814 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1815
1816 let branch_name = envelope.payload.branch_name;
1817
1818 let remotes = repository_handle
1819 .update(&mut cx, |repository_handle, _| {
1820 repository_handle.get_remotes(branch_name)
1821 })?
1822 .await??;
1823
1824 Ok(proto::GetRemotesResponse {
1825 remotes: remotes
1826 .into_iter()
1827 .map(|remotes| proto::get_remotes_response::Remote {
1828 name: remotes.name.to_string(),
1829 })
1830 .collect::<Vec<_>>(),
1831 })
1832 }
1833
1834 async fn handle_get_branches(
1835 this: Entity<Self>,
1836 envelope: TypedEnvelope<proto::GitGetBranches>,
1837 mut cx: AsyncApp,
1838 ) -> Result<proto::GitBranchesResponse> {
1839 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1840 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1841
1842 let branches = repository_handle
1843 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1844 .await??;
1845
1846 Ok(proto::GitBranchesResponse {
1847 branches: branches
1848 .into_iter()
1849 .map(|branch| branch_to_proto(&branch))
1850 .collect::<Vec<_>>(),
1851 })
1852 }
1853 async fn handle_get_default_branch(
1854 this: Entity<Self>,
1855 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1856 mut cx: AsyncApp,
1857 ) -> Result<proto::GetDefaultBranchResponse> {
1858 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1859 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1860
1861 let branch = repository_handle
1862 .update(&mut cx, |repository_handle, _| {
1863 repository_handle.default_branch()
1864 })?
1865 .await??
1866 .map(Into::into);
1867
1868 Ok(proto::GetDefaultBranchResponse { branch })
1869 }
1870 async fn handle_create_branch(
1871 this: Entity<Self>,
1872 envelope: TypedEnvelope<proto::GitCreateBranch>,
1873 mut cx: AsyncApp,
1874 ) -> Result<proto::Ack> {
1875 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1876 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1877 let branch_name = envelope.payload.branch_name;
1878
1879 repository_handle
1880 .update(&mut cx, |repository_handle, _| {
1881 repository_handle.create_branch(branch_name)
1882 })?
1883 .await??;
1884
1885 Ok(proto::Ack {})
1886 }
1887
1888 async fn handle_change_branch(
1889 this: Entity<Self>,
1890 envelope: TypedEnvelope<proto::GitChangeBranch>,
1891 mut cx: AsyncApp,
1892 ) -> Result<proto::Ack> {
1893 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1894 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1895 let branch_name = envelope.payload.branch_name;
1896
1897 repository_handle
1898 .update(&mut cx, |repository_handle, _| {
1899 repository_handle.change_branch(branch_name)
1900 })?
1901 .await??;
1902
1903 Ok(proto::Ack {})
1904 }
1905
1906 async fn handle_show(
1907 this: Entity<Self>,
1908 envelope: TypedEnvelope<proto::GitShow>,
1909 mut cx: AsyncApp,
1910 ) -> Result<proto::GitCommitDetails> {
1911 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1912 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1913
1914 let commit = repository_handle
1915 .update(&mut cx, |repository_handle, _| {
1916 repository_handle.show(envelope.payload.commit)
1917 })?
1918 .await??;
1919 Ok(proto::GitCommitDetails {
1920 sha: commit.sha.into(),
1921 message: commit.message.into(),
1922 commit_timestamp: commit.commit_timestamp,
1923 author_email: commit.author_email.into(),
1924 author_name: commit.author_name.into(),
1925 })
1926 }
1927
1928 async fn handle_load_commit_diff(
1929 this: Entity<Self>,
1930 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1931 mut cx: AsyncApp,
1932 ) -> Result<proto::LoadCommitDiffResponse> {
1933 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1934 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1935
1936 let commit_diff = repository_handle
1937 .update(&mut cx, |repository_handle, _| {
1938 repository_handle.load_commit_diff(envelope.payload.commit)
1939 })?
1940 .await??;
1941 Ok(proto::LoadCommitDiffResponse {
1942 files: commit_diff
1943 .files
1944 .into_iter()
1945 .map(|file| proto::CommitFile {
1946 path: file.path.to_string(),
1947 old_text: file.old_text,
1948 new_text: file.new_text,
1949 })
1950 .collect(),
1951 })
1952 }
1953
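    /// Handles a `GitReset` request, translating the proto reset mode into a
    /// local `ResetMode` and delegating the reset to the repository.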
1954 async fn handle_reset(
1955 this: Entity<Self>,
1956 envelope: TypedEnvelope<proto::GitReset>,
1957 mut cx: AsyncApp,
1958 ) -> Result<proto::Ack> {
1959 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1960 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1961
1962 let mode = match envelope.payload.mode() {
1963 git_reset::ResetMode::Soft => ResetMode::Soft,
1964 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1965 };
1966
1967 repository_handle
1968 .update(&mut cx, |repository_handle, cx| {
1969 repository_handle.reset(envelope.payload.commit, mode, cx)
1970 })?
1971 .await??;
1972 Ok(proto::Ack {})
1973 }
1974
1975 async fn handle_checkout_files(
1976 this: Entity<Self>,
1977 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
1978 mut cx: AsyncApp,
1979 ) -> Result<proto::Ack> {
1980 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1981 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1982 let paths = envelope
1983 .payload
1984 .paths
1985 .iter()
1986 .map(|s| RepoPath::from_str(s))
1987 .collect();
1988
1989 repository_handle
1990 .update(&mut cx, |repository_handle, cx| {
1991 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
1992 })?
1993 .await??;
1994 Ok(proto::Ack {})
1995 }
1996
1997 async fn handle_open_commit_message_buffer(
1998 this: Entity<Self>,
1999 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2000 mut cx: AsyncApp,
2001 ) -> Result<proto::OpenBufferResponse> {
2002 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2003 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2004 let buffer = repository
2005 .update(&mut cx, |repository, cx| {
2006 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2007 })?
2008 .await?;
2009
2010 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2011 this.update(&mut cx, |this, cx| {
2012 this.buffer_store.update(cx, |buffer_store, cx| {
2013 buffer_store
2014 .create_buffer_for_peer(
2015 &buffer,
2016 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2017 cx,
2018 )
2019 .detach_and_log_err(cx);
2020 })
2021 })?;
2022
2023 Ok(proto::OpenBufferResponse {
2024 buffer_id: buffer_id.to_proto(),
2025 })
2026 }
2027
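    /// Handles an `AskPassRequest` by dispatching the prompt to the askpass
    /// delegate registered under the request's `askpass_id`, then re-inserting
    /// the delegate so it can serve subsequent prompts.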
2028 async fn handle_askpass(
2029 this: Entity<Self>,
2030 envelope: TypedEnvelope<proto::AskPassRequest>,
2031 mut cx: AsyncApp,
2032 ) -> Result<proto::AskPassResponse> {
2033 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2034 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2035
2036 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2037 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2038 debug_panic!("no askpass found");
2039 anyhow::bail!("no askpass found");
2040 };
2041
2042 let response = askpass.ask_password(envelope.payload.prompt).await?;
2043
2044 delegates
2045 .lock()
2046 .insert(envelope.payload.askpass_id, askpass);
2047
2048 Ok(proto::AskPassResponse { response })
2049 }
2050
2051 async fn handle_check_for_pushed_commits(
2052 this: Entity<Self>,
2053 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2054 mut cx: AsyncApp,
2055 ) -> Result<proto::CheckForPushedCommitsResponse> {
2056 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2057 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2058
2059 let branches = repository_handle
2060 .update(&mut cx, |repository_handle, _| {
2061 repository_handle.check_for_pushed_commits()
2062 })?
2063 .await??;
2064 Ok(proto::CheckForPushedCommitsResponse {
2065 pushed_to: branches
2066 .into_iter()
2067 .map(|commit| commit.to_string())
2068 .collect(),
2069 })
2070 }
2071
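    /// Handles a `GitDiff` request, computing either a head-to-index or a
    /// head-to-worktree diff and truncating the result to at most one million
    /// characters before responding.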
2072 async fn handle_git_diff(
2073 this: Entity<Self>,
2074 envelope: TypedEnvelope<proto::GitDiff>,
2075 mut cx: AsyncApp,
2076 ) -> Result<proto::GitDiffResponse> {
2077 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2078 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2079 let diff_type = match envelope.payload.diff_type() {
2080 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2081 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2082 };
2083
2084 let mut diff = repository_handle
2085 .update(&mut cx, |repository_handle, cx| {
2086 repository_handle.diff(diff_type, cx)
2087 })?
2088 .await??;
2089 const ONE_MB: usize = 1_000_000;
2090 if diff.len() > ONE_MB {
2091 diff = diff.chars().take(ONE_MB).collect()
2092 }
2093
2094 Ok(proto::GitDiffResponse { diff })
2095 }
2096
2097 async fn handle_open_unstaged_diff(
2098 this: Entity<Self>,
2099 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2100 mut cx: AsyncApp,
2101 ) -> Result<proto::OpenUnstagedDiffResponse> {
2102 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2103 let diff = this
2104 .update(&mut cx, |this, cx| {
2105 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2106 Some(this.open_unstaged_diff(buffer, cx))
2107 })?
2108 .context("missing buffer")?
2109 .await?;
2110 this.update(&mut cx, |this, _| {
2111 let shared_diffs = this
2112 .shared_diffs
2113 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2114 .or_default();
2115 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2116 })?;
2117 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2118 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2119 }
2120
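    /// Handles an `OpenUncommittedDiff` request: opens the diff, records it as
    /// shared with the requesting peer, and replies with the committed and
    /// staged base texts (or an indication that the index matches HEAD).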
2121 async fn handle_open_uncommitted_diff(
2122 this: Entity<Self>,
2123 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2124 mut cx: AsyncApp,
2125 ) -> Result<proto::OpenUncommittedDiffResponse> {
2126 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2127 let diff = this
2128 .update(&mut cx, |this, cx| {
2129 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2130 Some(this.open_uncommitted_diff(buffer, cx))
2131 })?
2132 .context("missing buffer")?
2133 .await?;
2134 this.update(&mut cx, |this, _| {
2135 let shared_diffs = this
2136 .shared_diffs
2137 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2138 .or_default();
2139 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2140 })?;
2141 diff.read_with(&cx, |diff, cx| {
2142 use proto::open_uncommitted_diff_response::Mode;
2143
2144 let unstaged_diff = diff.secondary_diff();
2145 let index_snapshot = unstaged_diff.and_then(|diff| {
2146 let diff = diff.read(cx);
2147 diff.base_text_exists().then(|| diff.base_text())
2148 });
2149
2150 let mode;
2151 let staged_text;
2152 let committed_text;
2153 if diff.base_text_exists() {
2154 let committed_snapshot = diff.base_text();
2155 committed_text = Some(committed_snapshot.text());
2156 if let Some(index_text) = index_snapshot {
2157 if index_text.remote_id() == committed_snapshot.remote_id() {
2158 mode = Mode::IndexMatchesHead;
2159 staged_text = None;
2160 } else {
2161 mode = Mode::IndexAndHead;
2162 staged_text = Some(index_text.text());
2163 }
2164 } else {
2165 mode = Mode::IndexAndHead;
2166 staged_text = None;
2167 }
2168 } else {
2169 mode = Mode::IndexAndHead;
2170 committed_text = None;
2171 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2172 }
2173
2174 proto::OpenUncommittedDiffResponse {
2175 committed_text,
2176 staged_text,
2177 mode: mode.into(),
2178 }
2179 })
2180 }
2181
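    /// Handles an `UpdateDiffBases` message by forwarding the new base texts to
    /// the buffer's git state, provided the buffer and its diff state still exist.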
2182 async fn handle_update_diff_bases(
2183 this: Entity<Self>,
2184 request: TypedEnvelope<proto::UpdateDiffBases>,
2185 mut cx: AsyncApp,
2186 ) -> Result<()> {
2187 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2188 this.update(&mut cx, |this, cx| {
2189 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2190 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2191 {
2192 let buffer = buffer.read(cx).text_snapshot();
2193 diff_state.update(cx, |diff_state, cx| {
2194 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2195 })
2196 }
2197 })
2198 }
2199
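    /// Handles a `BlameBuffer` request: waits for the buffer to reach the
    /// requested version, computes the blame, and serializes it for the peer.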
2200 async fn handle_blame_buffer(
2201 this: Entity<Self>,
2202 envelope: TypedEnvelope<proto::BlameBuffer>,
2203 mut cx: AsyncApp,
2204 ) -> Result<proto::BlameBufferResponse> {
2205 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2206 let version = deserialize_version(&envelope.payload.version);
2207 let buffer = this.read_with(&cx, |this, cx| {
2208 this.buffer_store.read(cx).get_existing(buffer_id)
2209 })??;
2210 buffer
2211 .update(&mut cx, |buffer, _| {
2212 buffer.wait_for_version(version.clone())
2213 })?
2214 .await?;
2215 let blame = this
2216 .update(&mut cx, |this, cx| {
2217 this.blame_buffer(&buffer, Some(version), cx)
2218 })?
2219 .await?;
2220 Ok(serialize_blame_buffer_response(blame))
2221 }
2222
2223 async fn handle_get_permalink_to_line(
2224 this: Entity<Self>,
2225 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2226 mut cx: AsyncApp,
2227 ) -> Result<proto::GetPermalinkToLineResponse> {
2228 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2229 // let version = deserialize_version(&envelope.payload.version);
2230 let selection = {
2231 let proto_selection = envelope
2232 .payload
2233 .selection
2234                .context("no selection provided to get a permalink for")?;
2235 proto_selection.start as u32..proto_selection.end as u32
2236 };
2237 let buffer = this.read_with(&cx, |this, cx| {
2238 this.buffer_store.read(cx).get_existing(buffer_id)
2239 })??;
2240 let permalink = this
2241 .update(&mut cx, |this, cx| {
2242 this.get_permalink_to_line(&buffer, selection, cx)
2243 })?
2244 .await?;
2245 Ok(proto::GetPermalinkToLineResponse {
2246 permalink: permalink.to_string(),
2247 })
2248 }
2249
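    /// Looks up the repository entity addressed by a request, failing if it is
    /// no longer present in this store.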
2250 fn repository_for_request(
2251 this: &Entity<Self>,
2252 id: RepositoryId,
2253 cx: &mut AsyncApp,
2254 ) -> Result<Entity<Repository>> {
2255 this.read_with(cx, |this, _| {
2256 this.repositories
2257 .get(&id)
2258 .context("missing repository handle")
2259 .cloned()
2260 })?
2261 }
2262
2263 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2264 self.repositories
2265 .iter()
2266 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2267 .collect()
2268 }
2269
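    /// Maps a batch of updated worktree entries to repository-relative paths,
    /// grouped by the repository whose work directory contains them; nested
    /// repositories take precedence over their parents. The matching runs on
    /// the background executor.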
2270 fn process_updated_entries(
2271 &self,
2272 worktree: &Entity<Worktree>,
2273 updated_entries: &[(Arc<Path>, ProjectEntryId, PathChange)],
2274 cx: &mut App,
2275 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2276 let mut repo_paths = self
2277 .repositories
2278 .values()
2279 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2280 .collect::<Vec<_>>();
2281 let mut entries: Vec<_> = updated_entries
2282 .iter()
2283 .map(|(path, _, _)| path.clone())
2284 .collect();
2285 entries.sort();
2286 let worktree = worktree.read(cx);
2287
2288 let entries = entries
2289 .into_iter()
2290 .filter_map(|path| worktree.absolutize(&path).ok())
2291 .collect::<Arc<[_]>>();
2292
2293 let executor = cx.background_executor().clone();
2294 cx.background_executor().spawn(async move {
2295 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2296 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2297 let mut tasks = FuturesOrdered::new();
2298 for (repo_path, repo) in repo_paths.into_iter().rev() {
2299 let entries = entries.clone();
2300 let task = executor.spawn(async move {
2301 // Find all repository paths that belong to this repo
2302 let mut ix = entries.partition_point(|path| path < &*repo_path);
2303 if ix == entries.len() {
2304 return None;
2305 };
2306
2307 let mut paths = Vec::new();
2308                    // All paths prefixed by a given repo's work directory form a contiguous range.
2309 while let Some(path) = entries.get(ix)
2310 && let Some(repo_path) =
2311 RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, path)
2312 {
2313 paths.push((repo_path, ix));
2314 ix += 1;
2315 }
2316 if paths.is_empty() {
2317 None
2318 } else {
2319 Some((repo, paths))
2320 }
2321 });
2322 tasks.push_back(task);
2323 }
2324
2325 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2326 let mut path_was_used = vec![false; entries.len()];
2327 let tasks = tasks.collect::<Vec<_>>().await;
2328            // The tasks were spawned in reverse-sorted order of their work directories,
2329            // so more deeply nested repositories come first; each path is assigned to its innermost repository.
2330 for t in tasks {
2331 let Some((repo, paths)) = t else {
2332 continue;
2333 };
2334 let entry = paths_by_git_repo.entry(repo).or_default();
2335 for (repo_path, ix) in paths {
2336 if path_was_used[ix] {
2337 continue;
2338 }
2339 path_was_used[ix] = true;
2340 entry.push(repo_path);
2341 }
2342 }
2343
2344 paths_by_git_repo
2345 })
2346 }
2347}
2348
2349impl BufferGitState {
2350 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2351 Self {
2352 unstaged_diff: Default::default(),
2353 uncommitted_diff: Default::default(),
2354 recalculate_diff_task: Default::default(),
2355 language: Default::default(),
2356 language_registry: Default::default(),
2357 recalculating_tx: postage::watch::channel_with(false).0,
2358 hunk_staging_operation_count: 0,
2359 hunk_staging_operation_count_as_of_write: 0,
2360 head_text: Default::default(),
2361 index_text: Default::default(),
2362 head_changed: Default::default(),
2363 index_changed: Default::default(),
2364 language_changed: Default::default(),
2365 conflict_updated_futures: Default::default(),
2366 conflict_set: Default::default(),
2367 reparse_conflict_markers_task: Default::default(),
2368 }
2369 }
2370
2371 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2372 self.language = buffer.read(cx).language().cloned();
2373 self.language_changed = true;
2374 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2375 }
2376
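    /// Re-parses conflict markers in the given buffer snapshot on a background
    /// task and applies the result to the conflict set. The returned receiver
    /// fires once the conflict set has been updated; if the conflict set is
    /// gone or currently has no conflicts, nothing is scheduled.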
2377 fn reparse_conflict_markers(
2378 &mut self,
2379 buffer: text::BufferSnapshot,
2380 cx: &mut Context<Self>,
2381 ) -> oneshot::Receiver<()> {
2382 let (tx, rx) = oneshot::channel();
2383
2384 let Some(conflict_set) = self
2385 .conflict_set
2386 .as_ref()
2387 .and_then(|conflict_set| conflict_set.upgrade())
2388 else {
2389 return rx;
2390 };
2391
2392 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2393 if conflict_set.has_conflict {
2394 Some(conflict_set.snapshot())
2395 } else {
2396 None
2397 }
2398 });
2399
2400 if let Some(old_snapshot) = old_snapshot {
2401 self.conflict_updated_futures.push(tx);
2402 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2403 let (snapshot, changed_range) = cx
2404 .background_spawn(async move {
2405 let new_snapshot = ConflictSet::parse(&buffer);
2406 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2407 (new_snapshot, changed_range)
2408 })
2409 .await;
2410 this.update(cx, |this, cx| {
2411 if let Some(conflict_set) = &this.conflict_set {
2412 conflict_set
2413 .update(cx, |conflict_set, cx| {
2414 conflict_set.set_snapshot(snapshot, changed_range, cx);
2415 })
2416 .ok();
2417 }
2418 let futures = std::mem::take(&mut this.conflict_updated_futures);
2419 for tx in futures {
2420 tx.send(()).ok();
2421 }
2422 })
2423 }))
2424 }
2425
2426 rx
2427 }
2428
2429 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2430 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2431 }
2432
2433 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2434 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2435 }
2436
2437 fn handle_base_texts_updated(
2438 &mut self,
2439 buffer: text::BufferSnapshot,
2440 message: proto::UpdateDiffBases,
2441 cx: &mut Context<Self>,
2442 ) {
2443 use proto::update_diff_bases::Mode;
2444
2445 let Some(mode) = Mode::from_i32(message.mode) else {
2446 return;
2447 };
2448
2449 let diff_bases_change = match mode {
2450 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2451 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2452 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2453 Mode::IndexAndHead => DiffBasesChange::SetEach {
2454 index: message.staged_text,
2455 head: message.committed_text,
2456 },
2457 };
2458
2459 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2460 }
2461
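    /// If a diff recalculation is currently in progress, returns a future that
    /// resolves once it has finished; otherwise returns `None`.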
2462 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2463 if *self.recalculating_tx.borrow() {
2464 let mut rx = self.recalculating_tx.subscribe();
2465 Some(async move {
2466 loop {
2467 let is_recalculating = rx.recv().await;
2468 if is_recalculating != Some(true) {
2469 break;
2470 }
2471 }
2472 })
2473 } else {
2474 None
2475 }
2476 }
2477
2478 fn diff_bases_changed(
2479 &mut self,
2480 buffer: text::BufferSnapshot,
2481 diff_bases_change: Option<DiffBasesChange>,
2482 cx: &mut Context<Self>,
2483 ) {
2484 match diff_bases_change {
2485 Some(DiffBasesChange::SetIndex(index)) => {
2486 self.index_text = index.map(|mut index| {
2487 text::LineEnding::normalize(&mut index);
2488 Arc::new(index)
2489 });
2490 self.index_changed = true;
2491 }
2492 Some(DiffBasesChange::SetHead(head)) => {
2493 self.head_text = head.map(|mut head| {
2494 text::LineEnding::normalize(&mut head);
2495 Arc::new(head)
2496 });
2497 self.head_changed = true;
2498 }
2499 Some(DiffBasesChange::SetBoth(text)) => {
2500 let text = text.map(|mut text| {
2501 text::LineEnding::normalize(&mut text);
2502 Arc::new(text)
2503 });
2504 self.head_text = text.clone();
2505 self.index_text = text;
2506 self.head_changed = true;
2507 self.index_changed = true;
2508 }
2509 Some(DiffBasesChange::SetEach { index, head }) => {
2510 self.index_text = index.map(|mut index| {
2511 text::LineEnding::normalize(&mut index);
2512 Arc::new(index)
2513 });
2514 self.index_changed = true;
2515 self.head_text = head.map(|mut head| {
2516 text::LineEnding::normalize(&mut head);
2517 Arc::new(head)
2518 });
2519 self.head_changed = true;
2520 }
2521 None => {}
2522 }
2523
2524 self.recalculate_diffs(buffer, cx)
2525 }
2526
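    /// Recomputes the unstaged and uncommitted diffs against the current index
    /// and HEAD texts on a background task, aborting if pending hunk staging
    /// operations have not yet settled.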
2527 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2528 *self.recalculating_tx.borrow_mut() = true;
2529
2530 let language = self.language.clone();
2531 let language_registry = self.language_registry.clone();
2532 let unstaged_diff = self.unstaged_diff();
2533 let uncommitted_diff = self.uncommitted_diff();
2534 let head = self.head_text.clone();
2535 let index = self.index_text.clone();
2536 let index_changed = self.index_changed;
2537 let head_changed = self.head_changed;
2538 let language_changed = self.language_changed;
2539 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2540 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2541 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2542 (None, None) => true,
2543 _ => false,
2544 };
2545 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2546 log::debug!(
2547 "start recalculating diffs for buffer {}",
2548 buffer.remote_id()
2549 );
2550
2551 let mut new_unstaged_diff = None;
2552 if let Some(unstaged_diff) = &unstaged_diff {
2553 new_unstaged_diff = Some(
2554 BufferDiff::update_diff(
2555 unstaged_diff.clone(),
2556 buffer.clone(),
2557 index,
2558 index_changed,
2559 language_changed,
2560 language.clone(),
2561 language_registry.clone(),
2562 cx,
2563 )
2564 .await?,
2565 );
2566 }
2567
2568 let mut new_uncommitted_diff = None;
2569 if let Some(uncommitted_diff) = &uncommitted_diff {
2570 new_uncommitted_diff = if index_matches_head {
2571 new_unstaged_diff.clone()
2572 } else {
2573 Some(
2574 BufferDiff::update_diff(
2575 uncommitted_diff.clone(),
2576 buffer.clone(),
2577 head,
2578 head_changed,
2579 language_changed,
2580 language.clone(),
2581 language_registry.clone(),
2582 cx,
2583 )
2584 .await?,
2585 )
2586 }
2587 }
2588
2589 let cancel = this.update(cx, |this, _| {
2590 // This checks whether all pending stage/unstage operations
2591 // have quiesced (i.e. both the corresponding write and the
2592 // read of that write have completed). If not, then we cancel
2593 // this recalculation attempt to avoid invalidating pending
2594 // state too quickly; another recalculation will come along
2595 // later and clear the pending state once the state of the index has settled.
2596 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2597 *this.recalculating_tx.borrow_mut() = false;
2598 true
2599 } else {
2600 false
2601 }
2602 })?;
2603 if cancel {
2604 log::debug!(
2605 concat!(
2606                        "aborting recalculating diffs for buffer {} ",
2607 "due to subsequent hunk operations",
2608 ),
2609 buffer.remote_id()
2610 );
2611 return Ok(());
2612 }
2613
2614 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2615 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2616 {
2617 unstaged_diff.update(cx, |diff, cx| {
2618 if language_changed {
2619 diff.language_changed(cx);
2620 }
2621 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2622 })?
2623 } else {
2624 None
2625 };
2626
2627 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2628 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2629 {
2630 uncommitted_diff.update(cx, |diff, cx| {
2631 if language_changed {
2632 diff.language_changed(cx);
2633 }
2634 diff.set_snapshot_with_secondary(
2635 new_uncommitted_diff,
2636 &buffer,
2637 unstaged_changed_range,
2638 true,
2639 cx,
2640 );
2641 })?;
2642 }
2643
2644 log::debug!(
2645 "finished recalculating diffs for buffer {}",
2646 buffer.remote_id()
2647 );
2648
2649 if let Some(this) = this.upgrade() {
2650 this.update(cx, |this, _| {
2651 this.index_changed = false;
2652 this.head_changed = false;
2653 this.language_changed = false;
2654 *this.recalculating_tx.borrow_mut() = false;
2655 })?;
2656 }
2657
2658 Ok(())
2659 }));
2660 }
2661}
2662
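/// Builds an `AskPassDelegate` that forwards each password prompt to the
/// downstream client as an `AskPassRequest` and resolves with that client's
/// response.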
2663fn make_remote_delegate(
2664 this: Entity<GitStore>,
2665 project_id: u64,
2666 repository_id: RepositoryId,
2667 askpass_id: u64,
2668 cx: &mut AsyncApp,
2669) -> AskPassDelegate {
2670 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2671 this.update(cx, |this, cx| {
2672 let Some((client, _)) = this.downstream_client() else {
2673 return;
2674 };
2675 let response = client.request(proto::AskPassRequest {
2676 project_id,
2677 repository_id: repository_id.to_proto(),
2678 askpass_id,
2679 prompt,
2680 });
2681 cx.spawn(async move |_, _| {
2682 tx.send(response.await?.response).ok();
2683 anyhow::Ok(())
2684 })
2685 .detach_and_log_err(cx);
2686 })
2687 .log_err();
2688 })
2689}
2690
2691impl RepositoryId {
2692 pub fn to_proto(self) -> u64 {
2693 self.0
2694 }
2695
2696 pub fn from_proto(id: u64) -> Self {
2697 RepositoryId(id)
2698 }
2699}
2700
2701impl RepositorySnapshot {
2702 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2703 Self {
2704 id,
2705 statuses_by_path: Default::default(),
2706 work_directory_abs_path,
2707 branch: None,
2708 head_commit: None,
2709 scan_id: 0,
2710 merge: Default::default(),
2711 remote_origin_url: None,
2712 remote_upstream_url: None,
2713 }
2714 }
2715
2716 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2717 proto::UpdateRepository {
2718 branch_summary: self.branch.as_ref().map(branch_to_proto),
2719 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2720 updated_statuses: self
2721 .statuses_by_path
2722 .iter()
2723 .map(|entry| entry.to_proto())
2724 .collect(),
2725 removed_statuses: Default::default(),
2726 current_merge_conflicts: self
2727 .merge
2728 .conflicted_paths
2729 .iter()
2730 .map(|repo_path| repo_path.to_proto())
2731 .collect(),
2732 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2733 project_id,
2734 id: self.id.to_proto(),
2735 abs_path: self.work_directory_abs_path.to_proto(),
2736 entry_ids: vec![self.id.to_proto()],
2737 scan_id: self.scan_id,
2738 is_last_update: true,
2739 }
2740 }
2741
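    /// Builds an incremental `UpdateRepository` message by comparing this
    /// snapshot's statuses against `old`, collecting entries that were added or
    /// changed and the paths whose statuses were removed.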
2742 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2743 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2744 let mut removed_statuses: Vec<String> = Vec::new();
2745
2746 let mut new_statuses = self.statuses_by_path.iter().peekable();
2747 let mut old_statuses = old.statuses_by_path.iter().peekable();
2748
2749 let mut current_new_entry = new_statuses.next();
2750 let mut current_old_entry = old_statuses.next();
2751 loop {
2752 match (current_new_entry, current_old_entry) {
2753 (Some(new_entry), Some(old_entry)) => {
2754 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2755 Ordering::Less => {
2756 updated_statuses.push(new_entry.to_proto());
2757 current_new_entry = new_statuses.next();
2758 }
2759 Ordering::Equal => {
2760 if new_entry.status != old_entry.status {
2761 updated_statuses.push(new_entry.to_proto());
2762 }
2763 current_old_entry = old_statuses.next();
2764 current_new_entry = new_statuses.next();
2765 }
2766 Ordering::Greater => {
2767 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2768 current_old_entry = old_statuses.next();
2769 }
2770 }
2771 }
2772 (None, Some(old_entry)) => {
2773 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2774 current_old_entry = old_statuses.next();
2775 }
2776 (Some(new_entry), None) => {
2777 updated_statuses.push(new_entry.to_proto());
2778 current_new_entry = new_statuses.next();
2779 }
2780 (None, None) => break,
2781 }
2782 }
2783
2784 proto::UpdateRepository {
2785 branch_summary: self.branch.as_ref().map(branch_to_proto),
2786 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2787 updated_statuses,
2788 removed_statuses,
2789 current_merge_conflicts: self
2790 .merge
2791 .conflicted_paths
2792 .iter()
2793 .map(|path| path.as_ref().to_proto())
2794 .collect(),
2795 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2796 project_id,
2797 id: self.id.to_proto(),
2798 abs_path: self.work_directory_abs_path.to_proto(),
2799 entry_ids: vec![],
2800 scan_id: self.scan_id,
2801 is_last_update: true,
2802 }
2803 }
2804
2805 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2806 self.statuses_by_path.iter().cloned()
2807 }
2808
2809 pub fn status_summary(&self) -> GitSummary {
2810 self.statuses_by_path.summary().item_summary
2811 }
2812
2813 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2814 self.statuses_by_path
2815 .get(&PathKey(path.0.clone()), &())
2816 .cloned()
2817 }
2818
2819 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2820 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path)
2821 }
2822
2823 #[inline]
2824 fn abs_path_to_repo_path_inner(
2825 work_directory_abs_path: &Path,
2826 abs_path: &Path,
2827 ) -> Option<RepoPath> {
2828 abs_path
2829 .strip_prefix(&work_directory_abs_path)
2830 .map(RepoPath::from)
2831 .ok()
2832 }
2833
2834 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2835 self.merge.conflicted_paths.contains(repo_path)
2836 }
2837
2838 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2839 let had_conflict_on_last_merge_head_change =
2840 self.merge.conflicted_paths.contains(repo_path);
2841 let has_conflict_currently = self
2842 .status_for_path(repo_path)
2843 .is_some_and(|entry| entry.status.is_conflicted());
2844 had_conflict_on_last_merge_head_change || has_conflict_currently
2845 }
2846
2847 /// This is the name that will be displayed in the repository selector for this repository.
2848 pub fn display_name(&self) -> SharedString {
2849 self.work_directory_abs_path
2850 .file_name()
2851 .unwrap_or_default()
2852 .to_string_lossy()
2853 .to_string()
2854 .into()
2855 }
2856}
2857
2858impl MergeDetails {
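    /// Reads the merge-related state (merge message and the in-progress
    /// operation heads) from the repository and recomputes the set of
    /// conflicted paths when the heads have changed. Returns the details along
    /// with whether the merge heads changed.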
2859 async fn load(
2860 backend: &Arc<dyn GitRepository>,
2861 status: &SumTree<StatusEntry>,
2862 prev_snapshot: &RepositorySnapshot,
2863 ) -> Result<(MergeDetails, bool)> {
2864 log::debug!("load merge details");
2865 let message = backend.merge_message().await;
2866 let heads = backend
2867 .revparse_batch(vec![
2868 "MERGE_HEAD".into(),
2869 "CHERRY_PICK_HEAD".into(),
2870 "REBASE_HEAD".into(),
2871 "REVERT_HEAD".into(),
2872 "APPLY_HEAD".into(),
2873 ])
2874 .await
2875 .log_err()
2876 .unwrap_or_default()
2877 .into_iter()
2878 .map(|opt| opt.map(SharedString::from))
2879 .collect::<Vec<_>>();
2880 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2881 let conflicted_paths = if merge_heads_changed {
2882 let current_conflicted_paths = TreeSet::from_ordered_entries(
2883 status
2884 .iter()
2885 .filter(|entry| entry.status.is_conflicted())
2886 .map(|entry| entry.repo_path.clone()),
2887 );
2888
2889            // A scan can run while a lengthy merge is in progress, after the merge
2890            // heads appear but before the resulting conflicts are reported by
2891            // `git status`. Since, for now, we only track the merge heads state for
2892            // the purpose of detecting conflicts, don't update it until some
2893            // conflicts actually show up.
2894 if heads.iter().any(Option::is_some)
2895 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
2896 && current_conflicted_paths.is_empty()
2897 {
2898 log::debug!("not updating merge heads because no conflicts found");
2899 return Ok((
2900 MergeDetails {
2901 message: message.map(SharedString::from),
2902 ..prev_snapshot.merge.clone()
2903 },
2904 false,
2905 ));
2906 }
2907
2908 current_conflicted_paths
2909 } else {
2910 prev_snapshot.merge.conflicted_paths.clone()
2911 };
2912 let details = MergeDetails {
2913 conflicted_paths,
2914 message: message.map(SharedString::from),
2915 heads,
2916 };
2917 Ok((details, merge_heads_changed))
2918 }
2919}
2920
2921impl Repository {
2922 pub fn snapshot(&self) -> RepositorySnapshot {
2923 self.snapshot.clone()
2924 }
2925
2926 fn local(
2927 id: RepositoryId,
2928 work_directory_abs_path: Arc<Path>,
2929 dot_git_abs_path: Arc<Path>,
2930 repository_dir_abs_path: Arc<Path>,
2931 common_dir_abs_path: Arc<Path>,
2932 project_environment: WeakEntity<ProjectEnvironment>,
2933 fs: Arc<dyn Fs>,
2934 git_store: WeakEntity<GitStore>,
2935 cx: &mut Context<Self>,
2936 ) -> Self {
2937 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2938 Repository {
2939 this: cx.weak_entity(),
2940 git_store,
2941 snapshot,
2942 commit_message_buffer: None,
2943 askpass_delegates: Default::default(),
2944 paths_needing_status_update: Default::default(),
2945 latest_askpass_id: 0,
2946 job_sender: Repository::spawn_local_git_worker(
2947 work_directory_abs_path,
2948 dot_git_abs_path,
2949 repository_dir_abs_path,
2950 common_dir_abs_path,
2951 project_environment,
2952 fs,
2953 cx,
2954 ),
2955 job_id: 0,
2956 active_jobs: Default::default(),
2957 }
2958 }
2959
2960 fn remote(
2961 id: RepositoryId,
2962 work_directory_abs_path: Arc<Path>,
2963 project_id: ProjectId,
2964 client: AnyProtoClient,
2965 git_store: WeakEntity<GitStore>,
2966 cx: &mut Context<Self>,
2967 ) -> Self {
2968 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2969 Self {
2970 this: cx.weak_entity(),
2971 snapshot,
2972 commit_message_buffer: None,
2973 git_store,
2974 paths_needing_status_update: Default::default(),
2975 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2976 askpass_delegates: Default::default(),
2977 latest_askpass_id: 0,
2978 active_jobs: Default::default(),
2979 job_id: 0,
2980 }
2981 }
2982
2983 pub fn git_store(&self) -> Option<Entity<GitStore>> {
2984 self.git_store.upgrade()
2985 }
2986
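    /// Reloads the index and HEAD base texts for every open buffer belonging to
    /// this repository, then forwards any changes to the corresponding diff
    /// state and, if connected, to the downstream client.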
2987 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
2988 let this = cx.weak_entity();
2989 let git_store = self.git_store.clone();
2990 let _ = self.send_keyed_job(
2991 Some(GitJobKey::ReloadBufferDiffBases),
2992 None,
2993 |state, mut cx| async move {
2994 let RepositoryState::Local { backend, .. } = state else {
2995 log::error!("tried to recompute diffs for a non-local repository");
2996 return Ok(());
2997 };
2998
2999 let Some(this) = this.upgrade() else {
3000 return Ok(());
3001 };
3002
3003 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3004 git_store.update(cx, |git_store, cx| {
3005 git_store
3006 .diffs
3007 .iter()
3008 .filter_map(|(buffer_id, diff_state)| {
3009 let buffer_store = git_store.buffer_store.read(cx);
3010 let buffer = buffer_store.get(*buffer_id)?;
3011 let file = File::from_dyn(buffer.read(cx).file())?;
3012 let abs_path =
3013 file.worktree.read(cx).absolutize(&file.path).ok()?;
3014 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3015 log::debug!(
3016 "start reload diff bases for repo path {}",
3017 repo_path.0.display()
3018 );
3019 diff_state.update(cx, |diff_state, _| {
3020 let has_unstaged_diff = diff_state
3021 .unstaged_diff
3022 .as_ref()
3023 .is_some_and(|diff| diff.is_upgradable());
3024 let has_uncommitted_diff = diff_state
3025 .uncommitted_diff
3026 .as_ref()
3027 .is_some_and(|set| set.is_upgradable());
3028
3029 Some((
3030 buffer,
3031 repo_path,
3032 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3033 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3034 ))
3035 })
3036 })
3037 .collect::<Vec<_>>()
3038 })
3039 })??;
3040
3041 let buffer_diff_base_changes = cx
3042 .background_spawn(async move {
3043 let mut changes = Vec::new();
3044 for (buffer, repo_path, current_index_text, current_head_text) in
3045 &repo_diff_state_updates
3046 {
3047 let index_text = if current_index_text.is_some() {
3048 backend.load_index_text(repo_path.clone()).await
3049 } else {
3050 None
3051 };
3052 let head_text = if current_head_text.is_some() {
3053 backend.load_committed_text(repo_path.clone()).await
3054 } else {
3055 None
3056 };
3057
3058 let change =
3059 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3060 (Some(current_index), Some(current_head)) => {
3061 let index_changed =
3062 index_text.as_ref() != current_index.as_deref();
3063 let head_changed =
3064 head_text.as_ref() != current_head.as_deref();
3065 if index_changed && head_changed {
3066 if index_text == head_text {
3067 Some(DiffBasesChange::SetBoth(head_text))
3068 } else {
3069 Some(DiffBasesChange::SetEach {
3070 index: index_text,
3071 head: head_text,
3072 })
3073 }
3074 } else if index_changed {
3075 Some(DiffBasesChange::SetIndex(index_text))
3076 } else if head_changed {
3077 Some(DiffBasesChange::SetHead(head_text))
3078 } else {
3079 None
3080 }
3081 }
3082 (Some(current_index), None) => {
3083 let index_changed =
3084 index_text.as_ref() != current_index.as_deref();
3085 index_changed
3086 .then_some(DiffBasesChange::SetIndex(index_text))
3087 }
3088 (None, Some(current_head)) => {
3089 let head_changed =
3090 head_text.as_ref() != current_head.as_deref();
3091 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3092 }
3093 (None, None) => None,
3094 };
3095
3096 changes.push((buffer.clone(), change))
3097 }
3098 changes
3099 })
3100 .await;
3101
3102 git_store.update(&mut cx, |git_store, cx| {
3103 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3104 let buffer_snapshot = buffer.read(cx).text_snapshot();
3105 let buffer_id = buffer_snapshot.remote_id();
3106 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3107 continue;
3108 };
3109
3110 let downstream_client = git_store.downstream_client();
3111 diff_state.update(cx, |diff_state, cx| {
3112 use proto::update_diff_bases::Mode;
3113
3114 if let Some((diff_bases_change, (client, project_id))) =
3115 diff_bases_change.clone().zip(downstream_client)
3116 {
3117 let (staged_text, committed_text, mode) = match diff_bases_change {
3118 DiffBasesChange::SetIndex(index) => {
3119 (index, None, Mode::IndexOnly)
3120 }
3121 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3122 DiffBasesChange::SetEach { index, head } => {
3123 (index, head, Mode::IndexAndHead)
3124 }
3125 DiffBasesChange::SetBoth(text) => {
3126 (None, text, Mode::IndexMatchesHead)
3127 }
3128 };
3129 client
3130 .send(proto::UpdateDiffBases {
3131 project_id: project_id.to_proto(),
3132 buffer_id: buffer_id.to_proto(),
3133 staged_text,
3134 committed_text,
3135 mode: mode as i32,
3136 })
3137 .log_err();
3138 }
3139
3140 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3141 });
3142 }
3143 })
3144 },
3145 );
3146 }
3147
3148 pub fn send_job<F, Fut, R>(
3149 &mut self,
3150 status: Option<SharedString>,
3151 job: F,
3152 ) -> oneshot::Receiver<R>
3153 where
3154 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3155 Fut: Future<Output = R> + 'static,
3156 R: Send + 'static,
3157 {
3158 self.send_keyed_job(None, status, job)
3159 }
3160
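    /// Enqueues a job on the repository's worker queue, optionally keyed with a
    /// `GitJobKey`. If a status message is provided, the job is tracked in
    /// `active_jobs` while it runs; the returned receiver yields the job's result.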
3161 fn send_keyed_job<F, Fut, R>(
3162 &mut self,
3163 key: Option<GitJobKey>,
3164 status: Option<SharedString>,
3165 job: F,
3166 ) -> oneshot::Receiver<R>
3167 where
3168 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3169 Fut: Future<Output = R> + 'static,
3170 R: Send + 'static,
3171 {
3172 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3173 let job_id = post_inc(&mut self.job_id);
3174 let this = self.this.clone();
3175 self.job_sender
3176 .unbounded_send(GitJob {
3177 key,
3178 job: Box::new(move |state, cx: &mut AsyncApp| {
3179 let job = job(state, cx.clone());
3180 cx.spawn(async move |cx| {
3181 if let Some(s) = status.clone() {
3182 this.update(cx, |this, cx| {
3183 this.active_jobs.insert(
3184 job_id,
3185 JobInfo {
3186 start: Instant::now(),
3187 message: s.clone(),
3188 },
3189 );
3190
3191 cx.notify();
3192 })
3193 .ok();
3194 }
3195 let result = job.await;
3196
3197 this.update(cx, |this, cx| {
3198 this.active_jobs.remove(&job_id);
3199 cx.notify();
3200 })
3201 .ok();
3202
3203 result_tx.send(result).ok();
3204 })
3205 }),
3206 })
3207 .ok();
3208 result_rx
3209 }
3210
3211 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3212 let Some(git_store) = self.git_store.upgrade() else {
3213 return;
3214 };
3215 let entity = cx.entity();
3216 git_store.update(cx, |git_store, cx| {
3217 let Some((&id, _)) = git_store
3218 .repositories
3219 .iter()
3220 .find(|(_, handle)| *handle == &entity)
3221 else {
3222 return;
3223 };
3224 git_store.active_repo_id = Some(id);
3225 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3226 });
3227 }
3228
3229 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3230 self.snapshot.status()
3231 }
3232
3233 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3234 let git_store = self.git_store.upgrade()?;
3235 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3236 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
3237 let abs_path = SanitizedPath::new(&abs_path);
3238 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3239 Some(ProjectPath {
3240 worktree_id: worktree.read(cx).id(),
3241 path: relative_path.into(),
3242 })
3243 }
3244
3245 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3246 let git_store = self.git_store.upgrade()?;
3247 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3248 let abs_path = worktree_store.absolutize(path, cx)?;
3249 self.snapshot.abs_path_to_repo_path(&abs_path)
3250 }
3251
3252 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3253 other
3254 .read(cx)
3255 .snapshot
3256 .work_directory_abs_path
3257 .starts_with(&self.snapshot.work_directory_abs_path)
3258 }
3259
3260 pub fn open_commit_buffer(
3261 &mut self,
3262 languages: Option<Arc<LanguageRegistry>>,
3263 buffer_store: Entity<BufferStore>,
3264 cx: &mut Context<Self>,
3265 ) -> Task<Result<Entity<Buffer>>> {
3266 let id = self.id;
3267 if let Some(buffer) = self.commit_message_buffer.clone() {
3268 return Task::ready(Ok(buffer));
3269 }
3270 let this = cx.weak_entity();
3271
3272 let rx = self.send_job(None, move |state, mut cx| async move {
3273 let Some(this) = this.upgrade() else {
3274 bail!("git store was dropped");
3275 };
3276 match state {
3277 RepositoryState::Local { .. } => {
3278 this.update(&mut cx, |_, cx| {
3279 Self::open_local_commit_buffer(languages, buffer_store, cx)
3280 })?
3281 .await
3282 }
3283 RepositoryState::Remote { project_id, client } => {
3284 let request = client.request(proto::OpenCommitMessageBuffer {
3285 project_id: project_id.0,
3286 repository_id: id.to_proto(),
3287 });
3288 let response = request.await.context("requesting to open commit buffer")?;
3289 let buffer_id = BufferId::new(response.buffer_id)?;
3290 let buffer = buffer_store
3291 .update(&mut cx, |buffer_store, cx| {
3292 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3293 })?
3294 .await?;
3295 if let Some(language_registry) = languages {
3296 let git_commit_language =
3297 language_registry.language_for_name("Git Commit").await?;
3298 buffer.update(&mut cx, |buffer, cx| {
3299 buffer.set_language(Some(git_commit_language), cx);
3300 })?;
3301 }
3302 this.update(&mut cx, |this, _| {
3303 this.commit_message_buffer = Some(buffer.clone());
3304 })?;
3305 Ok(buffer)
3306 }
3307 }
3308 });
3309
3310 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3311 }
3312
3313 fn open_local_commit_buffer(
3314 language_registry: Option<Arc<LanguageRegistry>>,
3315 buffer_store: Entity<BufferStore>,
3316 cx: &mut Context<Self>,
3317 ) -> Task<Result<Entity<Buffer>>> {
3318 cx.spawn(async move |repository, cx| {
3319 let buffer = buffer_store
3320 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3321 .await?;
3322
3323 if let Some(language_registry) = language_registry {
3324 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3325 buffer.update(cx, |buffer, cx| {
3326 buffer.set_language(Some(git_commit_language), cx);
3327 })?;
3328 }
3329
3330 repository.update(cx, |repository, _| {
3331 repository.commit_message_buffer = Some(buffer.clone());
3332 })?;
3333 Ok(buffer)
3334 })
3335 }
3336
3337 pub fn checkout_files(
3338 &mut self,
3339 commit: &str,
3340 paths: Vec<RepoPath>,
3341 _cx: &mut App,
3342 ) -> oneshot::Receiver<Result<()>> {
3343 let commit = commit.to_string();
3344 let id = self.id;
3345
3346 self.send_job(
3347 Some(format!("git checkout {}", commit).into()),
3348 move |git_repo, _| async move {
3349 match git_repo {
3350 RepositoryState::Local {
3351 backend,
3352 environment,
3353 ..
3354 } => {
3355 backend
3356 .checkout_files(commit, paths, environment.clone())
3357 .await
3358 }
3359 RepositoryState::Remote { project_id, client } => {
3360 client
3361 .request(proto::GitCheckoutFiles {
3362 project_id: project_id.0,
3363 repository_id: id.to_proto(),
3364 commit,
3365 paths: paths
3366 .into_iter()
3367 .map(|p| p.to_string_lossy().to_string())
3368 .collect(),
3369 })
3370 .await?;
3371
3372 Ok(())
3373 }
3374 }
3375 },
3376 )
3377 }
3378
3379 pub fn reset(
3380 &mut self,
3381 commit: String,
3382 reset_mode: ResetMode,
3383 _cx: &mut App,
3384 ) -> oneshot::Receiver<Result<()>> {
3385 let id = self.id;
3386
3387 self.send_job(None, move |git_repo, _| async move {
3388 match git_repo {
3389 RepositoryState::Local {
3390 backend,
3391 environment,
3392 ..
3393 } => backend.reset(commit, reset_mode, environment).await,
3394 RepositoryState::Remote { project_id, client } => {
3395 client
3396 .request(proto::GitReset {
3397 project_id: project_id.0,
3398 repository_id: id.to_proto(),
3399 commit,
3400 mode: match reset_mode {
3401 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3402 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3403 },
3404 })
3405 .await?;
3406
3407 Ok(())
3408 }
3409 }
3410 })
3411 }
3412
3413 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3414 let id = self.id;
3415 self.send_job(None, move |git_repo, _cx| async move {
3416 match git_repo {
3417 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3418 RepositoryState::Remote { project_id, client } => {
3419 let resp = client
3420 .request(proto::GitShow {
3421 project_id: project_id.0,
3422 repository_id: id.to_proto(),
3423 commit,
3424 })
3425 .await?;
3426
3427 Ok(CommitDetails {
3428 sha: resp.sha.into(),
3429 message: resp.message.into(),
3430 commit_timestamp: resp.commit_timestamp,
3431 author_email: resp.author_email.into(),
3432 author_name: resp.author_name.into(),
3433 })
3434 }
3435 }
3436 })
3437 }
3438
3439 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3440 let id = self.id;
3441 self.send_job(None, move |git_repo, cx| async move {
3442 match git_repo {
3443 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3444 RepositoryState::Remote {
3445 client, project_id, ..
3446 } => {
3447 let response = client
3448 .request(proto::LoadCommitDiff {
3449 project_id: project_id.0,
3450 repository_id: id.to_proto(),
3451 commit,
3452 })
3453 .await?;
3454 Ok(CommitDiff {
3455 files: response
3456 .files
3457 .into_iter()
3458 .map(|file| CommitFile {
3459 path: Path::new(&file.path).into(),
3460 old_text: file.old_text,
3461 new_text: file.new_text,
3462 })
3463 .collect(),
3464 })
3465 }
3466 }
3467 })
3468 }
3469
3470 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3471 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3472 }
3473
3474 pub fn stage_entries(
3475 &self,
3476 entries: Vec<RepoPath>,
3477 cx: &mut Context<Self>,
3478 ) -> Task<anyhow::Result<()>> {
3479 if entries.is_empty() {
3480 return Task::ready(Ok(()));
3481 }
3482 let id = self.id;
3483
3484 let mut save_futures = Vec::new();
3485 if let Some(buffer_store) = self.buffer_store(cx) {
3486 buffer_store.update(cx, |buffer_store, cx| {
3487 for path in &entries {
3488 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3489 continue;
3490 };
3491 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3492 && buffer
3493 .read(cx)
3494 .file()
3495 .is_some_and(|file| file.disk_state().exists())
3496 {
3497 save_futures.push(buffer_store.save_buffer(buffer, cx));
3498 }
3499 }
3500 })
3501 }
3502
3503 cx.spawn(async move |this, cx| {
3504 for save_future in save_futures {
3505 save_future.await?;
3506 }
3507
3508 this.update(cx, |this, _| {
3509 this.send_job(None, move |git_repo, _cx| async move {
3510 match git_repo {
3511 RepositoryState::Local {
3512 backend,
3513 environment,
3514 ..
3515 } => backend.stage_paths(entries, environment.clone()).await,
3516 RepositoryState::Remote { project_id, client } => {
3517 client
3518 .request(proto::Stage {
3519 project_id: project_id.0,
3520 repository_id: id.to_proto(),
3521 paths: entries
3522 .into_iter()
3523 .map(|repo_path| repo_path.as_ref().to_proto())
3524 .collect(),
3525 })
3526 .await
3527 .context("sending stage request")?;
3528
3529 Ok(())
3530 }
3531 }
3532 })
3533 })?
3534 .await??;
3535
3536 Ok(())
3537 })
3538 }
3539
3540 pub fn unstage_entries(
3541 &self,
3542 entries: Vec<RepoPath>,
3543 cx: &mut Context<Self>,
3544 ) -> Task<anyhow::Result<()>> {
3545 if entries.is_empty() {
3546 return Task::ready(Ok(()));
3547 }
3548 let id = self.id;
3549
3550 let mut save_futures = Vec::new();
3551 if let Some(buffer_store) = self.buffer_store(cx) {
3552 buffer_store.update(cx, |buffer_store, cx| {
3553 for path in &entries {
3554 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3555 continue;
3556 };
3557 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3558 && buffer
3559 .read(cx)
3560 .file()
3561 .is_some_and(|file| file.disk_state().exists())
3562 {
3563 save_futures.push(buffer_store.save_buffer(buffer, cx));
3564 }
3565 }
3566 })
3567 }
3568
3569 cx.spawn(async move |this, cx| {
3570 for save_future in save_futures {
3571 save_future.await?;
3572 }
3573
3574 this.update(cx, |this, _| {
3575 this.send_job(None, move |git_repo, _cx| async move {
3576 match git_repo {
3577 RepositoryState::Local {
3578 backend,
3579 environment,
3580 ..
3581 } => backend.unstage_paths(entries, environment).await,
3582 RepositoryState::Remote { project_id, client } => {
3583 client
3584 .request(proto::Unstage {
3585 project_id: project_id.0,
3586 repository_id: id.to_proto(),
3587 paths: entries
3588 .into_iter()
3589 .map(|repo_path| repo_path.as_ref().to_proto())
3590 .collect(),
3591 })
3592 .await
3593 .context("sending unstage request")?;
3594
3595 Ok(())
3596 }
3597 }
3598 })
3599 })?
3600 .await??;
3601
3602 Ok(())
3603 })
3604 }
3605
3606 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3607 let to_stage = self
3608 .cached_status()
3609 .filter(|entry| !entry.status.staging().is_fully_staged())
3610 .map(|entry| entry.repo_path)
3611 .collect();
3612 self.stage_entries(to_stage, cx)
3613 }
3614
3615 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3616 let to_unstage = self
3617 .cached_status()
3618 .filter(|entry| entry.status.staging().has_staged())
3619 .map(|entry| entry.repo_path)
3620 .collect();
3621 self.unstage_entries(to_unstage, cx)
3622 }
3623
3624 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3625 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3626
3627 self.stash_entries(to_stash, cx)
3628 }
3629
3630 pub fn stash_entries(
3631 &mut self,
3632 entries: Vec<RepoPath>,
3633 cx: &mut Context<Self>,
3634 ) -> Task<anyhow::Result<()>> {
3635 let id = self.id;
3636
3637 cx.spawn(async move |this, cx| {
3638 this.update(cx, |this, _| {
3639 this.send_job(None, move |git_repo, _cx| async move {
3640 match git_repo {
3641 RepositoryState::Local {
3642 backend,
3643 environment,
3644 ..
3645 } => backend.stash_paths(entries, environment).await,
3646 RepositoryState::Remote { project_id, client } => {
3647 client
3648 .request(proto::Stash {
3649 project_id: project_id.0,
3650 repository_id: id.to_proto(),
3651 paths: entries
3652 .into_iter()
3653 .map(|repo_path| repo_path.as_ref().to_proto())
3654 .collect(),
3655 })
3656 .await
3657 .context("sending stash request")?;
3658 Ok(())
3659 }
3660 }
3661 })
3662 })?
3663 .await??;
3664 Ok(())
3665 })
3666 }
3667
3668 pub fn stash_pop(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3669 let id = self.id;
3670 cx.spawn(async move |this, cx| {
3671 this.update(cx, |this, _| {
3672 this.send_job(None, move |git_repo, _cx| async move {
3673 match git_repo {
3674 RepositoryState::Local {
3675 backend,
3676 environment,
3677 ..
3678 } => backend.stash_pop(environment).await,
3679 RepositoryState::Remote { project_id, client } => {
3680 client
3681 .request(proto::StashPop {
3682 project_id: project_id.0,
3683 repository_id: id.to_proto(),
3684 })
3685 .await
3686 .context("sending stash pop request")?;
3687 Ok(())
3688 }
3689 }
3690 })
3691 })?
3692 .await??;
3693 Ok(())
3694 })
3695 }
3696
3697 pub fn commit(
3698 &mut self,
3699 message: SharedString,
3700 name_and_email: Option<(SharedString, SharedString)>,
3701 options: CommitOptions,
3702 _cx: &mut App,
3703 ) -> oneshot::Receiver<Result<()>> {
3704 let id = self.id;
3705
3706 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3707 match git_repo {
3708 RepositoryState::Local {
3709 backend,
3710 environment,
3711 ..
3712 } => {
3713 backend
3714 .commit(message, name_and_email, options, environment)
3715 .await
3716 }
3717 RepositoryState::Remote { project_id, client } => {
3718 let (name, email) = name_and_email.unzip();
3719 client
3720 .request(proto::Commit {
3721 project_id: project_id.0,
3722 repository_id: id.to_proto(),
3723 message: String::from(message),
3724 name: name.map(String::from),
3725 email: email.map(String::from),
3726 options: Some(proto::commit::CommitOptions {
3727 amend: options.amend,
3728 signoff: options.signoff,
3729 }),
3730 })
3731 .await
3732 .context("sending commit request")?;
3733
3734 Ok(())
3735 }
3736 }
3737 })
3738 }
3739
3740 pub fn fetch(
3741 &mut self,
3742 fetch_options: FetchOptions,
3743 askpass: AskPassDelegate,
3744 _cx: &mut App,
3745 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3746 let askpass_delegates = self.askpass_delegates.clone();
3747 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3748 let id = self.id;
3749
3750 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3751 match git_repo {
3752 RepositoryState::Local {
3753 backend,
3754 environment,
3755 ..
3756 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3757 RepositoryState::Remote { project_id, client } => {
3758 askpass_delegates.lock().insert(askpass_id, askpass);
3759 let _defer = util::defer(|| {
3760 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3761 debug_assert!(askpass_delegate.is_some());
3762 });
3763
3764 let response = client
3765 .request(proto::Fetch {
3766 project_id: project_id.0,
3767 repository_id: id.to_proto(),
3768 askpass_id,
3769 remote: fetch_options.to_proto(),
3770 })
3771 .await
3772 .context("sending fetch request")?;
3773
3774 Ok(RemoteCommandOutput {
3775 stdout: response.stdout,
3776 stderr: response.stderr,
3777 })
3778 }
3779 }
3780 })
3781 }
3782
3783 pub fn push(
3784 &mut self,
3785 branch: SharedString,
3786 remote: SharedString,
3787 options: Option<PushOptions>,
3788 askpass: AskPassDelegate,
3789 cx: &mut Context<Self>,
3790 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3791 let askpass_delegates = self.askpass_delegates.clone();
3792 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3793 let id = self.id;
3794
3795 let args = options
3796 .map(|option| match option {
3797 PushOptions::SetUpstream => " --set-upstream",
3798 PushOptions::Force => " --force-with-lease",
3799 })
3800 .unwrap_or("");
3801
3802 let updates_tx = self
3803 .git_store()
3804 .and_then(|git_store| match &git_store.read(cx).state {
3805 GitStoreState::Local { downstream, .. } => downstream
3806 .as_ref()
3807 .map(|downstream| downstream.updates_tx.clone()),
3808 _ => None,
3809 });
3810
3811 let this = cx.weak_entity();
3812 self.send_job(
3813            Some(format!("git push{} {} {}", args, remote, branch).into()),
3814 move |git_repo, mut cx| async move {
3815 match git_repo {
3816 RepositoryState::Local {
3817 backend,
3818 environment,
3819 ..
3820 } => {
3821 let result = backend
3822 .push(
3823 branch.to_string(),
3824 remote.to_string(),
3825 options,
3826 askpass,
3827 environment.clone(),
3828 cx.clone(),
3829 )
3830 .await;
3831 if result.is_ok() {
3832 let branches = backend.branches().await?;
3833 let branch = branches.into_iter().find(|branch| branch.is_head);
3834 log::info!("head branch after scan is {branch:?}");
3835 let snapshot = this.update(&mut cx, |this, cx| {
3836 this.snapshot.branch = branch;
3837 let snapshot = this.snapshot.clone();
3838 cx.emit(RepositoryEvent::Updated {
3839 full_scan: false,
3840 new_instance: false,
3841 });
3842 snapshot
3843 })?;
3844 if let Some(updates_tx) = updates_tx {
3845 updates_tx
3846 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3847 .ok();
3848 }
3849 }
3850 result
3851 }
3852 RepositoryState::Remote { project_id, client } => {
3853 askpass_delegates.lock().insert(askpass_id, askpass);
3854 let _defer = util::defer(|| {
3855 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3856 debug_assert!(askpass_delegate.is_some());
3857 });
3858 let response = client
3859 .request(proto::Push {
3860 project_id: project_id.0,
3861 repository_id: id.to_proto(),
3862 askpass_id,
3863 branch_name: branch.to_string(),
3864 remote_name: remote.to_string(),
3865 options: options.map(|options| match options {
3866 PushOptions::Force => proto::push::PushOptions::Force,
3867 PushOptions::SetUpstream => {
3868 proto::push::PushOptions::SetUpstream
3869 }
3870 }
3871 as i32),
3872 })
3873 .await
3874 .context("sending push request")?;
3875
3876 Ok(RemoteCommandOutput {
3877 stdout: response.stdout,
3878 stderr: response.stderr,
3879 })
3880 }
3881 }
3882 },
3883 )
3884 }
3885
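/// Runs `git pull` for the given branch and remote, either via the local git
/// backend or by forwarding the request to the remote project host.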
3886 pub fn pull(
3887 &mut self,
3888 branch: SharedString,
3889 remote: SharedString,
3890 askpass: AskPassDelegate,
3891 _cx: &mut App,
3892 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3893 let askpass_delegates = self.askpass_delegates.clone();
3894 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3895 let id = self.id;
3896
3897 self.send_job(
3898 Some(format!("git pull {} {}", remote, branch).into()),
3899 move |git_repo, cx| async move {
3900 match git_repo {
3901 RepositoryState::Local {
3902 backend,
3903 environment,
3904 ..
3905 } => {
3906 backend
3907 .pull(
3908 branch.to_string(),
3909 remote.to_string(),
3910 askpass,
3911 environment.clone(),
3912 cx,
3913 )
3914 .await
3915 }
3916 RepositoryState::Remote { project_id, client } => {
3917 askpass_delegates.lock().insert(askpass_id, askpass);
3918 let _defer = util::defer(|| {
3919 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3920 debug_assert!(askpass_delegate.is_some());
3921 });
3922 let response = client
3923 .request(proto::Pull {
3924 project_id: project_id.0,
3925 repository_id: id.to_proto(),
3926 askpass_id,
3927 branch_name: branch.to_string(),
3928 remote_name: remote.to_string(),
3929 })
3930 .await
3931 .context("sending pull request")?;
3932
3933 Ok(RemoteCommandOutput {
3934 stdout: response.stdout,
3935 stderr: response.stderr,
3936 })
3937 }
3938 }
3939 },
3940 )
3941 }
3942
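/// Queues a keyed job that writes (or clears, when `content` is `None`) the
/// index text for `path`, then records the hunk staging operation count on the
/// corresponding buffer's diff state once the write completes.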
3943 fn spawn_set_index_text_job(
3944 &mut self,
3945 path: RepoPath,
3946 content: Option<String>,
3947 hunk_staging_operation_count: Option<usize>,
3948 cx: &mut Context<Self>,
3949 ) -> oneshot::Receiver<anyhow::Result<()>> {
3950 let id = self.id;
3951 let this = cx.weak_entity();
3952 let git_store = self.git_store.clone();
3953 self.send_keyed_job(
3954 Some(GitJobKey::WriteIndex(path.clone())),
3955 None,
3956 move |git_repo, mut cx| async move {
3957 log::debug!("start updating index text for buffer {}", path.display());
3958 match git_repo {
3959 RepositoryState::Local {
3960 backend,
3961 environment,
3962 ..
3963 } => {
3964 backend
3965 .set_index_text(path.clone(), content, environment.clone())
3966 .await?;
3967 }
3968 RepositoryState::Remote { project_id, client } => {
3969 client
3970 .request(proto::SetIndexText {
3971 project_id: project_id.0,
3972 repository_id: id.to_proto(),
3973 path: path.as_ref().to_proto(),
3974 text: content,
3975 })
3976 .await?;
3977 }
3978 }
3979 log::debug!("finish updating index text for buffer {}", path.display());
3980
3981 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
3982 let project_path = this
3983 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
3984 .ok()
3985 .flatten();
3986 git_store.update(&mut cx, |git_store, cx| {
3987 let buffer_id = git_store
3988 .buffer_store
3989 .read(cx)
3990 .get_by_path(&project_path?)?
3991 .read(cx)
3992 .remote_id();
3993 let diff_state = git_store.diffs.get(&buffer_id)?;
3994 diff_state.update(cx, |diff_state, _| {
3995 diff_state.hunk_staging_operation_count_as_of_write =
3996 hunk_staging_operation_count;
3997 });
3998 Some(())
3999 })?;
4000 }
4001 Ok(())
4002 },
4003 )
4004 }
4005
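/// Lists the repository's configured remotes, optionally restricted to those
/// relevant to the given branch.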
4006 pub fn get_remotes(
4007 &mut self,
4008 branch_name: Option<String>,
4009 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4010 let id = self.id;
4011 self.send_job(None, move |repo, _cx| async move {
4012 match repo {
4013 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4014 RepositoryState::Remote { project_id, client } => {
4015 let response = client
4016 .request(proto::GetRemotes {
4017 project_id: project_id.0,
4018 repository_id: id.to_proto(),
4019 branch_name,
4020 })
4021 .await?;
4022
4023 let remotes = response
4024 .remotes
4025 .into_iter()
4026 .map(|remote| git::repository::Remote {
4027 name: remote.name.into(),
4028 })
4029 .collect();
4030
4031 Ok(remotes)
4032 }
4033 }
4034 })
4035 }
4036
4037 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4038 let id = self.id;
4039 self.send_job(None, move |repo, _| async move {
4040 match repo {
4041 RepositoryState::Local { backend, .. } => backend.branches().await,
4042 RepositoryState::Remote { project_id, client } => {
4043 let response = client
4044 .request(proto::GitGetBranches {
4045 project_id: project_id.0,
4046 repository_id: id.to_proto(),
4047 })
4048 .await?;
4049
4050 let branches = response
4051 .branches
4052 .into_iter()
4053 .map(|branch| proto_to_branch(&branch))
4054 .collect();
4055
4056 Ok(branches)
4057 }
4058 }
4059 })
4060 }
4061
4062 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4063 let id = self.id;
4064 self.send_job(None, move |repo, _| async move {
4065 match repo {
4066 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4067 RepositoryState::Remote { project_id, client } => {
4068 let response = client
4069 .request(proto::GetDefaultBranch {
4070 project_id: project_id.0,
4071 repository_id: id.to_proto(),
4072 })
4073 .await?;
4074
4075 anyhow::Ok(response.branch.map(SharedString::from))
4076 }
4077 }
4078 })
4079 }
4080
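/// Produces a textual diff, either from HEAD to the index or from HEAD to the
/// working tree, depending on `diff_type`.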
4081 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4082 let id = self.id;
4083 self.send_job(None, move |repo, _cx| async move {
4084 match repo {
4085 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4086 RepositoryState::Remote { project_id, client } => {
4087 let response = client
4088 .request(proto::GitDiff {
4089 project_id: project_id.0,
4090 repository_id: id.to_proto(),
4091 diff_type: match diff_type {
4092 DiffType::HeadToIndex => {
4093 proto::git_diff::DiffType::HeadToIndex.into()
4094 }
4095 DiffType::HeadToWorktree => {
4096 proto::git_diff::DiffType::HeadToWorktree.into()
4097 }
4098 },
4099 })
4100 .await?;
4101
4102 Ok(response.diff)
4103 }
4104 }
4105 })
4106 }
4107
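/// Creates a new branch with the given name and switches to it.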
4108 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4109 let id = self.id;
4110 self.send_job(
4111 Some(format!("git switch -c {branch_name}").into()),
4112 move |repo, _cx| async move {
4113 match repo {
4114 RepositoryState::Local { backend, .. } => {
4115 backend.create_branch(branch_name).await
4116 }
4117 RepositoryState::Remote { project_id, client } => {
4118 client
4119 .request(proto::GitCreateBranch {
4120 project_id: project_id.0,
4121 repository_id: id.to_proto(),
4122 branch_name,
4123 })
4124 .await?;
4125
4126 Ok(())
4127 }
4128 }
4129 },
4130 )
4131 }
4132
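/// Switches to the existing branch with the given name.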
4133 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4134 let id = self.id;
4135 self.send_job(
4136 Some(format!("git switch {branch_name}").into()),
4137 move |repo, _cx| async move {
4138 match repo {
4139 RepositoryState::Local { backend, .. } => {
4140 backend.change_branch(branch_name).await
4141 }
4142 RepositoryState::Remote { project_id, client } => {
4143 client
4144 .request(proto::GitChangeBranch {
4145 project_id: project_id.0,
4146 repository_id: id.to_proto(),
4147 branch_name,
4148 })
4149 .await?;
4150
4151 Ok(())
4152 }
4153 }
4154 },
4155 )
4156 }
4157
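/// Returns the names of the branches that the current changes have already
/// been pushed to.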
4158 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4159 let id = self.id;
4160 self.send_job(None, move |repo, _cx| async move {
4161 match repo {
4162 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4163 RepositoryState::Remote { project_id, client } => {
4164 let response = client
4165 .request(proto::CheckForPushedCommits {
4166 project_id: project_id.0,
4167 repository_id: id.to_proto(),
4168 })
4169 .await?;
4170
4171 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4172
4173 Ok(branches)
4174 }
4175 }
4176 })
4177 }
4178
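/// Captures a checkpoint of the repository's current state. Not yet
/// implemented for remote repositories.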
4179 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4180 self.send_job(None, |repo, _cx| async move {
4181 match repo {
4182 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4183 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4184 }
4185 })
4186 }
4187
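/// Restores the repository to a previously captured checkpoint. Not yet
/// implemented for remote repositories.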
4188 pub fn restore_checkpoint(
4189 &mut self,
4190 checkpoint: GitRepositoryCheckpoint,
4191 ) -> oneshot::Receiver<Result<()>> {
4192 self.send_job(None, move |repo, _cx| async move {
4193 match repo {
4194 RepositoryState::Local { backend, .. } => {
4195 backend.restore_checkpoint(checkpoint).await
4196 }
4197 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4198 }
4199 })
4200 }
4201
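/// Applies a repository update received from the remote project host,
/// replacing the branch, head commit, merge state, and file statuses in the
/// local snapshot and emitting an update event.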
4202 pub(crate) fn apply_remote_update(
4203 &mut self,
4204 update: proto::UpdateRepository,
4205 is_new: bool,
4206 cx: &mut Context<Self>,
4207 ) -> Result<()> {
4208 let conflicted_paths = TreeSet::from_ordered_entries(
4209 update
4210 .current_merge_conflicts
4211 .into_iter()
4212 .map(|path| RepoPath(Path::new(&path).into())),
4213 );
4214 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4215 self.snapshot.head_commit = update
4216 .head_commit_details
4217 .as_ref()
4218 .map(proto_to_commit_details);
4219
4220 self.snapshot.merge.conflicted_paths = conflicted_paths;
4221 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4222
4223 let edits = update
4224 .removed_statuses
4225 .into_iter()
4226 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
4227 .chain(
4228 update
4229 .updated_statuses
4230 .into_iter()
4231 .filter_map(|updated_status| {
4232 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4233 }),
4234 )
4235 .collect::<Vec<_>>();
4236 self.snapshot.statuses_by_path.edit(edits, &());
4237 if update.is_last_update {
4238 self.snapshot.scan_id = update.scan_id;
4239 }
4240 cx.emit(RepositoryEvent::Updated {
4241 full_scan: true,
4242 new_instance: is_new,
4243 });
4244 Ok(())
4245 }
4246
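/// Returns whether two previously captured checkpoints refer to equivalent
/// repository states. Not yet implemented for remote repositories.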
4247 pub fn compare_checkpoints(
4248 &mut self,
4249 left: GitRepositoryCheckpoint,
4250 right: GitRepositoryCheckpoint,
4251 ) -> oneshot::Receiver<Result<bool>> {
4252 self.send_job(None, move |repo, _cx| async move {
4253 match repo {
4254 RepositoryState::Local { backend, .. } => {
4255 backend.compare_checkpoints(left, right).await
4256 }
4257 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4258 }
4259 })
4260 }
4261
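/// Produces a textual diff between two previously captured checkpoints. Not
/// yet implemented for remote repositories.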
4262 pub fn diff_checkpoints(
4263 &mut self,
4264 base_checkpoint: GitRepositoryCheckpoint,
4265 target_checkpoint: GitRepositoryCheckpoint,
4266 ) -> oneshot::Receiver<Result<String>> {
4267 self.send_job(None, move |repo, _cx| async move {
4268 match repo {
4269 RepositoryState::Local { backend, .. } => {
4270 backend
4271 .diff_checkpoints(base_checkpoint, target_checkpoint)
4272 .await
4273 }
4274 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4275 }
4276 })
4277 }
4278
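/// Schedules a full git status scan, recomputing the repository snapshot and
/// forwarding the result to downstream clients when one is connected.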
4279 fn schedule_scan(
4280 &mut self,
4281 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4282 cx: &mut Context<Self>,
4283 ) {
4284 let this = cx.weak_entity();
4285 let _ = self.send_keyed_job(
4286 Some(GitJobKey::ReloadGitState),
4287 None,
4288 |state, mut cx| async move {
4289 log::debug!("run scheduled git status scan");
4290
4291 let Some(this) = this.upgrade() else {
4292 return Ok(());
4293 };
4294 let RepositoryState::Local { backend, .. } = state else {
4295 bail!("not a local repository")
4296 };
4297 let (snapshot, events) = this
4298 .update(&mut cx, |this, _| {
4299 this.paths_needing_status_update.clear();
4300 compute_snapshot(
4301 this.id,
4302 this.work_directory_abs_path.clone(),
4303 this.snapshot.clone(),
4304 backend.clone(),
4305 )
4306 })?
4307 .await?;
4308 this.update(&mut cx, |this, cx| {
4309 this.snapshot = snapshot.clone();
4310 for event in events {
4311 cx.emit(event);
4312 }
4313 })?;
4314 if let Some(updates_tx) = updates_tx {
4315 updates_tx
4316 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4317 .ok();
4318 }
4319 Ok(())
4320 },
4321 );
4322 }
4323
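/// Spawns the worker loop that executes git jobs against a local repository,
/// skipping a keyed job when a newer job with the same key is already queued.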
4324 fn spawn_local_git_worker(
4325 work_directory_abs_path: Arc<Path>,
4326 dot_git_abs_path: Arc<Path>,
4327 _repository_dir_abs_path: Arc<Path>,
4328 _common_dir_abs_path: Arc<Path>,
4329 project_environment: WeakEntity<ProjectEnvironment>,
4330 fs: Arc<dyn Fs>,
4331 cx: &mut Context<Self>,
4332 ) -> mpsc::UnboundedSender<GitJob> {
4333 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4334
4335 cx.spawn(async move |_, cx| {
4336 let environment = project_environment
4337 .upgrade()
4338 .context("missing project environment")?
4339 .update(cx, |project_environment, cx| {
4340 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4341 })?
4342 .await
4343 .unwrap_or_else(|| {
4344 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4345 HashMap::default()
4346 });
4347 let backend = cx
4348 .background_spawn(async move {
4349 fs.open_repo(&dot_git_abs_path)
4350 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4351 })
4352 .await?;
4353
4354 if let Some(git_hosting_provider_registry) =
4355 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4356 {
4357 git_hosting_providers::register_additional_providers(
4358 git_hosting_provider_registry,
4359 backend.clone(),
4360 );
4361 }
4362
4363 let state = RepositoryState::Local {
4364 backend,
4365 environment: Arc::new(environment),
4366 };
4367 let mut jobs = VecDeque::new();
4368 loop {
4369 while let Ok(Some(next_job)) = job_rx.try_next() {
4370 jobs.push_back(next_job);
4371 }
4372
4373 if let Some(job) = jobs.pop_front() {
4374 if let Some(current_key) = &job.key
4375 && jobs
4376 .iter()
4377 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4378 {
4379 continue;
4380 }
4381 (job.job)(state.clone(), cx).await;
4382 } else if let Some(job) = job_rx.next().await {
4383 jobs.push_back(job);
4384 } else {
4385 break;
4386 }
4387 }
4388 anyhow::Ok(())
4389 })
4390 .detach_and_log_err(cx);
4391
4392 job_tx
4393 }
4394
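/// Spawns the worker loop that forwards git jobs to the remote project host,
/// skipping a keyed job when a newer job with the same key is already queued.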
4395 fn spawn_remote_git_worker(
4396 project_id: ProjectId,
4397 client: AnyProtoClient,
4398 cx: &mut Context<Self>,
4399 ) -> mpsc::UnboundedSender<GitJob> {
4400 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4401
4402 cx.spawn(async move |_, cx| {
4403 let state = RepositoryState::Remote { project_id, client };
4404 let mut jobs = VecDeque::new();
4405 loop {
4406 while let Ok(Some(next_job)) = job_rx.try_next() {
4407 jobs.push_back(next_job);
4408 }
4409
4410 if let Some(job) = jobs.pop_front() {
4411 if let Some(current_key) = &job.key
4412 && jobs
4413 .iter()
4414 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4415 {
4416 continue;
4417 }
4418 (job.job)(state.clone(), cx).await;
4419 } else if let Some(job) = job_rx.next().await {
4420 jobs.push_back(job);
4421 } else {
4422 break;
4423 }
4424 }
4425 anyhow::Ok(())
4426 })
4427 .detach_and_log_err(cx);
4428
4429 job_tx
4430 }
4431
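/// Loads the index (staged) text for the given path, either from the local
/// backend or from the remote project host.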
4432 fn load_staged_text(
4433 &mut self,
4434 buffer_id: BufferId,
4435 repo_path: RepoPath,
4436 cx: &App,
4437 ) -> Task<Result<Option<String>>> {
4438 let rx = self.send_job(None, move |state, _| async move {
4439 match state {
4440 RepositoryState::Local { backend, .. } => {
4441 anyhow::Ok(backend.load_index_text(repo_path).await)
4442 }
4443 RepositoryState::Remote { project_id, client } => {
4444 let response = client
4445 .request(proto::OpenUnstagedDiff {
4446 project_id: project_id.to_proto(),
4447 buffer_id: buffer_id.to_proto(),
4448 })
4449 .await?;
4450 Ok(response.staged_text)
4451 }
4452 }
4453 });
4454 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4455 }
4456
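/// Loads the HEAD and index texts for the given path and reports them as a
/// `DiffBasesChange`, collapsing to a single value when the index matches HEAD.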
4457 fn load_committed_text(
4458 &mut self,
4459 buffer_id: BufferId,
4460 repo_path: RepoPath,
4461 cx: &App,
4462 ) -> Task<Result<DiffBasesChange>> {
4463 let rx = self.send_job(None, move |state, _| async move {
4464 match state {
4465 RepositoryState::Local { backend, .. } => {
4466 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4467 let staged_text = backend.load_index_text(repo_path).await;
4468 let diff_bases_change = if committed_text == staged_text {
4469 DiffBasesChange::SetBoth(committed_text)
4470 } else {
4471 DiffBasesChange::SetEach {
4472 index: staged_text,
4473 head: committed_text,
4474 }
4475 };
4476 anyhow::Ok(diff_bases_change)
4477 }
4478 RepositoryState::Remote { project_id, client } => {
4479 use proto::open_uncommitted_diff_response::Mode;
4480
4481 let response = client
4482 .request(proto::OpenUncommittedDiff {
4483 project_id: project_id.to_proto(),
4484 buffer_id: buffer_id.to_proto(),
4485 })
4486 .await?;
4487 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4488 let bases = match mode {
4489 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4490 Mode::IndexAndHead => DiffBasesChange::SetEach {
4491 head: response.committed_text,
4492 index: response.staged_text,
4493 },
4494 };
4495 Ok(bases)
4496 }
4497 }
4498 });
4499
4500 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4501 }
4502
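/// Records that the given paths may have changed and queues a keyed job to
/// refresh their git statuses, updating the snapshot and notifying downstream
/// clients when any status actually changed.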
4503 fn paths_changed(
4504 &mut self,
4505 paths: Vec<RepoPath>,
4506 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4507 cx: &mut Context<Self>,
4508 ) {
4509 self.paths_needing_status_update.extend(paths);
4510
4511 let this = cx.weak_entity();
4512 let _ = self.send_keyed_job(
4513 Some(GitJobKey::RefreshStatuses),
4514 None,
4515 |state, mut cx| async move {
4516 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4517 (
4518 this.snapshot.clone(),
4519 mem::take(&mut this.paths_needing_status_update),
4520 )
4521 })?;
4522 let RepositoryState::Local { backend, .. } = state else {
4523 bail!("not a local repository")
4524 };
4525
4526 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4527 if paths.is_empty() {
4528 return Ok(());
4529 }
4530 let statuses = backend.status(&paths).await?;
4531
4532 let changed_path_statuses = cx
4533 .background_spawn(async move {
4534 let mut changed_path_statuses = Vec::new();
4535 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4536 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4537
4538 for (repo_path, status) in &*statuses.entries {
4539 changed_paths.remove(repo_path);
4540 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4541 && cursor.item().is_some_and(|entry| entry.status == *status)
4542 {
4543 continue;
4544 }
4545
4546 changed_path_statuses.push(Edit::Insert(StatusEntry {
4547 repo_path: repo_path.clone(),
4548 status: *status,
4549 }));
4550 }
4551 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4552 for path in changed_paths.into_iter() {
4553 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4554 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4555 }
4556 }
4557 changed_path_statuses
4558 })
4559 .await;
4560
4561 this.update(&mut cx, |this, cx| {
4562 if !changed_path_statuses.is_empty() {
4563 this.snapshot
4564 .statuses_by_path
4565 .edit(changed_path_statuses, &());
4566 this.snapshot.scan_id += 1;
4567 if let Some(updates_tx) = updates_tx {
4568 updates_tx
4569 .unbounded_send(DownstreamUpdate::UpdateRepository(
4570 this.snapshot.clone(),
4571 ))
4572 .ok();
4573 }
4574 }
4575 cx.emit(RepositoryEvent::Updated {
4576 full_scan: false,
4577 new_instance: false,
4578 });
4579 })
4580 },
4581 );
4582 }
4583
4584 /// Returns the currently running git command and when it started, if any.
4585 pub fn current_job(&self) -> Option<JobInfo> {
4586 self.active_jobs.values().next().cloned()
4587 }
4588
4589 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4590 self.send_job(None, |_, _| async {})
4591 }
4592}
4593
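/// Builds a permalink to a line range in a file that was vendored into the
/// Cargo registry source directory, using the crate's `.cargo_vcs_info.json`
/// and `Cargo.toml` to recover the upstream repository URL and commit SHA.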
4594fn get_permalink_in_rust_registry_src(
4595 provider_registry: Arc<GitHostingProviderRegistry>,
4596 path: PathBuf,
4597 selection: Range<u32>,
4598) -> Result<url::Url> {
4599 #[derive(Deserialize)]
4600 struct CargoVcsGit {
4601 sha1: String,
4602 }
4603
4604 #[derive(Deserialize)]
4605 struct CargoVcsInfo {
4606 git: CargoVcsGit,
4607 path_in_vcs: String,
4608 }
4609
4610 #[derive(Deserialize)]
4611 struct CargoPackage {
4612 repository: String,
4613 }
4614
4615 #[derive(Deserialize)]
4616 struct CargoToml {
4617 package: CargoPackage,
4618 }
4619
4620 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4621 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4622 Some((dir, json))
4623 }) else {
4624 bail!("No .cargo_vcs_info.json found in parent directories")
4625 };
4626 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4627 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4628 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4629 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4630 .context("parsing package.repository field of manifest")?;
4631 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4632 let permalink = provider.build_permalink(
4633 remote,
4634 BuildPermalinkParams {
4635 sha: &cargo_vcs_info.git.sha1,
4636 path: &path.to_string_lossy(),
4637 selection: Some(selection),
4638 },
4639 );
4640 Ok(permalink)
4641}
4642
4643fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4644 let Some(blame) = blame else {
4645 return proto::BlameBufferResponse {
4646 blame_response: None,
4647 };
4648 };
4649
4650 let entries = blame
4651 .entries
4652 .into_iter()
4653 .map(|entry| proto::BlameEntry {
4654 sha: entry.sha.as_bytes().into(),
4655 start_line: entry.range.start,
4656 end_line: entry.range.end,
4657 original_line_number: entry.original_line_number,
4658 author: entry.author,
4659 author_mail: entry.author_mail,
4660 author_time: entry.author_time,
4661 author_tz: entry.author_tz,
4662 committer: entry.committer_name,
4663 committer_mail: entry.committer_email,
4664 committer_time: entry.committer_time,
4665 committer_tz: entry.committer_tz,
4666 summary: entry.summary,
4667 previous: entry.previous,
4668 filename: entry.filename,
4669 })
4670 .collect::<Vec<_>>();
4671
4672 let messages = blame
4673 .messages
4674 .into_iter()
4675 .map(|(oid, message)| proto::CommitMessage {
4676 oid: oid.as_bytes().into(),
4677 message,
4678 })
4679 .collect::<Vec<_>>();
4680
4681 proto::BlameBufferResponse {
4682 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4683 entries,
4684 messages,
4685 remote_url: blame.remote_url,
4686 }),
4687 }
4688}
4689
4690fn deserialize_blame_buffer_response(
4691 response: proto::BlameBufferResponse,
4692) -> Option<git::blame::Blame> {
4693 let response = response.blame_response?;
4694 let entries = response
4695 .entries
4696 .into_iter()
4697 .filter_map(|entry| {
4698 Some(git::blame::BlameEntry {
4699 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4700 range: entry.start_line..entry.end_line,
4701 original_line_number: entry.original_line_number,
4702 committer_name: entry.committer,
4703 committer_time: entry.committer_time,
4704 committer_tz: entry.committer_tz,
4705 committer_email: entry.committer_mail,
4706 author: entry.author,
4707 author_mail: entry.author_mail,
4708 author_time: entry.author_time,
4709 author_tz: entry.author_tz,
4710 summary: entry.summary,
4711 previous: entry.previous,
4712 filename: entry.filename,
4713 })
4714 })
4715 .collect::<Vec<_>>();
4716
4717 let messages = response
4718 .messages
4719 .into_iter()
4720 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4721 .collect::<HashMap<_, _>>();
4722
4723 Some(Blame {
4724 entries,
4725 messages,
4726 remote_url: response.remote_url,
4727 })
4728}
4729
4730fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4731 proto::Branch {
4732 is_head: branch.is_head,
4733 ref_name: branch.ref_name.to_string(),
4734 unix_timestamp: branch
4735 .most_recent_commit
4736 .as_ref()
4737 .map(|commit| commit.commit_timestamp as u64),
4738 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4739 ref_name: upstream.ref_name.to_string(),
4740 tracking: upstream
4741 .tracking
4742 .status()
4743 .map(|upstream| proto::UpstreamTracking {
4744 ahead: upstream.ahead as u64,
4745 behind: upstream.behind as u64,
4746 }),
4747 }),
4748 most_recent_commit: branch
4749 .most_recent_commit
4750 .as_ref()
4751 .map(|commit| proto::CommitSummary {
4752 sha: commit.sha.to_string(),
4753 subject: commit.subject.to_string(),
4754 commit_timestamp: commit.commit_timestamp,
4755 author_name: commit.author_name.to_string(),
4756 }),
4757 }
4758}
4759
4760fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
4761 git::repository::Branch {
4762 is_head: proto.is_head,
4763 ref_name: proto.ref_name.clone().into(),
4764 upstream: proto
4765 .upstream
4766 .as_ref()
4767 .map(|upstream| git::repository::Upstream {
4768 ref_name: upstream.ref_name.to_string().into(),
4769 tracking: upstream
4770 .tracking
4771 .as_ref()
4772 .map(|tracking| {
4773 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
4774 ahead: tracking.ahead as u32,
4775 behind: tracking.behind as u32,
4776 })
4777 })
4778 .unwrap_or(git::repository::UpstreamTracking::Gone),
4779 }),
4780 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
4781 git::repository::CommitSummary {
4782 sha: commit.sha.to_string().into(),
4783 subject: commit.subject.to_string().into(),
4784 commit_timestamp: commit.commit_timestamp,
4785 author_name: commit.author_name.to_string().into(),
4786 has_parent: true,
4787 }
4788 }),
4789 }
4790}
4791
4792fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
4793 proto::GitCommitDetails {
4794 sha: commit.sha.to_string(),
4795 message: commit.message.to_string(),
4796 commit_timestamp: commit.commit_timestamp,
4797 author_email: commit.author_email.to_string(),
4798 author_name: commit.author_name.to_string(),
4799 }
4800}
4801
4802fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
4803 CommitDetails {
4804 sha: proto.sha.clone().into(),
4805 message: proto.message.clone().into(),
4806 commit_timestamp: proto.commit_timestamp,
4807 author_email: proto.author_email.clone().into(),
4808 author_name: proto.author_name.clone().into(),
4809 }
4810}
4811
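/// Builds a fresh repository snapshot (branch, head commit, file statuses,
/// merge state, and remote URLs) from the git backend, returning it along with
/// the repository events implied by its differences from the previous snapshot.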
4812async fn compute_snapshot(
4813 id: RepositoryId,
4814 work_directory_abs_path: Arc<Path>,
4815 prev_snapshot: RepositorySnapshot,
4816 backend: Arc<dyn GitRepository>,
4817) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
4818 let mut events = Vec::new();
4819 let branches = backend.branches().await?;
4820 let branch = branches.into_iter().find(|branch| branch.is_head);
4821 let statuses = backend
4822 .status(std::slice::from_ref(&WORK_DIRECTORY_REPO_PATH))
4823 .await?;
4824 let statuses_by_path = SumTree::from_iter(
4825 statuses
4826 .entries
4827 .iter()
4828 .map(|(repo_path, status)| StatusEntry {
4829 repo_path: repo_path.clone(),
4830 status: *status,
4831 }),
4832 &(),
4833 );
4834 let (merge_details, merge_heads_changed) =
4835 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
4836 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
4837
4838 if merge_heads_changed
4839 || branch != prev_snapshot.branch
4840 || statuses_by_path != prev_snapshot.statuses_by_path
4841 {
4842 events.push(RepositoryEvent::Updated {
4843 full_scan: true,
4844 new_instance: false,
4845 });
4846 }
4847
4848 // Cache merge conflict paths so they don't change from staging/unstaging,
4849 // until the merge heads change (at commit time, etc.).
4850 if merge_heads_changed {
4851 events.push(RepositoryEvent::MergeHeadsChanged);
4852 }
4853
4854 // Useful when `branch` is None, e.g. in a detached HEAD state
4855 let head_commit = match backend.head_sha().await {
4856 Some(head_sha) => backend.show(head_sha).await.log_err(),
4857 None => None,
4858 };
4859
4860 // Used by edit prediction data collection
4861 let remote_origin_url = backend.remote_url("origin");
4862 let remote_upstream_url = backend.remote_url("upstream");
4863
4864 let snapshot = RepositorySnapshot {
4865 id,
4866 statuses_by_path,
4867 work_directory_abs_path,
4868 scan_id: prev_snapshot.scan_id + 1,
4869 branch,
4870 head_commit,
4871 merge: merge_details,
4872 remote_origin_url,
4873 remote_upstream_url,
4874 };
4875
4876 Ok((snapshot, events))
4877}
4878
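/// Converts a protobuf git file status into a `FileStatus`, falling back to
/// the legacy simple status code when no structured variant is present.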
4879fn status_from_proto(
4880 simple_status: i32,
4881 status: Option<proto::GitFileStatus>,
4882) -> anyhow::Result<FileStatus> {
4883 use proto::git_file_status::Variant;
4884
4885 let Some(variant) = status.and_then(|status| status.variant) else {
4886 let code = proto::GitStatus::from_i32(simple_status)
4887 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
4888 let result = match code {
4889 proto::GitStatus::Added => TrackedStatus {
4890 worktree_status: StatusCode::Added,
4891 index_status: StatusCode::Unmodified,
4892 }
4893 .into(),
4894 proto::GitStatus::Modified => TrackedStatus {
4895 worktree_status: StatusCode::Modified,
4896 index_status: StatusCode::Unmodified,
4897 }
4898 .into(),
4899 proto::GitStatus::Conflict => UnmergedStatus {
4900 first_head: UnmergedStatusCode::Updated,
4901 second_head: UnmergedStatusCode::Updated,
4902 }
4903 .into(),
4904 proto::GitStatus::Deleted => TrackedStatus {
4905 worktree_status: StatusCode::Deleted,
4906 index_status: StatusCode::Unmodified,
4907 }
4908 .into(),
4909 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
4910 };
4911 return Ok(result);
4912 };
4913
4914 let result = match variant {
4915 Variant::Untracked(_) => FileStatus::Untracked,
4916 Variant::Ignored(_) => FileStatus::Ignored,
4917 Variant::Unmerged(unmerged) => {
4918 let [first_head, second_head] =
4919 [unmerged.first_head, unmerged.second_head].map(|head| {
4920 let code = proto::GitStatus::from_i32(head)
4921 .with_context(|| format!("Invalid git status code: {head}"))?;
4922 let result = match code {
4923 proto::GitStatus::Added => UnmergedStatusCode::Added,
4924 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
4925 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
4926 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
4927 };
4928 Ok(result)
4929 });
4930 let [first_head, second_head] = [first_head?, second_head?];
4931 UnmergedStatus {
4932 first_head,
4933 second_head,
4934 }
4935 .into()
4936 }
4937 Variant::Tracked(tracked) => {
4938 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
4939 .map(|status| {
4940 let code = proto::GitStatus::from_i32(status)
4941 .with_context(|| format!("Invalid git status code: {status}"))?;
4942 let result = match code {
4943 proto::GitStatus::Modified => StatusCode::Modified,
4944 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
4945 proto::GitStatus::Added => StatusCode::Added,
4946 proto::GitStatus::Deleted => StatusCode::Deleted,
4947 proto::GitStatus::Renamed => StatusCode::Renamed,
4948 proto::GitStatus::Copied => StatusCode::Copied,
4949 proto::GitStatus::Unmodified => StatusCode::Unmodified,
4950 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
4951 };
4952 Ok(result)
4953 });
4954 let [index_status, worktree_status] = [index_status?, worktree_status?];
4955 TrackedStatus {
4956 index_status,
4957 worktree_status,
4958 }
4959 .into()
4960 }
4961 };
4962 Ok(result)
4963}
4964
4965fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
4966 use proto::git_file_status::{Tracked, Unmerged, Variant};
4967
4968 let variant = match status {
4969 FileStatus::Untracked => Variant::Untracked(Default::default()),
4970 FileStatus::Ignored => Variant::Ignored(Default::default()),
4971 FileStatus::Unmerged(UnmergedStatus {
4972 first_head,
4973 second_head,
4974 }) => Variant::Unmerged(Unmerged {
4975 first_head: unmerged_status_to_proto(first_head),
4976 second_head: unmerged_status_to_proto(second_head),
4977 }),
4978 FileStatus::Tracked(TrackedStatus {
4979 index_status,
4980 worktree_status,
4981 }) => Variant::Tracked(Tracked {
4982 index_status: tracked_status_to_proto(index_status),
4983 worktree_status: tracked_status_to_proto(worktree_status),
4984 }),
4985 };
4986 proto::GitFileStatus {
4987 variant: Some(variant),
4988 }
4989}
4990
4991fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
4992 match code {
4993 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
4994 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
4995 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
4996 }
4997}
4998
4999fn tracked_status_to_proto(code: StatusCode) -> i32 {
5000 match code {
5001 StatusCode::Added => proto::GitStatus::Added as _,
5002 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5003 StatusCode::Modified => proto::GitStatus::Modified as _,
5004 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5005 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5006 StatusCode::Copied => proto::GitStatus::Copied as _,
5007 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5008 }
5009}