1pub mod git_traversal;
2
3use crate::{
4 ProjectEnvironment, ProjectItem, ProjectPath,
5 buffer_store::{BufferStore, BufferStoreEvent},
6 worktree_store::{WorktreeStore, WorktreeStoreEvent},
7};
8use anyhow::{Context as _, Result, anyhow, bail};
9use askpass::AskPassDelegate;
10use buffer_diff::{BufferDiff, BufferDiffEvent};
11use client::ProjectId;
12use collections::HashMap;
13use fs::Fs;
14use futures::{
15 FutureExt as _, StreamExt as _,
16 channel::{mpsc, oneshot},
17 future::{self, Shared},
18};
19use git::{
20 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
21 blame::Blame,
22 parse_git_remote_url,
23 repository::{
24 Branch, CommitDetails, CommitDiff, CommitFile, DiffType, GitRepository,
25 GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
26 UpstreamTrackingStatus,
27 },
28 status::{
29 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
30 },
31};
32use gpui::{
33 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
34 WeakEntity,
35};
36use language::{
37 Buffer, BufferEvent, Language, LanguageRegistry,
38 proto::{deserialize_version, serialize_version},
39};
40use parking_lot::Mutex;
41use rpc::{
42 AnyProtoClient, TypedEnvelope,
43 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
44};
45use serde::Deserialize;
46use std::{
47 cmp::Ordering,
48 collections::{BTreeSet, VecDeque},
49 future::Future,
50 mem,
51 ops::Range,
52 path::{Path, PathBuf},
53 sync::{
54 Arc,
55 atomic::{self, AtomicU64},
56 },
57 time::Instant,
58};
59use sum_tree::{Edit, SumTree, TreeSet};
60use text::{Bias, BufferId};
61use util::{ResultExt, debug_panic, post_inc};
62use worktree::{
63 File, PathKey, PathProgress, PathSummary, PathTarget, UpdatedGitRepositoriesSet, Worktree,
64};
65
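/// Stores the Git state for a project: the repositories discovered in its
/// worktrees, per-buffer diff state, and the plumbing for sharing both with
/// remote collaborators.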
66pub struct GitStore {
67 state: GitStoreState,
68 buffer_store: Entity<BufferStore>,
69 worktree_store: Entity<WorktreeStore>,
70 repositories: HashMap<RepositoryId, Entity<Repository>>,
71 active_repo_id: Option<RepositoryId>,
72 #[allow(clippy::type_complexity)]
73 loading_diffs:
74 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
75 diffs: HashMap<BufferId, Entity<BufferDiffState>>,
76 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
77 _subscriptions: Vec<Subscription>,
78}
79
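/// The diffs for a single buffer that have been shared with a remote peer.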
80#[derive(Default)]
81struct SharedDiffs {
82 unstaged: Option<Entity<BufferDiff>>,
83 uncommitted: Option<Entity<BufferDiff>>,
84}
85
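/// Diff state for a single open buffer: weak handles to its unstaged and
/// uncommitted diffs, plus the cached index and HEAD texts they are
/// recalculated against.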
86#[derive(Default)]
87struct BufferDiffState {
88 unstaged_diff: Option<WeakEntity<BufferDiff>>,
89 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
90 recalculate_diff_task: Option<Task<Result<()>>>,
91 language: Option<Arc<Language>>,
92 language_registry: Option<Arc<LanguageRegistry>>,
93 diff_updated_futures: Vec<oneshot::Sender<()>>,
94 hunk_staging_operation_count: usize,
95
96 head_text: Option<Arc<String>>,
97 index_text: Option<Arc<String>>,
98 head_changed: bool,
99 index_changed: bool,
100 language_changed: bool,
101}
102
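/// A change to the base texts that a buffer's diffs are computed against
/// (the index, HEAD, or both).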
103#[derive(Clone, Debug)]
104enum DiffBasesChange {
105 SetIndex(Option<String>),
106 SetHead(Option<String>),
107 SetEach {
108 index: Option<String>,
109 head: Option<String>,
110 },
111 SetBoth(Option<String>),
112}
113
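/// Which diff is associated with a buffer: against the index (unstaged) or
/// against HEAD (uncommitted).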
114#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
115enum DiffKind {
116 Unstaged,
117 Uncommitted,
118}
119
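/// The role this store plays: `Local` owns on-disk repositories, `Ssh`
/// forwards operations to an upstream host over SSH, and `Remote` is a
/// collaborator's replica of another participant's project.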
120enum GitStoreState {
121 Local {
122 next_repository_id: Arc<AtomicU64>,
123 downstream: Option<LocalDownstreamState>,
124 project_environment: Entity<ProjectEnvironment>,
125 fs: Arc<dyn Fs>,
126 },
127 Ssh {
128 upstream_client: AnyProtoClient,
129 upstream_project_id: ProjectId,
130 downstream: Option<(AnyProtoClient, ProjectId)>,
131 },
132 Remote {
133 upstream_client: AnyProtoClient,
134 upstream_project_id: ProjectId,
135 },
136}
137
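/// An update queued for forwarding to downstream (collaborating) clients.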
138enum DownstreamUpdate {
139 UpdateRepository(RepositorySnapshot),
140 RemoveRepository(RepositoryId),
141}
142
143struct LocalDownstreamState {
144 client: AnyProtoClient,
145 project_id: ProjectId,
146 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
147 _task: Task<Result<()>>,
148}
149
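/// A checkpoint of every repository in the store, keyed by the repository's
/// work directory path.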
150#[derive(Clone)]
151pub struct GitStoreCheckpoint {
152 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
153}
154
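/// The Git status of a single path within a repository.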
155#[derive(Clone, Debug, PartialEq, Eq)]
156pub struct StatusEntry {
157 pub repo_path: RepoPath,
158 pub status: FileStatus,
159}
160
161impl StatusEntry {
162 fn to_proto(&self) -> proto::StatusEntry {
163 let simple_status = match self.status {
164 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
165 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
166 FileStatus::Tracked(TrackedStatus {
167 index_status,
168 worktree_status,
169 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
170 worktree_status
171 } else {
172 index_status
173 }),
174 };
175
176 proto::StatusEntry {
177 repo_path: self.repo_path.as_ref().to_proto(),
178 simple_status,
179 status: Some(status_to_proto(self.status)),
180 }
181 }
182}
183
184impl TryFrom<proto::StatusEntry> for StatusEntry {
185 type Error = anyhow::Error;
186
187 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
188 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
189 let status = status_from_proto(value.simple_status, value.status)?;
190 Ok(Self { repo_path, status })
191 }
192}
193
194impl sum_tree::Item for StatusEntry {
195 type Summary = PathSummary<GitSummary>;
196
197 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
198 PathSummary {
199 max_path: self.repo_path.0.clone(),
200 item_summary: self.status.summary(),
201 }
202 }
203}
204
205impl sum_tree::KeyedItem for StatusEntry {
206 type Key = PathKey;
207
208 fn key(&self) -> Self::Key {
209 PathKey(self.repo_path.0.clone())
210 }
211}
212
213#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
214pub struct RepositoryId(pub u64);
215
216#[derive(Clone, Debug, PartialEq, Eq)]
217pub struct RepositorySnapshot {
218 pub id: RepositoryId,
219 pub merge_message: Option<SharedString>,
220 pub statuses_by_path: SumTree<StatusEntry>,
221 pub work_directory_abs_path: Arc<Path>,
222 pub branch: Option<Branch>,
223 pub merge_conflicts: TreeSet<RepoPath>,
224 pub merge_head_shas: Vec<SharedString>,
225 pub scan_id: u64,
226}
227
228type JobId = u64;
229
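/// Metadata about an in-flight Git job: when it started and a descriptive
/// message.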
230#[derive(Clone, Debug, PartialEq, Eq)]
231pub struct JobInfo {
232 pub start: Instant,
233 pub message: SharedString,
234}
235
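/// A single Git repository tracked by the `GitStore`, along with its queue of
/// pending Git jobs. Dereferences to the most recent `RepositorySnapshot`.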
236pub struct Repository {
237 this: WeakEntity<Self>,
238 snapshot: RepositorySnapshot,
239 commit_message_buffer: Option<Entity<Buffer>>,
240 git_store: WeakEntity<GitStore>,
241 // For a local repository, holds paths that have had worktree events since the last status scan completed,
242 // and that should be examined during the next status scan.
243 paths_needing_status_update: BTreeSet<RepoPath>,
244 job_sender: mpsc::UnboundedSender<GitJob>,
245 active_jobs: HashMap<JobId, JobInfo>,
246 job_id: JobId,
247 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
248 latest_askpass_id: u64,
249}
250
251impl std::ops::Deref for Repository {
252 type Target = RepositorySnapshot;
253
254 fn deref(&self) -> &Self::Target {
255 &self.snapshot
256 }
257}
258
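/// How a repository's Git operations are executed: directly against a local
/// backend, or by sending requests to the upstream project over RPC.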
259#[derive(Clone)]
260pub enum RepositoryState {
261 Local {
262 backend: Arc<dyn GitRepository>,
263 environment: Arc<HashMap<String, String>>,
264 },
265 Remote {
266 project_id: ProjectId,
267 client: AnyProtoClient,
268 },
269}
270
271#[derive(Clone, Debug)]
272pub enum RepositoryEvent {
273 Updated { full_scan: bool },
274 MergeHeadsChanged,
275}
276
277#[derive(Clone, Debug)]
278pub struct JobsUpdated;
279
280#[derive(Debug)]
281pub enum GitStoreEvent {
282 ActiveRepositoryChanged(Option<RepositoryId>),
283 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
284 RepositoryAdded(RepositoryId),
285 RepositoryRemoved(RepositoryId),
286 IndexWriteError(anyhow::Error),
287 JobsUpdated,
288}
289
290impl EventEmitter<RepositoryEvent> for Repository {}
291impl EventEmitter<JobsUpdated> for Repository {}
292impl EventEmitter<GitStoreEvent> for GitStore {}
293
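/// A unit of asynchronous work to run against a repository, optionally tagged
/// with a `GitJobKey` identifying the kind of work.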
294pub struct GitJob {
295 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
296 key: Option<GitJobKey>,
297}
298
299#[derive(PartialEq, Eq)]
300enum GitJobKey {
301 WriteIndex(RepoPath),
302 BatchReadIndex,
303 RefreshStatuses,
304 ReloadGitState,
305}
306
307impl GitStore {
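    /// Creates a `GitStore` for a local project whose repositories live on disk.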
308 pub fn local(
309 worktree_store: &Entity<WorktreeStore>,
310 buffer_store: Entity<BufferStore>,
311 environment: Entity<ProjectEnvironment>,
312 fs: Arc<dyn Fs>,
313 cx: &mut Context<Self>,
314 ) -> Self {
315 Self::new(
316 worktree_store.clone(),
317 buffer_store,
318 GitStoreState::Local {
319 next_repository_id: Arc::new(AtomicU64::new(1)),
320 downstream: None,
321 project_environment: environment,
322 fs,
323 },
324 cx,
325 )
326 }
327
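    /// Creates a `GitStore` for a collaborator's replica of a remote project.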
328 pub fn remote(
329 worktree_store: &Entity<WorktreeStore>,
330 buffer_store: Entity<BufferStore>,
331 upstream_client: AnyProtoClient,
332 project_id: ProjectId,
333 cx: &mut Context<Self>,
334 ) -> Self {
335 Self::new(
336 worktree_store.clone(),
337 buffer_store,
338 GitStoreState::Remote {
339 upstream_client,
340 upstream_project_id: project_id,
341 },
342 cx,
343 )
344 }
345
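    /// Creates a `GitStore` for a project accessed over SSH, proxying Git
    /// operations to the upstream host.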
346 pub fn ssh(
347 worktree_store: &Entity<WorktreeStore>,
348 buffer_store: Entity<BufferStore>,
349 upstream_client: AnyProtoClient,
350 cx: &mut Context<Self>,
351 ) -> Self {
352 Self::new(
353 worktree_store.clone(),
354 buffer_store,
355 GitStoreState::Ssh {
356 upstream_client,
357 upstream_project_id: ProjectId(SSH_PROJECT_ID),
358 downstream: None,
359 },
360 cx,
361 )
362 }
363
364 fn new(
365 worktree_store: Entity<WorktreeStore>,
366 buffer_store: Entity<BufferStore>,
367 state: GitStoreState,
368 cx: &mut Context<Self>,
369 ) -> Self {
370 let _subscriptions = vec![
371 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
372 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
373 ];
374
375 GitStore {
376 state,
377 buffer_store,
378 worktree_store,
379 repositories: HashMap::default(),
380 active_repo_id: None,
381 _subscriptions,
382 loading_diffs: HashMap::default(),
383 shared_diffs: HashMap::default(),
384 diffs: HashMap::default(),
385 }
386 }
387
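    /// Registers the RPC handlers for Git-related messages on the given client.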
388 pub fn init(client: &AnyProtoClient) {
389 client.add_entity_request_handler(Self::handle_get_remotes);
390 client.add_entity_request_handler(Self::handle_get_branches);
391 client.add_entity_request_handler(Self::handle_change_branch);
392 client.add_entity_request_handler(Self::handle_create_branch);
393 client.add_entity_request_handler(Self::handle_git_init);
394 client.add_entity_request_handler(Self::handle_push);
395 client.add_entity_request_handler(Self::handle_pull);
396 client.add_entity_request_handler(Self::handle_fetch);
397 client.add_entity_request_handler(Self::handle_stage);
398 client.add_entity_request_handler(Self::handle_unstage);
399 client.add_entity_request_handler(Self::handle_commit);
400 client.add_entity_request_handler(Self::handle_reset);
401 client.add_entity_request_handler(Self::handle_show);
402 client.add_entity_request_handler(Self::handle_load_commit_diff);
403 client.add_entity_request_handler(Self::handle_checkout_files);
404 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
405 client.add_entity_request_handler(Self::handle_set_index_text);
406 client.add_entity_request_handler(Self::handle_askpass);
407 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
408 client.add_entity_request_handler(Self::handle_git_diff);
409 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
410 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
411 client.add_entity_message_handler(Self::handle_update_diff_bases);
412 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
413 client.add_entity_request_handler(Self::handle_blame_buffer);
414 client.add_entity_message_handler(Self::handle_update_repository);
415 client.add_entity_message_handler(Self::handle_remove_repository);
416 }
417
418 pub fn is_local(&self) -> bool {
419 matches!(self.state, GitStoreState::Local { .. })
420 }
421
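    /// Begins sharing repository state with a downstream client, sending
    /// initial snapshots and then streaming incremental updates.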
422 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
423 match &mut self.state {
424 GitStoreState::Ssh {
425 downstream: downstream_client,
426 ..
427 } => {
428 for repo in self.repositories.values() {
429 let update = repo.read(cx).snapshot.initial_update(project_id);
430 for update in split_repository_update(update) {
431 client.send(update).log_err();
432 }
433 }
434 *downstream_client = Some((client, ProjectId(project_id)));
435 }
436 GitStoreState::Local {
437 downstream: downstream_client,
438 ..
439 } => {
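                // Queue the current snapshots, then relay repository updates to
                // the downstream client from a background task.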
440 let mut snapshots = HashMap::default();
441 let (updates_tx, mut updates_rx) = mpsc::unbounded();
442 for repo in self.repositories.values() {
443 updates_tx
444 .unbounded_send(DownstreamUpdate::UpdateRepository(
445 repo.read(cx).snapshot.clone(),
446 ))
447 .ok();
448 }
449 *downstream_client = Some(LocalDownstreamState {
450 client: client.clone(),
451 project_id: ProjectId(project_id),
452 updates_tx,
453 _task: cx.spawn(async move |this, cx| {
454 cx.background_spawn(async move {
455 while let Some(update) = updates_rx.next().await {
456 match update {
457 DownstreamUpdate::UpdateRepository(snapshot) => {
458 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
459 {
460 let update =
461 snapshot.build_update(old_snapshot, project_id);
462 *old_snapshot = snapshot;
463 for update in split_repository_update(update) {
464 client.send(update)?;
465 }
466 } else {
467 let update = snapshot.initial_update(project_id);
468 for update in split_repository_update(update) {
469 client.send(update)?;
470 }
471 snapshots.insert(snapshot.id, snapshot);
472 }
473 }
474 DownstreamUpdate::RemoveRepository(id) => {
475 client.send(proto::RemoveRepository {
476 project_id,
477 id: id.to_proto(),
478 })?;
479 }
480 }
481 }
482 anyhow::Ok(())
483 })
484 .await
485 .ok();
486 this.update(cx, |this, _| {
487 if let GitStoreState::Local {
488 downstream: downstream_client,
489 ..
490 } = &mut this.state
491 {
492 downstream_client.take();
493 } else {
494 unreachable!("unshared called on remote store");
495 }
496 })
497 }),
498 });
499 }
500 GitStoreState::Remote { .. } => {
501 debug_panic!("shared called on remote store");
502 }
503 }
504 }
505
506 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
507 match &mut self.state {
508 GitStoreState::Local {
509 downstream: downstream_client,
510 ..
511 } => {
512 downstream_client.take();
513 }
514 GitStoreState::Ssh {
515 downstream: downstream_client,
516 ..
517 } => {
518 downstream_client.take();
519 }
520 GitStoreState::Remote { .. } => {
521 debug_panic!("unshared called on remote store");
522 }
523 }
524 self.shared_diffs.clear();
525 }
526
527 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
528 self.shared_diffs.remove(peer_id);
529 }
530
531 pub fn active_repository(&self) -> Option<Entity<Repository>> {
532 self.active_repo_id
533 .as_ref()
            .map(|id| self.repositories[id].clone())
535 }
536
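    /// Returns the diff between the buffer's contents and the index, loading
    /// the staged text if it isn't already available.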
537 pub fn open_unstaged_diff(
538 &mut self,
539 buffer: Entity<Buffer>,
540 cx: &mut Context<Self>,
541 ) -> Task<Result<Entity<BufferDiff>>> {
542 let buffer_id = buffer.read(cx).remote_id();
543 if let Some(diff_state) = self.diffs.get(&buffer_id) {
544 if let Some(unstaged_diff) = diff_state
545 .read(cx)
546 .unstaged_diff
547 .as_ref()
548 .and_then(|weak| weak.upgrade())
549 {
550 if let Some(task) =
551 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
552 {
553 return cx.background_executor().spawn(async move {
554 task.await?;
555 Ok(unstaged_diff)
556 });
557 }
558 return Task::ready(Ok(unstaged_diff));
559 }
560 }
561
562 let Some((repo, repo_path)) =
563 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
564 else {
565 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
566 };
567
568 let task = self
569 .loading_diffs
570 .entry((buffer_id, DiffKind::Unstaged))
571 .or_insert_with(|| {
572 let staged_text = repo.update(cx, |repo, cx| {
573 repo.load_staged_text(buffer_id, repo_path, cx)
574 });
575 cx.spawn(async move |this, cx| {
576 Self::open_diff_internal(
577 this,
578 DiffKind::Unstaged,
579 staged_text.await.map(DiffBasesChange::SetIndex),
580 buffer,
581 cx,
582 )
583 .await
584 .map_err(Arc::new)
585 })
586 .shared()
587 })
588 .clone();
589
590 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
591 }
592
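    /// Returns the diff between the buffer's contents and HEAD, loading the
    /// committed text if it isn't already available.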
593 pub fn open_uncommitted_diff(
594 &mut self,
595 buffer: Entity<Buffer>,
596 cx: &mut Context<Self>,
597 ) -> Task<Result<Entity<BufferDiff>>> {
598 let buffer_id = buffer.read(cx).remote_id();
599
600 if let Some(diff_state) = self.diffs.get(&buffer_id) {
601 if let Some(uncommitted_diff) = diff_state
602 .read(cx)
603 .uncommitted_diff
604 .as_ref()
605 .and_then(|weak| weak.upgrade())
606 {
607 if let Some(task) =
608 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
609 {
610 return cx.background_executor().spawn(async move {
611 task.await?;
612 Ok(uncommitted_diff)
613 });
614 }
615 return Task::ready(Ok(uncommitted_diff));
616 }
617 }
618
619 let Some((repo, repo_path)) =
620 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
621 else {
622 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
623 };
624
625 let task = self
626 .loading_diffs
627 .entry((buffer_id, DiffKind::Uncommitted))
628 .or_insert_with(|| {
629 let changes = repo.update(cx, |repo, cx| {
630 repo.load_committed_text(buffer_id, repo_path, cx)
631 });
632
633 cx.spawn(async move |this, cx| {
634 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
635 .await
636 .map_err(Arc::new)
637 })
638 .shared()
639 })
640 .clone();
641
642 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
643 }
644
645 async fn open_diff_internal(
646 this: WeakEntity<Self>,
647 kind: DiffKind,
648 texts: Result<DiffBasesChange>,
649 buffer_entity: Entity<Buffer>,
650 cx: &mut AsyncApp,
651 ) -> Result<Entity<BufferDiff>> {
652 let diff_bases_change = match texts {
653 Err(e) => {
654 this.update(cx, |this, cx| {
655 let buffer = buffer_entity.read(cx);
656 let buffer_id = buffer.remote_id();
657 this.loading_diffs.remove(&(buffer_id, kind));
658 })?;
659 return Err(e);
660 }
661 Ok(change) => change,
662 };
663
664 this.update(cx, |this, cx| {
665 let buffer = buffer_entity.read(cx);
666 let buffer_id = buffer.remote_id();
667 let language = buffer.language().cloned();
668 let language_registry = buffer.language_registry();
669 let text_snapshot = buffer.text_snapshot();
670 this.loading_diffs.remove(&(buffer_id, kind));
671
672 let diff_state = this
673 .diffs
674 .entry(buffer_id)
675 .or_insert_with(|| cx.new(|_| BufferDiffState::default()));
676
677 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
678
679 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
680 diff_state.update(cx, |diff_state, cx| {
681 diff_state.language = language;
682 diff_state.language_registry = language_registry;
683
684 match kind {
685 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
686 DiffKind::Uncommitted => {
687 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
688 diff
689 } else {
690 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
691 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
692 unstaged_diff
693 };
694
695 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
696 diff_state.uncommitted_diff = Some(diff.downgrade())
697 }
698 }
699
700 let rx = diff_state.diff_bases_changed(text_snapshot, diff_bases_change, 0, cx);
701
702 anyhow::Ok(async move {
703 rx.await.ok();
704 Ok(diff)
705 })
706 })
707 })??
708 .await
709 }
710
711 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
712 let diff_state = self.diffs.get(&buffer_id)?;
713 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
714 }
715
716 pub fn get_uncommitted_diff(
717 &self,
718 buffer_id: BufferId,
719 cx: &App,
720 ) -> Option<Entity<BufferDiff>> {
721 let diff_state = self.diffs.get(&buffer_id)?;
722 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
723 }
724
725 pub fn project_path_git_status(
726 &self,
727 project_path: &ProjectPath,
728 cx: &App,
729 ) -> Option<FileStatus> {
730 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
731 Some(repo.read(cx).status_for_path(&repo_path)?.status)
732 }
733
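    /// Captures a checkpoint of every repository in the store, which can later
    /// be compared against or restored.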
734 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
735 let mut work_directory_abs_paths = Vec::new();
736 let mut checkpoints = Vec::new();
737 for repository in self.repositories.values() {
738 repository.update(cx, |repository, _| {
739 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
740 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
741 });
742 }
743
744 cx.background_executor().spawn(async move {
745 let checkpoints = future::try_join_all(checkpoints).await?;
746 Ok(GitStoreCheckpoint {
747 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
748 .into_iter()
749 .zip(checkpoints)
750 .collect(),
751 })
752 })
753 }
754
755 pub fn restore_checkpoint(
756 &self,
757 checkpoint: GitStoreCheckpoint,
758 cx: &mut App,
759 ) -> Task<Result<()>> {
760 let repositories_by_work_dir_abs_path = self
761 .repositories
762 .values()
763 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
764 .collect::<HashMap<_, _>>();
765
766 let mut tasks = Vec::new();
767 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
768 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
769 let restore = repository.update(cx, |repository, _| {
770 repository.restore_checkpoint(checkpoint)
771 });
772 tasks.push(async move { restore.await? });
773 }
774 }
775 cx.background_spawn(async move {
776 future::try_join_all(tasks).await?;
777 Ok(())
778 })
779 }
780
781 /// Compares two checkpoints, returning true if they are equal.
782 pub fn compare_checkpoints(
783 &self,
784 left: GitStoreCheckpoint,
785 mut right: GitStoreCheckpoint,
786 cx: &mut App,
787 ) -> Task<Result<bool>> {
788 let repositories_by_work_dir_abs_path = self
789 .repositories
790 .values()
791 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
792 .collect::<HashMap<_, _>>();
793
794 let mut tasks = Vec::new();
795 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
796 if let Some(right_checkpoint) = right
797 .checkpoints_by_work_dir_abs_path
798 .remove(&work_dir_abs_path)
799 {
800 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
801 {
802 let compare = repository.update(cx, |repository, _| {
803 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
804 });
805
806 tasks.push(async move { compare.await? });
807 }
808 } else {
809 return Task::ready(Ok(false));
810 }
811 }
812 cx.background_spawn(async move {
813 Ok(future::try_join_all(tasks)
814 .await?
815 .into_iter()
816 .all(|result| result))
817 })
818 }
819
820 pub fn delete_checkpoint(
821 &self,
822 checkpoint: GitStoreCheckpoint,
823 cx: &mut App,
824 ) -> Task<Result<()>> {
825 let repositories_by_work_directory_abs_path = self
826 .repositories
827 .values()
828 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
829 .collect::<HashMap<_, _>>();
830
831 let mut tasks = Vec::new();
832 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
833 if let Some(repository) =
834 repositories_by_work_directory_abs_path.get(&work_dir_abs_path)
835 {
836 let delete = repository.update(cx, |this, _| this.delete_checkpoint(checkpoint));
837 tasks.push(async move { delete.await? });
838 }
839 }
840 cx.background_spawn(async move {
841 future::try_join_all(tasks).await?;
842 Ok(())
843 })
844 }
845
    /// Computes git blame information for a buffer, optionally at an earlier version of its contents.
847 pub fn blame_buffer(
848 &self,
849 buffer: &Entity<Buffer>,
850 version: Option<clock::Global>,
851 cx: &mut App,
852 ) -> Task<Result<Option<Blame>>> {
853 let buffer = buffer.read(cx);
854 let Some((repo, repo_path)) =
855 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
856 else {
857 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
858 };
859 let content = match &version {
860 Some(version) => buffer.rope_for_version(version).clone(),
861 None => buffer.as_rope().clone(),
862 };
863 let version = version.unwrap_or(buffer.version());
864 let buffer_id = buffer.remote_id();
865
866 let rx = repo.update(cx, |repo, _| {
867 repo.send_job(None, move |state, _| async move {
868 match state {
869 RepositoryState::Local { backend, .. } => backend
870 .blame(repo_path.clone(), content)
871 .await
872 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
873 .map(Some),
874 RepositoryState::Remote { project_id, client } => {
875 let response = client
876 .request(proto::BlameBuffer {
877 project_id: project_id.to_proto(),
878 buffer_id: buffer_id.into(),
879 version: serialize_version(&version),
880 })
881 .await?;
882 Ok(deserialize_blame_buffer_response(response))
883 }
884 }
885 })
886 });
887
888 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
889 }
890
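    /// Builds a permalink to the selected lines of the buffer's file on its Git
    /// hosting provider, falling back to crate metadata for Rust files in the
    /// Cargo registry.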
891 pub fn get_permalink_to_line(
892 &self,
893 buffer: &Entity<Buffer>,
894 selection: Range<u32>,
895 cx: &mut App,
896 ) -> Task<Result<url::Url>> {
897 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
898 return Task::ready(Err(anyhow!("buffer has no file")));
899 };
900
901 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
902 &(file.worktree.read(cx).id(), file.path.clone()).into(),
903 cx,
904 ) else {
905 // If we're not in a Git repo, check whether this is a Rust source
906 // file in the Cargo registry (presumably opened with go-to-definition
907 // from a normal Rust file). If so, we can put together a permalink
908 // using crate metadata.
909 if buffer
910 .read(cx)
911 .language()
912 .is_none_or(|lang| lang.name() != "Rust".into())
913 {
914 return Task::ready(Err(anyhow!("no permalink available")));
915 }
916 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
917 return Task::ready(Err(anyhow!("no permalink available")));
918 };
919 return cx.spawn(async move |cx| {
920 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
921 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
922 .map_err(|_| anyhow!("no permalink available"))
923 });
924
925 // TODO remote case
926 };
927
928 let buffer_id = buffer.read(cx).remote_id();
929 let branch = repo.read(cx).branch.clone();
930 let remote = branch
931 .as_ref()
932 .and_then(|b| b.upstream.as_ref())
933 .and_then(|b| b.remote_name())
934 .unwrap_or("origin")
935 .to_string();
936
937 let rx = repo.update(cx, |repo, _| {
938 repo.send_job(None, move |state, cx| async move {
939 match state {
940 RepositoryState::Local { backend, .. } => {
941 let origin_url = backend
942 .remote_url(&remote)
943 .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;
944
945 let sha = backend
946 .head_sha()
947 .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
948
949 let provider_registry =
950 cx.update(GitHostingProviderRegistry::default_global)?;
951
952 let (provider, remote) =
953 parse_git_remote_url(provider_registry, &origin_url)
954 .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;
955
956 let path = repo_path
957 .to_str()
958 .ok_or_else(|| anyhow!("failed to convert path to string"))?;
959
960 Ok(provider.build_permalink(
961 remote,
962 BuildPermalinkParams {
963 sha: &sha,
964 path,
965 selection: Some(selection),
966 },
967 ))
968 }
969 RepositoryState::Remote { project_id, client } => {
970 let response = client
971 .request(proto::GetPermalinkToLine {
972 project_id: project_id.to_proto(),
973 buffer_id: buffer_id.into(),
974 selection: Some(proto::Range {
975 start: selection.start as u64,
976 end: selection.end as u64,
977 }),
978 })
979 .await?;
980
981 url::Url::parse(&response.permalink).context("failed to parse permalink")
982 }
983 }
984 })
985 });
986 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
987 }
988
989 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
990 match &self.state {
991 GitStoreState::Local {
992 downstream: downstream_client,
993 ..
994 } => downstream_client
995 .as_ref()
996 .map(|state| (state.client.clone(), state.project_id)),
997 GitStoreState::Ssh {
998 downstream: downstream_client,
999 ..
1000 } => downstream_client.clone(),
1001 GitStoreState::Remote { .. } => None,
1002 }
1003 }
1004
1005 fn upstream_client(&self) -> Option<AnyProtoClient> {
1006 match &self.state {
1007 GitStoreState::Local { .. } => None,
1008 GitStoreState::Ssh {
1009 upstream_client, ..
1010 }
1011 | GitStoreState::Remote {
1012 upstream_client, ..
1013 } => Some(upstream_client.clone()),
1014 }
1015 }
1016
1017 fn on_worktree_store_event(
1018 &mut self,
1019 worktree_store: Entity<WorktreeStore>,
1020 event: &WorktreeStoreEvent,
1021 cx: &mut Context<Self>,
1022 ) {
1023 let GitStoreState::Local {
1024 project_environment,
1025 downstream,
1026 next_repository_id,
1027 fs,
1028 } = &self.state
1029 else {
1030 return;
1031 };
1032
1033 match event {
1034 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1035 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
1036 for (relative_path, _, _) in updated_entries.iter() {
1037 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1038 &(*worktree_id, relative_path.clone()).into(),
1039 cx,
1040 ) else {
1041 continue;
1042 };
1043 paths_by_git_repo.entry(repo).or_default().push(repo_path)
1044 }
1045
1046 for (repo, paths) in paths_by_git_repo {
1047 repo.update(cx, |repo, cx| {
1048 repo.paths_changed(
1049 paths,
1050 downstream
1051 .as_ref()
1052 .map(|downstream| downstream.updates_tx.clone()),
1053 cx,
1054 );
1055 });
1056 }
1057 }
1058 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1059 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1060 else {
1061 return;
1062 };
1063 if !worktree.read(cx).is_visible() {
1064 log::debug!(
1065 "not adding repositories for local worktree {:?} because it's not visible",
1066 worktree.read(cx).abs_path()
1067 );
1068 return;
1069 }
1070 self.update_repositories_from_worktree(
1071 project_environment.clone(),
1072 next_repository_id.clone(),
1073 downstream
1074 .as_ref()
1075 .map(|downstream| downstream.updates_tx.clone()),
1076 changed_repos.clone(),
1077 fs.clone(),
1078 cx,
1079 );
1080 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1081 }
1082 _ => {}
1083 }
1084 }
1085
1086 fn on_repository_event(
1087 &mut self,
1088 repo: Entity<Repository>,
1089 event: &RepositoryEvent,
1090 cx: &mut Context<Self>,
1091 ) {
1092 let id = repo.read(cx).id;
1093 cx.emit(GitStoreEvent::RepositoryUpdated(
1094 id,
1095 event.clone(),
1096 self.active_repo_id == Some(id),
1097 ))
1098 }
1099
1100 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1101 cx.emit(GitStoreEvent::JobsUpdated)
1102 }
1103
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1105 fn update_repositories_from_worktree(
1106 &mut self,
1107 project_environment: Entity<ProjectEnvironment>,
1108 next_repository_id: Arc<AtomicU64>,
1109 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1110 updated_git_repositories: UpdatedGitRepositoriesSet,
1111 fs: Arc<dyn Fs>,
1112 cx: &mut Context<Self>,
1113 ) {
1114 let mut removed_ids = Vec::new();
1115 for update in updated_git_repositories.iter() {
1116 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1117 let existing_work_directory_abs_path =
1118 repo.read(cx).work_directory_abs_path.clone();
1119 Some(&existing_work_directory_abs_path)
1120 == update.old_work_directory_abs_path.as_ref()
1121 || Some(&existing_work_directory_abs_path)
1122 == update.new_work_directory_abs_path.as_ref()
1123 }) {
1124 if let Some(new_work_directory_abs_path) =
1125 update.new_work_directory_abs_path.clone()
1126 {
1127 existing.update(cx, |existing, cx| {
1128 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1129 existing.schedule_scan(updates_tx.clone(), cx);
1130 });
1131 } else {
1132 removed_ids.push(*id);
1133 }
1134 } else if let Some((work_directory_abs_path, dot_git_abs_path)) = update
1135 .new_work_directory_abs_path
1136 .clone()
1137 .zip(update.dot_git_abs_path.clone())
1138 {
1139 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1140 let git_store = cx.weak_entity();
1141 let repo = cx.new(|cx| {
1142 let mut repo = Repository::local(
1143 id,
1144 work_directory_abs_path,
1145 dot_git_abs_path,
1146 project_environment.downgrade(),
1147 fs.clone(),
1148 git_store,
1149 cx,
1150 );
1151 repo.schedule_scan(updates_tx.clone(), cx);
1152 repo
1153 });
1154 self._subscriptions
1155 .push(cx.subscribe(&repo, Self::on_repository_event));
1156 self._subscriptions
1157 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1158 self.repositories.insert(id, repo);
1159 cx.emit(GitStoreEvent::RepositoryAdded(id));
1160 self.active_repo_id.get_or_insert_with(|| {
1161 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1162 id
1163 });
1164 }
1165 }
1166
1167 for id in removed_ids {
1168 if self.active_repo_id == Some(id) {
1169 self.active_repo_id = None;
1170 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1171 }
1172 self.repositories.remove(&id);
1173 if let Some(updates_tx) = updates_tx.as_ref() {
1174 updates_tx
1175 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1176 .ok();
1177 }
1178 }
1179 }
1180
1181 fn on_buffer_store_event(
1182 &mut self,
1183 _: Entity<BufferStore>,
1184 event: &BufferStoreEvent,
1185 cx: &mut Context<Self>,
1186 ) {
1187 match event {
1188 BufferStoreEvent::BufferAdded(buffer) => {
1189 cx.subscribe(&buffer, |this, buffer, event, cx| {
1190 if let BufferEvent::LanguageChanged = event {
1191 let buffer_id = buffer.read(cx).remote_id();
1192 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1193 diff_state.update(cx, |diff_state, cx| {
1194 diff_state.buffer_language_changed(buffer, cx);
1195 });
1196 }
1197 }
1198 })
1199 .detach();
1200 }
1201 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1202 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1203 diffs.remove(buffer_id);
1204 }
1205 }
1206 BufferStoreEvent::BufferDropped(buffer_id) => {
1207 self.diffs.remove(&buffer_id);
1208 for diffs in self.shared_diffs.values_mut() {
1209 diffs.remove(buffer_id);
1210 }
1211 }
1212
1213 _ => {}
1214 }
1215 }
1216
1217 pub fn recalculate_buffer_diffs(
1218 &mut self,
1219 buffers: Vec<Entity<Buffer>>,
1220 cx: &mut Context<Self>,
1221 ) -> impl Future<Output = ()> + use<> {
1222 let mut futures = Vec::new();
1223 for buffer in buffers {
1224 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1225 let buffer = buffer.read(cx).text_snapshot();
1226 futures.push(diff_state.update(cx, |diff_state, cx| {
1227 diff_state.recalculate_diffs(
1228 buffer,
1229 diff_state.hunk_staging_operation_count,
1230 cx,
1231 )
1232 }));
1233 }
1234 }
1235 async move {
1236 futures::future::join_all(futures).await;
1237 }
1238 }
1239
1240 fn on_buffer_diff_event(
1241 &mut self,
1242 diff: Entity<buffer_diff::BufferDiff>,
1243 event: &BufferDiffEvent,
1244 cx: &mut Context<Self>,
1245 ) {
1246 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
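            // Persist staged or unstaged hunks by writing the new index text
            // back to the repository.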
1247 let buffer_id = diff.read(cx).buffer_id;
1248 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1249 diff_state.update(cx, |diff_state, _| {
1250 diff_state.hunk_staging_operation_count += 1;
1251 });
1252 }
1253 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1254 let recv = repo.update(cx, |repo, cx| {
1255 log::debug!("updating index text for buffer {}", path.display());
1256 repo.spawn_set_index_text_job(
1257 path,
1258 new_index_text.as_ref().map(|rope| rope.to_string()),
1259 cx,
1260 )
1261 });
1262 let diff = diff.downgrade();
1263 cx.spawn(async move |this, cx| {
1264 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1265 diff.update(cx, |diff, cx| {
1266 diff.clear_pending_hunks(cx);
1267 })
1268 .ok();
1269 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1270 .ok();
1271 }
1272 })
1273 .detach();
1274 }
1275 }
1276 }
1277
1278 fn local_worktree_git_repos_changed(
1279 &mut self,
1280 worktree: Entity<Worktree>,
1281 changed_repos: &UpdatedGitRepositoriesSet,
1282 cx: &mut Context<Self>,
1283 ) {
1284 log::debug!("local worktree repos changed");
1285 debug_assert!(worktree.read(cx).is_local());
1286
1287 let mut diff_state_updates = HashMap::<Entity<Repository>, Vec<_>>::default();
1288 for (buffer_id, diff_state) in &self.diffs {
1289 let Some(buffer) = self.buffer_store.read(cx).get(*buffer_id) else {
1290 continue;
1291 };
1292 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1293 continue;
1294 };
1295 if file.worktree != worktree {
1296 continue;
1297 }
1298 let Some((repo, repo_path)) =
1299 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
1300 else {
1301 continue;
1302 };
1303 if !changed_repos.iter().any(|update| {
1304 update.old_work_directory_abs_path.as_ref()
1305 == Some(&repo.read(cx).work_directory_abs_path)
1306 || update.new_work_directory_abs_path.as_ref()
1307 == Some(&repo.read(cx).work_directory_abs_path)
1308 }) {
1309 continue;
1310 }
1311
1312 let diff_state = diff_state.read(cx);
1313 let has_unstaged_diff = diff_state
1314 .unstaged_diff
1315 .as_ref()
1316 .is_some_and(|diff| diff.is_upgradable());
1317 let has_uncommitted_diff = diff_state
1318 .uncommitted_diff
1319 .as_ref()
1320 .is_some_and(|set| set.is_upgradable());
1321
1322 let update = (
1323 buffer,
1324 repo_path,
1325 has_unstaged_diff.then(|| diff_state.index_text.clone()),
1326 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
1327 diff_state.hunk_staging_operation_count,
1328 );
1329 diff_state_updates.entry(repo).or_default().push(update);
1330 }
1331
1332 if diff_state_updates.is_empty() {
1333 return;
1334 }
1335
1336 for (repo, repo_diff_state_updates) in diff_state_updates.into_iter() {
1337 let git_store = cx.weak_entity();
1338
1339 let _ = repo.update(cx, |repo, _| {
1340 repo.send_keyed_job(
1341 Some(GitJobKey::BatchReadIndex),
1342 None,
1343 |state, mut cx| async move {
1344 let RepositoryState::Local { backend, .. } = state else {
1345 log::error!("tried to recompute diffs for a non-local repository");
1346 return;
1347 };
1348 let mut diff_bases_changes_by_buffer = Vec::new();
1349 for (
1350 buffer,
1351 repo_path,
1352 current_index_text,
1353 current_head_text,
1354 hunk_staging_operation_count,
1355 ) in &repo_diff_state_updates
1356 {
1357 let index_text = if current_index_text.is_some() {
1358 backend.load_index_text(repo_path.clone()).await
1359 } else {
1360 None
1361 };
1362 let head_text = if current_head_text.is_some() {
1363 backend.load_committed_text(repo_path.clone()).await
1364 } else {
1365 None
1366 };
1367
1368 // Avoid triggering a diff update if the base text has not changed.
1369 if let Some((current_index, current_head)) =
1370 current_index_text.as_ref().zip(current_head_text.as_ref())
1371 {
1372 if current_index.as_deref() == index_text.as_ref()
1373 && current_head.as_deref() == head_text.as_ref()
1374 {
1375 continue;
1376 }
1377 }
1378
1379 let diff_bases_change =
1380 match (current_index_text.is_some(), current_head_text.is_some()) {
1381 (true, true) => Some(if index_text == head_text {
1382 DiffBasesChange::SetBoth(head_text)
1383 } else {
1384 DiffBasesChange::SetEach {
1385 index: index_text,
1386 head: head_text,
1387 }
1388 }),
1389 (true, false) => Some(DiffBasesChange::SetIndex(index_text)),
1390 (false, true) => Some(DiffBasesChange::SetHead(head_text)),
1391 (false, false) => None,
1392 };
1393
1394 diff_bases_changes_by_buffer.push((
1395 buffer,
1396 diff_bases_change,
1397 *hunk_staging_operation_count,
1398 ))
1399 }
1400
1401 git_store
1402 .update(&mut cx, |git_store, cx| {
1403 for (buffer, diff_bases_change, hunk_staging_operation_count) in
1404 diff_bases_changes_by_buffer
1405 {
1406 let Some(diff_state) =
1407 git_store.diffs.get(&buffer.read(cx).remote_id())
1408 else {
1409 continue;
1410 };
1411 let Some(diff_bases_change) = diff_bases_change else {
1412 continue;
1413 };
1414
1415 let downstream_client = git_store.downstream_client();
1416 diff_state.update(cx, |diff_state, cx| {
1417 use proto::update_diff_bases::Mode;
1418
1419 let buffer = buffer.read(cx);
1420 if let Some((client, project_id)) = downstream_client {
1421 let (staged_text, committed_text, mode) =
1422 match diff_bases_change.clone() {
1423 DiffBasesChange::SetIndex(index) => {
1424 (index, None, Mode::IndexOnly)
1425 }
1426 DiffBasesChange::SetHead(head) => {
1427 (None, head, Mode::HeadOnly)
1428 }
1429 DiffBasesChange::SetEach { index, head } => {
1430 (index, head, Mode::IndexAndHead)
1431 }
1432 DiffBasesChange::SetBoth(text) => {
1433 (None, text, Mode::IndexMatchesHead)
1434 }
1435 };
1436 let message = proto::UpdateDiffBases {
1437 project_id: project_id.to_proto(),
1438 buffer_id: buffer.remote_id().to_proto(),
1439 staged_text,
1440 committed_text,
1441 mode: mode as i32,
1442 };
1443
1444 client.send(message).log_err();
1445 }
1446
1447 let _ = diff_state.diff_bases_changed(
1448 buffer.text_snapshot(),
1449 diff_bases_change,
1450 hunk_staging_operation_count,
1451 cx,
1452 );
1453 });
1454 }
1455 })
1456 .ok();
1457 },
1458 )
1459 });
1460 }
1461 }
1462
1463 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1464 &self.repositories
1465 }
1466
1467 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1468 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1469 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1470 Some(status.status)
1471 }
1472
1473 pub fn repository_and_path_for_buffer_id(
1474 &self,
1475 buffer_id: BufferId,
1476 cx: &App,
1477 ) -> Option<(Entity<Repository>, RepoPath)> {
1478 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1479 let project_path = buffer.read(cx).project_path(cx)?;
1480 self.repository_and_path_for_project_path(&project_path, cx)
1481 }
1482
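    /// Returns the repository containing the given project path, along with the
    /// corresponding repository-relative path, preferring the innermost
    /// repository when several contain it.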
1483 pub fn repository_and_path_for_project_path(
1484 &self,
1485 path: &ProjectPath,
1486 cx: &App,
1487 ) -> Option<(Entity<Repository>, RepoPath)> {
1488 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1489 self.repositories
1490 .values()
1491 .filter_map(|repo| {
1492 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1493 Some((repo.clone(), repo_path))
1494 })
1495 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1496 }
1497
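    /// Initializes a new Git repository at the given path, either locally or by
    /// forwarding the request upstream.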
1498 pub fn git_init(
1499 &self,
1500 path: Arc<Path>,
1501 fallback_branch_name: String,
1502 cx: &App,
1503 ) -> Task<Result<()>> {
1504 match &self.state {
1505 GitStoreState::Local { fs, .. } => {
1506 let fs = fs.clone();
1507 cx.background_executor()
1508 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1509 }
1510 GitStoreState::Ssh {
1511 upstream_client,
1512 upstream_project_id: project_id,
1513 ..
1514 }
1515 | GitStoreState::Remote {
1516 upstream_client,
1517 upstream_project_id: project_id,
1518 ..
1519 } => {
1520 let client = upstream_client.clone();
1521 let project_id = *project_id;
1522 cx.background_executor().spawn(async move {
1523 client
1524 .request(proto::GitInit {
1525 project_id: project_id.0,
1526 abs_path: path.to_string_lossy().to_string(),
1527 fallback_branch_name,
1528 })
1529 .await?;
1530 Ok(())
1531 })
1532 }
1533 }
1534 }
1535
1536 async fn handle_update_repository(
1537 this: Entity<Self>,
1538 envelope: TypedEnvelope<proto::UpdateRepository>,
1539 mut cx: AsyncApp,
1540 ) -> Result<()> {
1541 this.update(&mut cx, |this, cx| {
1542 let mut update = envelope.payload;
1543
1544 let id = RepositoryId::from_proto(update.id);
1545 let client = this
1546 .upstream_client()
1547 .context("no upstream client")?
1548 .clone();
1549
1550 let mut is_new = false;
1551 let repo = this.repositories.entry(id).or_insert_with(|| {
1552 is_new = true;
1553 let git_store = cx.weak_entity();
1554 cx.new(|cx| {
1555 Repository::remote(
1556 id,
1557 Path::new(&update.abs_path).into(),
1558 ProjectId(update.project_id),
1559 client,
1560 git_store,
1561 cx,
1562 )
1563 })
1564 });
1565 if is_new {
1566 this._subscriptions
1567 .push(cx.subscribe(&repo, Self::on_repository_event))
1568 }
1569
1570 repo.update(cx, {
1571 let update = update.clone();
1572 |repo, cx| repo.apply_remote_update(update, cx)
1573 })?;
1574
1575 this.active_repo_id.get_or_insert_with(|| {
1576 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1577 id
1578 });
1579
1580 if let Some((client, project_id)) = this.downstream_client() {
1581 update.project_id = project_id.to_proto();
1582 client.send(update).log_err();
1583 }
1584 Ok(())
1585 })?
1586 }
1587
1588 async fn handle_remove_repository(
1589 this: Entity<Self>,
1590 envelope: TypedEnvelope<proto::RemoveRepository>,
1591 mut cx: AsyncApp,
1592 ) -> Result<()> {
1593 this.update(&mut cx, |this, cx| {
1594 let mut update = envelope.payload;
1595 let id = RepositoryId::from_proto(update.id);
1596 this.repositories.remove(&id);
1597 if let Some((client, project_id)) = this.downstream_client() {
1598 update.project_id = project_id.to_proto();
1599 client.send(update).log_err();
1600 }
1601 if this.active_repo_id == Some(id) {
1602 this.active_repo_id = None;
1603 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1604 }
1605 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1606 })
1607 }
1608
1609 async fn handle_git_init(
1610 this: Entity<Self>,
1611 envelope: TypedEnvelope<proto::GitInit>,
1612 cx: AsyncApp,
1613 ) -> Result<proto::Ack> {
1614 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1615 let name = envelope.payload.fallback_branch_name;
1616 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1617 .await?;
1618
1619 Ok(proto::Ack {})
1620 }
1621
1622 async fn handle_fetch(
1623 this: Entity<Self>,
1624 envelope: TypedEnvelope<proto::Fetch>,
1625 mut cx: AsyncApp,
1626 ) -> Result<proto::RemoteMessageResponse> {
1627 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1628 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1629 let askpass_id = envelope.payload.askpass_id;
1630
1631 let askpass = make_remote_delegate(
1632 this,
1633 envelope.payload.project_id,
1634 repository_id,
1635 askpass_id,
1636 &mut cx,
1637 );
1638
1639 let remote_output = repository_handle
1640 .update(&mut cx, |repository_handle, cx| {
1641 repository_handle.fetch(askpass, cx)
1642 })?
1643 .await??;
1644
1645 Ok(proto::RemoteMessageResponse {
1646 stdout: remote_output.stdout,
1647 stderr: remote_output.stderr,
1648 })
1649 }
1650
1651 async fn handle_push(
1652 this: Entity<Self>,
1653 envelope: TypedEnvelope<proto::Push>,
1654 mut cx: AsyncApp,
1655 ) -> Result<proto::RemoteMessageResponse> {
1656 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1657 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1658
1659 let askpass_id = envelope.payload.askpass_id;
1660 let askpass = make_remote_delegate(
1661 this,
1662 envelope.payload.project_id,
1663 repository_id,
1664 askpass_id,
1665 &mut cx,
1666 );
1667
1668 let options = envelope
1669 .payload
1670 .options
1671 .as_ref()
1672 .map(|_| match envelope.payload.options() {
1673 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1674 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1675 });
1676
1677 let branch_name = envelope.payload.branch_name.into();
1678 let remote_name = envelope.payload.remote_name.into();
1679
1680 let remote_output = repository_handle
1681 .update(&mut cx, |repository_handle, cx| {
1682 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1683 })?
1684 .await??;
1685 Ok(proto::RemoteMessageResponse {
1686 stdout: remote_output.stdout,
1687 stderr: remote_output.stderr,
1688 })
1689 }
1690
1691 async fn handle_pull(
1692 this: Entity<Self>,
1693 envelope: TypedEnvelope<proto::Pull>,
1694 mut cx: AsyncApp,
1695 ) -> Result<proto::RemoteMessageResponse> {
1696 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1697 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1698 let askpass_id = envelope.payload.askpass_id;
1699 let askpass = make_remote_delegate(
1700 this,
1701 envelope.payload.project_id,
1702 repository_id,
1703 askpass_id,
1704 &mut cx,
1705 );
1706
1707 let branch_name = envelope.payload.branch_name.into();
1708 let remote_name = envelope.payload.remote_name.into();
1709
1710 let remote_message = repository_handle
1711 .update(&mut cx, |repository_handle, cx| {
1712 repository_handle.pull(branch_name, remote_name, askpass, cx)
1713 })?
1714 .await??;
1715
1716 Ok(proto::RemoteMessageResponse {
1717 stdout: remote_message.stdout,
1718 stderr: remote_message.stderr,
1719 })
1720 }
1721
1722 async fn handle_stage(
1723 this: Entity<Self>,
1724 envelope: TypedEnvelope<proto::Stage>,
1725 mut cx: AsyncApp,
1726 ) -> Result<proto::Ack> {
1727 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1728 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1729
1730 let entries = envelope
1731 .payload
1732 .paths
1733 .into_iter()
1734 .map(PathBuf::from)
1735 .map(RepoPath::new)
1736 .collect();
1737
1738 repository_handle
1739 .update(&mut cx, |repository_handle, cx| {
1740 repository_handle.stage_entries(entries, cx)
1741 })?
1742 .await?;
1743 Ok(proto::Ack {})
1744 }
1745
1746 async fn handle_unstage(
1747 this: Entity<Self>,
1748 envelope: TypedEnvelope<proto::Unstage>,
1749 mut cx: AsyncApp,
1750 ) -> Result<proto::Ack> {
1751 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1752 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1753
1754 let entries = envelope
1755 .payload
1756 .paths
1757 .into_iter()
1758 .map(PathBuf::from)
1759 .map(RepoPath::new)
1760 .collect();
1761
1762 repository_handle
1763 .update(&mut cx, |repository_handle, cx| {
1764 repository_handle.unstage_entries(entries, cx)
1765 })?
1766 .await?;
1767
1768 Ok(proto::Ack {})
1769 }
1770
1771 async fn handle_set_index_text(
1772 this: Entity<Self>,
1773 envelope: TypedEnvelope<proto::SetIndexText>,
1774 mut cx: AsyncApp,
1775 ) -> Result<proto::Ack> {
1776 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1777 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1778
1779 repository_handle
1780 .update(&mut cx, |repository_handle, cx| {
1781 repository_handle.spawn_set_index_text_job(
1782 RepoPath::from_str(&envelope.payload.path),
1783 envelope.payload.text,
1784 cx,
1785 )
1786 })?
1787 .await??;
1788 Ok(proto::Ack {})
1789 }
1790
1791 async fn handle_commit(
1792 this: Entity<Self>,
1793 envelope: TypedEnvelope<proto::Commit>,
1794 mut cx: AsyncApp,
1795 ) -> Result<proto::Ack> {
1796 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1797 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1798
1799 let message = SharedString::from(envelope.payload.message);
1800 let name = envelope.payload.name.map(SharedString::from);
1801 let email = envelope.payload.email.map(SharedString::from);
1802
1803 repository_handle
1804 .update(&mut cx, |repository_handle, cx| {
1805 repository_handle.commit(message, name.zip(email), cx)
1806 })?
1807 .await??;
1808 Ok(proto::Ack {})
1809 }
1810
1811 async fn handle_get_remotes(
1812 this: Entity<Self>,
1813 envelope: TypedEnvelope<proto::GetRemotes>,
1814 mut cx: AsyncApp,
1815 ) -> Result<proto::GetRemotesResponse> {
1816 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1817 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1818
1819 let branch_name = envelope.payload.branch_name;
1820
1821 let remotes = repository_handle
1822 .update(&mut cx, |repository_handle, _| {
1823 repository_handle.get_remotes(branch_name)
1824 })?
1825 .await??;
1826
1827 Ok(proto::GetRemotesResponse {
1828 remotes: remotes
1829 .into_iter()
1830 .map(|remotes| proto::get_remotes_response::Remote {
1831 name: remotes.name.to_string(),
1832 })
1833 .collect::<Vec<_>>(),
1834 })
1835 }
1836
1837 async fn handle_get_branches(
1838 this: Entity<Self>,
1839 envelope: TypedEnvelope<proto::GitGetBranches>,
1840 mut cx: AsyncApp,
1841 ) -> Result<proto::GitBranchesResponse> {
1842 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1843 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1844
1845 let branches = repository_handle
1846 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1847 .await??;
1848
1849 Ok(proto::GitBranchesResponse {
1850 branches: branches
1851 .into_iter()
1852 .map(|branch| branch_to_proto(&branch))
1853 .collect::<Vec<_>>(),
1854 })
1855 }
1856 async fn handle_create_branch(
1857 this: Entity<Self>,
1858 envelope: TypedEnvelope<proto::GitCreateBranch>,
1859 mut cx: AsyncApp,
1860 ) -> Result<proto::Ack> {
1861 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1862 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1863 let branch_name = envelope.payload.branch_name;
1864
1865 repository_handle
1866 .update(&mut cx, |repository_handle, _| {
1867 repository_handle.create_branch(branch_name)
1868 })?
1869 .await??;
1870
1871 Ok(proto::Ack {})
1872 }
1873
1874 async fn handle_change_branch(
1875 this: Entity<Self>,
1876 envelope: TypedEnvelope<proto::GitChangeBranch>,
1877 mut cx: AsyncApp,
1878 ) -> Result<proto::Ack> {
1879 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1880 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1881 let branch_name = envelope.payload.branch_name;
1882
1883 repository_handle
1884 .update(&mut cx, |repository_handle, _| {
1885 repository_handle.change_branch(branch_name)
1886 })?
1887 .await??;
1888
1889 Ok(proto::Ack {})
1890 }
1891
1892 async fn handle_show(
1893 this: Entity<Self>,
1894 envelope: TypedEnvelope<proto::GitShow>,
1895 mut cx: AsyncApp,
1896 ) -> Result<proto::GitCommitDetails> {
1897 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1898 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1899
1900 let commit = repository_handle
1901 .update(&mut cx, |repository_handle, _| {
1902 repository_handle.show(envelope.payload.commit)
1903 })?
1904 .await??;
1905 Ok(proto::GitCommitDetails {
1906 sha: commit.sha.into(),
1907 message: commit.message.into(),
1908 commit_timestamp: commit.commit_timestamp,
1909 author_email: commit.author_email.into(),
1910 author_name: commit.author_name.into(),
1911 })
1912 }
1913
1914 async fn handle_load_commit_diff(
1915 this: Entity<Self>,
1916 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1917 mut cx: AsyncApp,
1918 ) -> Result<proto::LoadCommitDiffResponse> {
1919 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1920 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1921
1922 let commit_diff = repository_handle
1923 .update(&mut cx, |repository_handle, _| {
1924 repository_handle.load_commit_diff(envelope.payload.commit)
1925 })?
1926 .await??;
1927 Ok(proto::LoadCommitDiffResponse {
1928 files: commit_diff
1929 .files
1930 .into_iter()
1931 .map(|file| proto::CommitFile {
1932 path: file.path.to_string(),
1933 old_text: file.old_text,
1934 new_text: file.new_text,
1935 })
1936 .collect(),
1937 })
1938 }
1939
1940 async fn handle_reset(
1941 this: Entity<Self>,
1942 envelope: TypedEnvelope<proto::GitReset>,
1943 mut cx: AsyncApp,
1944 ) -> Result<proto::Ack> {
1945 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1946 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1947
1948 let mode = match envelope.payload.mode() {
1949 git_reset::ResetMode::Soft => ResetMode::Soft,
1950 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1951 };
1952
1953 repository_handle
1954 .update(&mut cx, |repository_handle, cx| {
1955 repository_handle.reset(envelope.payload.commit, mode, cx)
1956 })?
1957 .await??;
1958 Ok(proto::Ack {})
1959 }
1960
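/// Handles a remote request to check out the given paths from a commit.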
1961 async fn handle_checkout_files(
1962 this: Entity<Self>,
1963 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
1964 mut cx: AsyncApp,
1965 ) -> Result<proto::Ack> {
1966 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1967 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1968 let paths = envelope
1969 .payload
1970 .paths
1971 .iter()
1972 .map(|s| RepoPath::from_str(s))
1973 .collect();
1974
1975 repository_handle
1976 .update(&mut cx, |repository_handle, cx| {
1977 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
1978 })?
1979 .await??;
1980 Ok(proto::Ack {})
1981 }
1982
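/// Handles a remote request to open the commit message buffer, replicating it to the requesting peer.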
1983 async fn handle_open_commit_message_buffer(
1984 this: Entity<Self>,
1985 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
1986 mut cx: AsyncApp,
1987 ) -> Result<proto::OpenBufferResponse> {
1988 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1989 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1990 let buffer = repository
1991 .update(&mut cx, |repository, cx| {
1992 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
1993 })?
1994 .await?;
1995
1996 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
1997 this.update(&mut cx, |this, cx| {
1998 this.buffer_store.update(cx, |buffer_store, cx| {
1999 buffer_store
2000 .create_buffer_for_peer(
2001 &buffer,
2002 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2003 cx,
2004 )
2005 .detach_and_log_err(cx);
2006 })
2007 })?;
2008
2009 Ok(proto::OpenBufferResponse {
2010 buffer_id: buffer_id.to_proto(),
2011 })
2012 }
2013
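/// Handles an askpass prompt relayed from the machine running the git command, answering it with this peer's askpass delegate.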
2014 async fn handle_askpass(
2015 this: Entity<Self>,
2016 envelope: TypedEnvelope<proto::AskPassRequest>,
2017 mut cx: AsyncApp,
2018 ) -> Result<proto::AskPassResponse> {
2019 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2020 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2021
2022 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2023 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2024 debug_panic!("no askpass found");
2025 return Err(anyhow::anyhow!("no askpass found"));
2026 };
2027
2028 let response = askpass.ask_password(envelope.payload.prompt).await?;
2029
2030 delegates
2031 .lock()
2032 .insert(envelope.payload.askpass_id, askpass);
2033
2034 Ok(proto::AskPassResponse { response })
2035 }
2036
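/// Handles a remote request to check which branches the current commit has already been pushed to.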
2037 async fn handle_check_for_pushed_commits(
2038 this: Entity<Self>,
2039 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2040 mut cx: AsyncApp,
2041 ) -> Result<proto::CheckForPushedCommitsResponse> {
2042 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2043 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2044
2045 let branches = repository_handle
2046 .update(&mut cx, |repository_handle, _| {
2047 repository_handle.check_for_pushed_commits()
2048 })?
2049 .await??;
2050 Ok(proto::CheckForPushedCommitsResponse {
2051 pushed_to: branches
2052 .into_iter()
2053 .map(|commit| commit.to_string())
2054 .collect(),
2055 })
2056 }
2057
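/// Handles a remote request for a textual diff, either head-to-index or head-to-worktree.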
2058 async fn handle_git_diff(
2059 this: Entity<Self>,
2060 envelope: TypedEnvelope<proto::GitDiff>,
2061 mut cx: AsyncApp,
2062 ) -> Result<proto::GitDiffResponse> {
2063 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2064 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2065 let diff_type = match envelope.payload.diff_type() {
2066 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2067 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2068 };
2069
2070 let mut diff = repository_handle
2071 .update(&mut cx, |repository_handle, cx| {
2072 repository_handle.diff(diff_type, cx)
2073 })?
2074 .await??;
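// Cap very large diffs (measured in characters here) before sending them back over RPC.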
2075 const ONE_MB: usize = 1_000_000;
2076 if diff.len() > ONE_MB {
2077 diff = diff.chars().take(ONE_MB).collect()
2078 }
2079
2080 Ok(proto::GitDiffResponse { diff })
2081 }
2082
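/// Handles a remote request to open the unstaged diff for a buffer, recording the diff as shared with that peer.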
2083 async fn handle_open_unstaged_diff(
2084 this: Entity<Self>,
2085 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2086 mut cx: AsyncApp,
2087 ) -> Result<proto::OpenUnstagedDiffResponse> {
2088 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2089 let diff = this
2090 .update(&mut cx, |this, cx| {
2091 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2092 Some(this.open_unstaged_diff(buffer, cx))
2093 })?
2094 .ok_or_else(|| anyhow!("no such buffer"))?
2095 .await?;
2096 this.update(&mut cx, |this, _| {
2097 let shared_diffs = this
2098 .shared_diffs
2099 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2100 .or_default();
2101 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2102 })?;
2103 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2104 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2105 }
2106
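/// Handles a remote request to open the uncommitted diff for a buffer, indicating whether the index matches HEAD.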
2107 async fn handle_open_uncommitted_diff(
2108 this: Entity<Self>,
2109 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2110 mut cx: AsyncApp,
2111 ) -> Result<proto::OpenUncommittedDiffResponse> {
2112 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2113 let diff = this
2114 .update(&mut cx, |this, cx| {
2115 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2116 Some(this.open_uncommitted_diff(buffer, cx))
2117 })?
2118 .ok_or_else(|| anyhow!("no such buffer"))?
2119 .await?;
2120 this.update(&mut cx, |this, _| {
2121 let shared_diffs = this
2122 .shared_diffs
2123 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2124 .or_default();
2125 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2126 })?;
2127 diff.read_with(&cx, |diff, cx| {
2128 use proto::open_uncommitted_diff_response::Mode;
2129
2130 let unstaged_diff = diff.secondary_diff();
2131 let index_snapshot = unstaged_diff.and_then(|diff| {
2132 let diff = diff.read(cx);
2133 diff.base_text_exists().then(|| diff.base_text())
2134 });
2135
2136 let mode;
2137 let staged_text;
2138 let committed_text;
2139 if diff.base_text_exists() {
2140 let committed_snapshot = diff.base_text();
2141 committed_text = Some(committed_snapshot.text());
2142 if let Some(index_text) = index_snapshot {
2143 if index_text.remote_id() == committed_snapshot.remote_id() {
2144 mode = Mode::IndexMatchesHead;
2145 staged_text = None;
2146 } else {
2147 mode = Mode::IndexAndHead;
2148 staged_text = Some(index_text.text());
2149 }
2150 } else {
2151 mode = Mode::IndexAndHead;
2152 staged_text = None;
2153 }
2154 } else {
2155 mode = Mode::IndexAndHead;
2156 committed_text = None;
2157 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2158 }
2159
2160 proto::OpenUncommittedDiffResponse {
2161 committed_text,
2162 staged_text,
2163 mode: mode.into(),
2164 }
2165 })
2166 }
2167
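/// Applies new index and HEAD base texts received over RPC to the local diff state for a buffer.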
2168 async fn handle_update_diff_bases(
2169 this: Entity<Self>,
2170 request: TypedEnvelope<proto::UpdateDiffBases>,
2171 mut cx: AsyncApp,
2172 ) -> Result<()> {
2173 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2174 this.update(&mut cx, |this, cx| {
2175 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2176 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2177 let buffer = buffer.read(cx).text_snapshot();
2178 diff_state.update(cx, |diff_state, cx| {
2179 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2180 })
2181 }
2182 }
2183 })
2184 }
2185
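/// Handles a remote request to blame a buffer at a given version, waiting for the buffer to reach that version first.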
2186 async fn handle_blame_buffer(
2187 this: Entity<Self>,
2188 envelope: TypedEnvelope<proto::BlameBuffer>,
2189 mut cx: AsyncApp,
2190 ) -> Result<proto::BlameBufferResponse> {
2191 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2192 let version = deserialize_version(&envelope.payload.version);
2193 let buffer = this.read_with(&cx, |this, cx| {
2194 this.buffer_store.read(cx).get_existing(buffer_id)
2195 })??;
2196 buffer
2197 .update(&mut cx, |buffer, _| {
2198 buffer.wait_for_version(version.clone())
2199 })?
2200 .await?;
2201 let blame = this
2202 .update(&mut cx, |this, cx| {
2203 this.blame_buffer(&buffer, Some(version), cx)
2204 })?
2205 .await?;
2206 Ok(serialize_blame_buffer_response(blame))
2207 }
2208
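/// Handles a remote request to build a permalink for a line range in a buffer's file.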
2209 async fn handle_get_permalink_to_line(
2210 this: Entity<Self>,
2211 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2212 mut cx: AsyncApp,
2213 ) -> Result<proto::GetPermalinkToLineResponse> {
2214 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2215 // let version = deserialize_version(&envelope.payload.version);
2216 let selection = {
2217 let proto_selection = envelope
2218 .payload
2219 .selection
2220 .context("no selection to get permalink for defined")?;
2221 proto_selection.start as u32..proto_selection.end as u32
2222 };
2223 let buffer = this.read_with(&cx, |this, cx| {
2224 this.buffer_store.read(cx).get_existing(buffer_id)
2225 })??;
2226 let permalink = this
2227 .update(&mut cx, |this, cx| {
2228 this.get_permalink_to_line(&buffer, selection, cx)
2229 })?
2230 .await?;
2231 Ok(proto::GetPermalinkToLineResponse {
2232 permalink: permalink.to_string(),
2233 })
2234 }
2235
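/// Resolves the repository referenced by a request, failing if it is no longer tracked by this store.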
2236 fn repository_for_request(
2237 this: &Entity<Self>,
2238 id: RepositoryId,
2239 cx: &mut AsyncApp,
2240 ) -> Result<Entity<Repository>> {
2241 this.update(cx, |this, _| {
2242 this.repositories
2243 .get(&id)
2244 .context("missing repository handle")
2245 .cloned()
2246 })?
2247 }
2248
2249 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2250 self.repositories
2251 .iter()
2252 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2253 .collect()
2254 }
2255}
2256
2257impl BufferDiffState {
2258 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2259 self.language = buffer.read(cx).language().cloned();
2260 self.language_changed = true;
2261 let _ = self.recalculate_diffs(
2262 buffer.read(cx).text_snapshot(),
2263 self.hunk_staging_operation_count,
2264 cx,
2265 );
2266 }
2267
2268 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2269 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2270 }
2271
2272 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2273 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2274 }
2275
2276 fn handle_base_texts_updated(
2277 &mut self,
2278 buffer: text::BufferSnapshot,
2279 message: proto::UpdateDiffBases,
2280 cx: &mut Context<Self>,
2281 ) {
2282 use proto::update_diff_bases::Mode;
2283
2284 let Some(mode) = Mode::from_i32(message.mode) else {
2285 return;
2286 };
2287
2288 let diff_bases_change = match mode {
2289 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2290 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2291 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2292 Mode::IndexAndHead => DiffBasesChange::SetEach {
2293 index: message.staged_text,
2294 head: message.committed_text,
2295 },
2296 };
2297
2298 let _ = self.diff_bases_changed(
2299 buffer,
2300 diff_bases_change,
2301 self.hunk_staging_operation_count,
2302 cx,
2303 );
2304 }
2305
2306 pub fn wait_for_recalculation(&mut self) -> Option<oneshot::Receiver<()>> {
2307 if self.diff_updated_futures.is_empty() {
2308 return None;
2309 }
2310 let (tx, rx) = oneshot::channel();
2311 self.diff_updated_futures.push(tx);
2312 Some(rx)
2313 }
2314
2315 fn diff_bases_changed(
2316 &mut self,
2317 buffer: text::BufferSnapshot,
2318 diff_bases_change: DiffBasesChange,
2319 prev_hunk_staging_operation_count: usize,
2320 cx: &mut Context<Self>,
2321 ) -> oneshot::Receiver<()> {
2322 match diff_bases_change {
2323 DiffBasesChange::SetIndex(index) => {
2324 self.index_text = index.map(|mut index| {
2325 text::LineEnding::normalize(&mut index);
2326 Arc::new(index)
2327 });
2328 self.index_changed = true;
2329 }
2330 DiffBasesChange::SetHead(head) => {
2331 self.head_text = head.map(|mut head| {
2332 text::LineEnding::normalize(&mut head);
2333 Arc::new(head)
2334 });
2335 self.head_changed = true;
2336 }
2337 DiffBasesChange::SetBoth(text) => {
2338 let text = text.map(|mut text| {
2339 text::LineEnding::normalize(&mut text);
2340 Arc::new(text)
2341 });
2342 self.head_text = text.clone();
2343 self.index_text = text;
2344 self.head_changed = true;
2345 self.index_changed = true;
2346 }
2347 DiffBasesChange::SetEach { index, head } => {
2348 self.index_text = index.map(|mut index| {
2349 text::LineEnding::normalize(&mut index);
2350 Arc::new(index)
2351 });
2352 self.index_changed = true;
2353 self.head_text = head.map(|mut head| {
2354 text::LineEnding::normalize(&mut head);
2355 Arc::new(head)
2356 });
2357 self.head_changed = true;
2358 }
2359 }
2360
2361 self.recalculate_diffs(buffer, prev_hunk_staging_operation_count, cx)
2362 }
2363
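/// Recomputes the unstaged and uncommitted diffs in the background, discarding the result if a newer hunk-staging operation happened in the meantime.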
2364 fn recalculate_diffs(
2365 &mut self,
2366 buffer: text::BufferSnapshot,
2367 prev_hunk_staging_operation_count: usize,
2368 cx: &mut Context<Self>,
2369 ) -> oneshot::Receiver<()> {
2370 log::debug!("recalculate diffs");
2371 let (tx, rx) = oneshot::channel();
2372 self.diff_updated_futures.push(tx);
2373
2374 let language = self.language.clone();
2375 let language_registry = self.language_registry.clone();
2376 let unstaged_diff = self.unstaged_diff();
2377 let uncommitted_diff = self.uncommitted_diff();
2378 let head = self.head_text.clone();
2379 let index = self.index_text.clone();
2380 let index_changed = self.index_changed;
2381 let head_changed = self.head_changed;
2382 let language_changed = self.language_changed;
2383 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2384 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2385 (None, None) => true,
2386 _ => false,
2387 };
2388 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2389 let mut new_unstaged_diff = None;
2390 if let Some(unstaged_diff) = &unstaged_diff {
2391 new_unstaged_diff = Some(
2392 BufferDiff::update_diff(
2393 unstaged_diff.clone(),
2394 buffer.clone(),
2395 index,
2396 index_changed,
2397 language_changed,
2398 language.clone(),
2399 language_registry.clone(),
2400 cx,
2401 )
2402 .await?,
2403 );
2404 }
2405
2406 let mut new_uncommitted_diff = None;
2407 if let Some(uncommitted_diff) = &uncommitted_diff {
2408 new_uncommitted_diff = if index_matches_head {
2409 new_unstaged_diff.clone()
2410 } else {
2411 Some(
2412 BufferDiff::update_diff(
2413 uncommitted_diff.clone(),
2414 buffer.clone(),
2415 head,
2416 head_changed,
2417 language_changed,
2418 language.clone(),
2419 language_registry.clone(),
2420 cx,
2421 )
2422 .await?,
2423 )
2424 }
2425 }
2426
2427 if this.update(cx, |this, _| {
2428 this.hunk_staging_operation_count > prev_hunk_staging_operation_count
2429 })? {
2430 log::debug!("discarding diff recalculation: hunk staging operations changed while it was running");
2431 return Ok(());
2432 }
2433
2434 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2435 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2436 {
2437 unstaged_diff.update(cx, |diff, cx| {
2438 if language_changed {
2439 diff.language_changed(cx);
2440 }
2441 diff.set_snapshot(new_unstaged_diff, &buffer, None, cx)
2442 })?
2443 } else {
2444 None
2445 };
2446
2447 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2448 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2449 {
2450 uncommitted_diff.update(cx, |diff, cx| {
2451 if language_changed {
2452 diff.language_changed(cx);
2453 }
2454 diff.set_snapshot(new_uncommitted_diff, &buffer, unstaged_changed_range, cx);
2455 })?;
2456 }
2457
2458 if let Some(this) = this.upgrade() {
2459 this.update(cx, |this, _| {
2460 this.index_changed = false;
2461 this.head_changed = false;
2462 this.language_changed = false;
2463 for tx in this.diff_updated_futures.drain(..) {
2464 tx.send(()).ok();
2465 }
2466 })?;
2467 }
2468
2469 Ok(())
2470 }));
2471
2472 rx
2473 }
2474}
2475
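/// Creates an askpass delegate that forwards password prompts to the downstream client and relays its responses back.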
2476fn make_remote_delegate(
2477 this: Entity<GitStore>,
2478 project_id: u64,
2479 repository_id: RepositoryId,
2480 askpass_id: u64,
2481 cx: &mut AsyncApp,
2482) -> AskPassDelegate {
2483 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2484 this.update(cx, |this, cx| {
2485 let Some((client, _)) = this.downstream_client() else {
2486 return;
2487 };
2488 let response = client.request(proto::AskPassRequest {
2489 project_id,
2490 repository_id: repository_id.to_proto(),
2491 askpass_id,
2492 prompt,
2493 });
2494 cx.spawn(async move |_, _| {
2495 tx.send(response.await?.response).ok();
2496 anyhow::Ok(())
2497 })
2498 .detach_and_log_err(cx);
2499 })
2500 .log_err();
2501 })
2502}
2503
2504impl RepositoryId {
2505 pub fn to_proto(self) -> u64 {
2506 self.0
2507 }
2508
2509 pub fn from_proto(id: u64) -> Self {
2510 RepositoryId(id)
2511 }
2512}
2513
2514impl RepositorySnapshot {
2515 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2516 Self {
2517 id,
2518 merge_message: None,
2519 statuses_by_path: Default::default(),
2520 work_directory_abs_path,
2521 branch: None,
2522 merge_conflicts: Default::default(),
2523 merge_head_shas: Default::default(),
2524 scan_id: 0,
2525 }
2526 }
2527
2528 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2529 proto::UpdateRepository {
2530 branch_summary: self.branch.as_ref().map(branch_to_proto),
2531 updated_statuses: self
2532 .statuses_by_path
2533 .iter()
2534 .map(|entry| entry.to_proto())
2535 .collect(),
2536 removed_statuses: Default::default(),
2537 current_merge_conflicts: self
2538 .merge_conflicts
2539 .iter()
2540 .map(|repo_path| repo_path.to_proto())
2541 .collect(),
2542 project_id,
2543 id: self.id.to_proto(),
2544 abs_path: self.work_directory_abs_path.to_proto(),
2545 entry_ids: vec![self.id.to_proto()],
2546 scan_id: self.scan_id,
2547 is_last_update: true,
2548 }
2549 }
2550
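/// Builds an incremental update by comparing this snapshot's statuses against an older snapshot.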
2551 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2552 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2553 let mut removed_statuses: Vec<String> = Vec::new();
2554
2555 let mut new_statuses = self.statuses_by_path.iter().peekable();
2556 let mut old_statuses = old.statuses_by_path.iter().peekable();
2557
2558 let mut current_new_entry = new_statuses.next();
2559 let mut current_old_entry = old_statuses.next();
2560 loop {
2561 match (current_new_entry, current_old_entry) {
2562 (Some(new_entry), Some(old_entry)) => {
2563 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2564 Ordering::Less => {
2565 updated_statuses.push(new_entry.to_proto());
2566 current_new_entry = new_statuses.next();
2567 }
2568 Ordering::Equal => {
2569 if new_entry.status != old_entry.status {
2570 updated_statuses.push(new_entry.to_proto());
2571 }
2572 current_old_entry = old_statuses.next();
2573 current_new_entry = new_statuses.next();
2574 }
2575 Ordering::Greater => {
2576 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2577 current_old_entry = old_statuses.next();
2578 }
2579 }
2580 }
2581 (None, Some(old_entry)) => {
2582 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2583 current_old_entry = old_statuses.next();
2584 }
2585 (Some(new_entry), None) => {
2586 updated_statuses.push(new_entry.to_proto());
2587 current_new_entry = new_statuses.next();
2588 }
2589 (None, None) => break,
2590 }
2591 }
2592
2593 proto::UpdateRepository {
2594 branch_summary: self.branch.as_ref().map(branch_to_proto),
2595 updated_statuses,
2596 removed_statuses,
2597 current_merge_conflicts: self
2598 .merge_conflicts
2599 .iter()
2600 .map(|path| path.as_ref().to_proto())
2601 .collect(),
2602 project_id,
2603 id: self.id.to_proto(),
2604 abs_path: self.work_directory_abs_path.to_proto(),
2605 entry_ids: vec![],
2606 scan_id: self.scan_id,
2607 is_last_update: true,
2608 }
2609 }
2610
2611 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2612 self.statuses_by_path.iter().cloned()
2613 }
2614
2615 pub fn status_summary(&self) -> GitSummary {
2616 self.statuses_by_path.summary().item_summary
2617 }
2618
2619 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2620 self.statuses_by_path
2621 .get(&PathKey(path.0.clone()), &())
2622 .cloned()
2623 }
2624
2625 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2626 abs_path
2627 .strip_prefix(&self.work_directory_abs_path)
2628 .map(RepoPath::from)
2629 .ok()
2630 }
2631
2632 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2633 self.merge_conflicts.contains(repo_path)
2634 }
2635
2636 /// This is the name that will be displayed in the repository selector for this repository.
2637 pub fn display_name(&self) -> SharedString {
2638 self.work_directory_abs_path
2639 .file_name()
2640 .unwrap_or_default()
2641 .to_string_lossy()
2642 .to_string()
2643 .into()
2644 }
2645}
2646
2647impl Repository {
2648 fn local(
2649 id: RepositoryId,
2650 work_directory_abs_path: Arc<Path>,
2651 dot_git_abs_path: Arc<Path>,
2652 project_environment: WeakEntity<ProjectEnvironment>,
2653 fs: Arc<dyn Fs>,
2654 git_store: WeakEntity<GitStore>,
2655 cx: &mut Context<Self>,
2656 ) -> Self {
2657 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2658 Repository {
2659 this: cx.weak_entity(),
2660 git_store,
2661 snapshot,
2662 commit_message_buffer: None,
2663 askpass_delegates: Default::default(),
2664 paths_needing_status_update: Default::default(),
2665 latest_askpass_id: 0,
2666 job_sender: Repository::spawn_local_git_worker(
2667 work_directory_abs_path,
2668 dot_git_abs_path,
2669 project_environment,
2670 fs,
2671 cx,
2672 ),
2673 job_id: 0,
2674 active_jobs: Default::default(),
2675 }
2676 }
2677
2678 fn remote(
2679 id: RepositoryId,
2680 work_directory_abs_path: Arc<Path>,
2681 project_id: ProjectId,
2682 client: AnyProtoClient,
2683 git_store: WeakEntity<GitStore>,
2684 cx: &mut Context<Self>,
2685 ) -> Self {
2686 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2687 Self {
2688 this: cx.weak_entity(),
2689 snapshot,
2690 commit_message_buffer: None,
2691 git_store,
2692 paths_needing_status_update: Default::default(),
2693 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2694 askpass_delegates: Default::default(),
2695 latest_askpass_id: 0,
2696 active_jobs: Default::default(),
2697 job_id: 0,
2698 }
2699 }
2700
2701 pub fn git_store(&self) -> Option<Entity<GitStore>> {
2702 self.git_store.upgrade()
2703 }
2704
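/// Enqueues a job to run against this repository's state (local backend or remote client) and
/// returns a receiver for its result. A minimal usage sketch, with a hypothetical closure body:
/// `let rx = repo.send_job(Some("git status".into()), |state, _cx| async move { /* ... */ });`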
2705 pub fn send_job<F, Fut, R>(
2706 &mut self,
2707 status: Option<SharedString>,
2708 job: F,
2709 ) -> oneshot::Receiver<R>
2710 where
2711 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2712 Fut: Future<Output = R> + 'static,
2713 R: Send + 'static,
2714 {
2715 self.send_keyed_job(None, status, job)
2716 }
2717
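/// Like `send_job`, but when a key is provided, a queued job is skipped if a newer job with the same key is already waiting.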
2718 fn send_keyed_job<F, Fut, R>(
2719 &mut self,
2720 key: Option<GitJobKey>,
2721 status: Option<SharedString>,
2722 job: F,
2723 ) -> oneshot::Receiver<R>
2724 where
2725 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2726 Fut: Future<Output = R> + 'static,
2727 R: Send + 'static,
2728 {
2729 let (result_tx, result_rx) = futures::channel::oneshot::channel();
2730 let job_id = post_inc(&mut self.job_id);
2731 let this = self.this.clone();
2732 self.job_sender
2733 .unbounded_send(GitJob {
2734 key,
2735 job: Box::new(move |state, cx: &mut AsyncApp| {
2736 let job = job(state, cx.clone());
2737 cx.spawn(async move |cx| {
2738 if let Some(s) = status.clone() {
2739 this.update(cx, |this, cx| {
2740 this.active_jobs.insert(
2741 job_id,
2742 JobInfo {
2743 start: Instant::now(),
2744 message: s.clone(),
2745 },
2746 );
2747
2748 cx.notify();
2749 })
2750 .ok();
2751 }
2752 let result = job.await;
2753
2754 this.update(cx, |this, cx| {
2755 this.active_jobs.remove(&job_id);
2756 cx.notify();
2757 })
2758 .ok();
2759
2760 result_tx.send(result).ok();
2761 })
2762 }),
2763 })
2764 .ok();
2765 result_rx
2766 }
2767
2768 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
2769 let Some(git_store) = self.git_store.upgrade() else {
2770 return;
2771 };
2772 let entity = cx.entity();
2773 git_store.update(cx, |git_store, cx| {
2774 let Some((&id, _)) = git_store
2775 .repositories
2776 .iter()
2777 .find(|(_, handle)| *handle == &entity)
2778 else {
2779 return;
2780 };
2781 git_store.active_repo_id = Some(id);
2782 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
2783 });
2784 }
2785
2786 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
2787 self.snapshot.status()
2788 }
2789
2790 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
2791 let git_store = self.git_store.upgrade()?;
2792 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2793 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
2794 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
2795 Some(ProjectPath {
2796 worktree_id: worktree.read(cx).id(),
2797 path: relative_path.into(),
2798 })
2799 }
2800
2801 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
2802 let git_store = self.git_store.upgrade()?;
2803 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2804 let abs_path = worktree_store.absolutize(path, cx)?;
2805 self.snapshot.abs_path_to_repo_path(&abs_path)
2806 }
2807
2808 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
2809 other
2810 .read(cx)
2811 .snapshot
2812 .work_directory_abs_path
2813 .starts_with(&self.snapshot.work_directory_abs_path)
2814 }
2815
2816 pub fn open_commit_buffer(
2817 &mut self,
2818 languages: Option<Arc<LanguageRegistry>>,
2819 buffer_store: Entity<BufferStore>,
2820 cx: &mut Context<Self>,
2821 ) -> Task<Result<Entity<Buffer>>> {
2822 let id = self.id;
2823 if let Some(buffer) = self.commit_message_buffer.clone() {
2824 return Task::ready(Ok(buffer));
2825 }
2826 let this = cx.weak_entity();
2827
2828 let rx = self.send_job(None, move |state, mut cx| async move {
2829 let Some(this) = this.upgrade() else {
2830 bail!("git store was dropped");
2831 };
2832 match state {
2833 RepositoryState::Local { .. } => {
2834 this.update(&mut cx, |_, cx| {
2835 Self::open_local_commit_buffer(languages, buffer_store, cx)
2836 })?
2837 .await
2838 }
2839 RepositoryState::Remote { project_id, client } => {
2840 let request = client.request(proto::OpenCommitMessageBuffer {
2841 project_id: project_id.0,
2842 repository_id: id.to_proto(),
2843 });
2844 let response = request.await.context("requesting to open commit buffer")?;
2845 let buffer_id = BufferId::new(response.buffer_id)?;
2846 let buffer = buffer_store
2847 .update(&mut cx, |buffer_store, cx| {
2848 buffer_store.wait_for_remote_buffer(buffer_id, cx)
2849 })?
2850 .await?;
2851 if let Some(language_registry) = languages {
2852 let git_commit_language =
2853 language_registry.language_for_name("Git Commit").await?;
2854 buffer.update(&mut cx, |buffer, cx| {
2855 buffer.set_language(Some(git_commit_language), cx);
2856 })?;
2857 }
2858 this.update(&mut cx, |this, _| {
2859 this.commit_message_buffer = Some(buffer.clone());
2860 })?;
2861 Ok(buffer)
2862 }
2863 }
2864 });
2865
2866 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
2867 }
2868
2869 fn open_local_commit_buffer(
2870 language_registry: Option<Arc<LanguageRegistry>>,
2871 buffer_store: Entity<BufferStore>,
2872 cx: &mut Context<Self>,
2873 ) -> Task<Result<Entity<Buffer>>> {
2874 cx.spawn(async move |repository, cx| {
2875 let buffer = buffer_store
2876 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
2877 .await?;
2878
2879 if let Some(language_registry) = language_registry {
2880 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
2881 buffer.update(cx, |buffer, cx| {
2882 buffer.set_language(Some(git_commit_language), cx);
2883 })?;
2884 }
2885
2886 repository.update(cx, |repository, _| {
2887 repository.commit_message_buffer = Some(buffer.clone());
2888 })?;
2889 Ok(buffer)
2890 })
2891 }
2892
2893 pub fn checkout_files(
2894 &mut self,
2895 commit: &str,
2896 paths: Vec<RepoPath>,
2897 _cx: &mut App,
2898 ) -> oneshot::Receiver<Result<()>> {
2899 let commit = commit.to_string();
2900 let id = self.id;
2901
2902 self.send_job(
2903 Some(format!("git checkout {}", commit).into()),
2904 move |git_repo, _| async move {
2905 match git_repo {
2906 RepositoryState::Local {
2907 backend,
2908 environment,
2909 ..
2910 } => {
2911 backend
2912 .checkout_files(commit, paths, environment.clone())
2913 .await
2914 }
2915 RepositoryState::Remote { project_id, client } => {
2916 client
2917 .request(proto::GitCheckoutFiles {
2918 project_id: project_id.0,
2919 repository_id: id.to_proto(),
2920 commit,
2921 paths: paths
2922 .into_iter()
2923 .map(|p| p.to_string_lossy().to_string())
2924 .collect(),
2925 })
2926 .await?;
2927
2928 Ok(())
2929 }
2930 }
2931 },
2932 )
2933 }
2934
2935 pub fn reset(
2936 &mut self,
2937 commit: String,
2938 reset_mode: ResetMode,
2939 _cx: &mut App,
2940 ) -> oneshot::Receiver<Result<()>> {
2942 let id = self.id;
2943
2944 self.send_job(None, move |git_repo, _| async move {
2945 match git_repo {
2946 RepositoryState::Local {
2947 backend,
2948 environment,
2949 ..
2950 } => backend.reset(commit, reset_mode, environment).await,
2951 RepositoryState::Remote { project_id, client } => {
2952 client
2953 .request(proto::GitReset {
2954 project_id: project_id.0,
2955 repository_id: id.to_proto(),
2956 commit,
2957 mode: match reset_mode {
2958 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
2959 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
2960 },
2961 })
2962 .await?;
2963
2964 Ok(())
2965 }
2966 }
2967 })
2968 }
2969
2970 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
2971 let id = self.id;
2972 self.send_job(None, move |git_repo, _cx| async move {
2973 match git_repo {
2974 RepositoryState::Local { backend, .. } => backend.show(commit).await,
2975 RepositoryState::Remote { project_id, client } => {
2976 let resp = client
2977 .request(proto::GitShow {
2978 project_id: project_id.0,
2979 repository_id: id.to_proto(),
2980 commit,
2981 })
2982 .await?;
2983
2984 Ok(CommitDetails {
2985 sha: resp.sha.into(),
2986 message: resp.message.into(),
2987 commit_timestamp: resp.commit_timestamp,
2988 author_email: resp.author_email.into(),
2989 author_name: resp.author_name.into(),
2990 })
2991 }
2992 }
2993 })
2994 }
2995
2996 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
2997 let id = self.id;
2998 self.send_job(None, move |git_repo, cx| async move {
2999 match git_repo {
3000 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3001 RepositoryState::Remote {
3002 client, project_id, ..
3003 } => {
3004 let response = client
3005 .request(proto::LoadCommitDiff {
3006 project_id: project_id.0,
3007 repository_id: id.to_proto(),
3008 commit,
3009 })
3010 .await?;
3011 Ok(CommitDiff {
3012 files: response
3013 .files
3014 .into_iter()
3015 .map(|file| CommitFile {
3016 path: Path::new(&file.path).into(),
3017 old_text: file.old_text,
3018 new_text: file.new_text,
3019 })
3020 .collect(),
3021 })
3022 }
3023 }
3024 })
3025 }
3026
3027 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3028 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3029 }
3030
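/// Saves any open buffers for the given paths whose files still exist on disk, then stages those paths.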
3031 pub fn stage_entries(
3032 &self,
3033 entries: Vec<RepoPath>,
3034 cx: &mut Context<Self>,
3035 ) -> Task<anyhow::Result<()>> {
3036 if entries.is_empty() {
3037 return Task::ready(Ok(()));
3038 }
3039 let id = self.id;
3040
3041 let mut save_futures = Vec::new();
3042 if let Some(buffer_store) = self.buffer_store(cx) {
3043 buffer_store.update(cx, |buffer_store, cx| {
3044 for path in &entries {
3045 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3046 continue;
3047 };
3048 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3049 if buffer
3050 .read(cx)
3051 .file()
3052 .map_or(false, |file| file.disk_state().exists())
3053 {
3054 save_futures.push(buffer_store.save_buffer(buffer, cx));
3055 }
3056 }
3057 }
3058 })
3059 }
3060
3061 cx.spawn(async move |this, cx| {
3062 for save_future in save_futures {
3063 save_future.await?;
3064 }
3065
3066 this.update(cx, |this, _| {
3067 this.send_job(None, move |git_repo, _cx| async move {
3068 match git_repo {
3069 RepositoryState::Local {
3070 backend,
3071 environment,
3072 ..
3073 } => backend.stage_paths(entries, environment.clone()).await,
3074 RepositoryState::Remote { project_id, client } => {
3075 client
3076 .request(proto::Stage {
3077 project_id: project_id.0,
3078 repository_id: id.to_proto(),
3079 paths: entries
3080 .into_iter()
3081 .map(|repo_path| repo_path.as_ref().to_proto())
3082 .collect(),
3083 })
3084 .await
3085 .context("sending stage request")?;
3086
3087 Ok(())
3088 }
3089 }
3090 })
3091 })?
3092 .await??;
3093
3094 Ok(())
3095 })
3096 }
3097
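/// Saves any open buffers for the given paths whose files still exist on disk, then unstages those paths.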
3098 pub fn unstage_entries(
3099 &self,
3100 entries: Vec<RepoPath>,
3101 cx: &mut Context<Self>,
3102 ) -> Task<anyhow::Result<()>> {
3103 if entries.is_empty() {
3104 return Task::ready(Ok(()));
3105 }
3106 let id = self.id;
3107
3108 let mut save_futures = Vec::new();
3109 if let Some(buffer_store) = self.buffer_store(cx) {
3110 buffer_store.update(cx, |buffer_store, cx| {
3111 for path in &entries {
3112 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3113 continue;
3114 };
3115 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3116 if buffer
3117 .read(cx)
3118 .file()
3119 .map_or(false, |file| file.disk_state().exists())
3120 {
3121 save_futures.push(buffer_store.save_buffer(buffer, cx));
3122 }
3123 }
3124 }
3125 })
3126 }
3127
3128 cx.spawn(async move |this, cx| {
3129 for save_future in save_futures {
3130 save_future.await?;
3131 }
3132
3133 this.update(cx, |this, _| {
3134 this.send_job(None, move |git_repo, _cx| async move {
3135 match git_repo {
3136 RepositoryState::Local {
3137 backend,
3138 environment,
3139 ..
3140 } => backend.unstage_paths(entries, environment).await,
3141 RepositoryState::Remote { project_id, client } => {
3142 client
3143 .request(proto::Unstage {
3144 project_id: project_id.0,
3145 repository_id: id.to_proto(),
3146 paths: entries
3147 .into_iter()
3148 .map(|repo_path| repo_path.as_ref().to_proto())
3149 .collect(),
3150 })
3151 .await
3152 .context("sending unstage request")?;
3153
3154 Ok(())
3155 }
3156 }
3157 })
3158 })?
3159 .await??;
3160
3161 Ok(())
3162 })
3163 }
3164
3165 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3166 let to_stage = self
3167 .cached_status()
3168 .filter(|entry| !entry.status.staging().is_fully_staged())
3169 .map(|entry| entry.repo_path.clone())
3170 .collect();
3171 self.stage_entries(to_stage, cx)
3172 }
3173
3174 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3175 let to_unstage = self
3176 .cached_status()
3177 .filter(|entry| entry.status.staging().has_staged())
3178 .map(|entry| entry.repo_path.clone())
3179 .collect();
3180 self.unstage_entries(to_unstage, cx)
3181 }
3182
3183 pub fn commit(
3184 &mut self,
3185 message: SharedString,
3186 name_and_email: Option<(SharedString, SharedString)>,
3187 _cx: &mut App,
3188 ) -> oneshot::Receiver<Result<()>> {
3189 let id = self.id;
3190
3191 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3192 match git_repo {
3193 RepositoryState::Local {
3194 backend,
3195 environment,
3196 ..
3197 } => backend.commit(message, name_and_email, environment).await,
3198 RepositoryState::Remote { project_id, client } => {
3199 let (name, email) = name_and_email.unzip();
3200 client
3201 .request(proto::Commit {
3202 project_id: project_id.0,
3203 repository_id: id.to_proto(),
3204 message: String::from(message),
3205 name: name.map(String::from),
3206 email: email.map(String::from),
3207 })
3208 .await
3209 .context("sending commit request")?;
3210
3211 Ok(())
3212 }
3213 }
3214 })
3215 }
3216
3217 pub fn fetch(
3218 &mut self,
3219 askpass: AskPassDelegate,
3220 _cx: &mut App,
3221 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3222 let askpass_delegates = self.askpass_delegates.clone();
3223 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3224 let id = self.id;
3225
3226 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3227 match git_repo {
3228 RepositoryState::Local {
3229 backend,
3230 environment,
3231 ..
3232 } => backend.fetch(askpass, environment, cx).await,
3233 RepositoryState::Remote { project_id, client } => {
3234 askpass_delegates.lock().insert(askpass_id, askpass);
3235 let _defer = util::defer(|| {
3236 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3237 debug_assert!(askpass_delegate.is_some());
3238 });
3239
3240 let response = client
3241 .request(proto::Fetch {
3242 project_id: project_id.0,
3243 repository_id: id.to_proto(),
3244 askpass_id,
3245 })
3246 .await
3247 .context("sending fetch request")?;
3248
3249 Ok(RemoteCommandOutput {
3250 stdout: response.stdout,
3251 stderr: response.stderr,
3252 })
3253 }
3254 }
3255 })
3256 }
3257
3258 pub fn push(
3259 &mut self,
3260 branch: SharedString,
3261 remote: SharedString,
3262 options: Option<PushOptions>,
3263 askpass: AskPassDelegate,
3264 cx: &mut Context<Self>,
3265 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3266 let askpass_delegates = self.askpass_delegates.clone();
3267 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3268 let id = self.id;
3269
3270 let args = options
3271 .map(|option| match option {
3272 PushOptions::SetUpstream => " --set-upstream",
3273 PushOptions::Force => " --force",
3274 })
3275 .unwrap_or("");
3276
3277 let updates_tx = self
3278 .git_store()
3279 .and_then(|git_store| match &git_store.read(cx).state {
3280 GitStoreState::Local { downstream, .. } => downstream
3281 .as_ref()
3282 .map(|downstream| downstream.updates_tx.clone()),
3283 _ => None,
3284 });
3285
3286 let this = cx.weak_entity();
3287 self.send_job(
3288 Some(format!("git push {} {} {}", args, branch, remote).into()),
3289 move |git_repo, mut cx| async move {
3290 match git_repo {
3291 RepositoryState::Local {
3292 backend,
3293 environment,
3294 ..
3295 } => {
3296 let result = backend
3297 .push(
3298 branch.to_string(),
3299 remote.to_string(),
3300 options,
3301 askpass,
3302 environment.clone(),
3303 cx.clone(),
3304 )
3305 .await;
3306 if result.is_ok() {
3307 let branches = backend.branches().await?;
3308 let branch = branches.into_iter().find(|branch| branch.is_head);
3309 log::info!("head branch after scan is {branch:?}");
3310 let snapshot = this.update(&mut cx, |this, cx| {
3311 this.snapshot.branch = branch;
3312 let snapshot = this.snapshot.clone();
3313 cx.emit(RepositoryEvent::Updated { full_scan: false });
3314 snapshot
3315 })?;
3316 if let Some(updates_tx) = updates_tx {
3317 updates_tx
3318 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3319 .ok();
3320 }
3321 }
3322 result
3323 }
3324 RepositoryState::Remote { project_id, client } => {
3325 askpass_delegates.lock().insert(askpass_id, askpass);
3326 let _defer = util::defer(|| {
3327 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3328 debug_assert!(askpass_delegate.is_some());
3329 });
3330 let response = client
3331 .request(proto::Push {
3332 project_id: project_id.0,
3333 repository_id: id.to_proto(),
3334 askpass_id,
3335 branch_name: branch.to_string(),
3336 remote_name: remote.to_string(),
3337 options: options.map(|options| match options {
3338 PushOptions::Force => proto::push::PushOptions::Force,
3339 PushOptions::SetUpstream => {
3340 proto::push::PushOptions::SetUpstream
3341 }
3342 }
3343 as i32),
3344 })
3345 .await
3346 .context("sending push request")?;
3347
3348 Ok(RemoteCommandOutput {
3349 stdout: response.stdout,
3350 stderr: response.stderr,
3351 })
3352 }
3353 }
3354 },
3355 )
3356 }
3357
3358 pub fn pull(
3359 &mut self,
3360 branch: SharedString,
3361 remote: SharedString,
3362 askpass: AskPassDelegate,
3363 _cx: &mut App,
3364 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3365 let askpass_delegates = self.askpass_delegates.clone();
3366 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3367 let id = self.id;
3368
3369 self.send_job(
3370 Some(format!("git pull {} {}", remote, branch).into()),
3371 move |git_repo, cx| async move {
3372 match git_repo {
3373 RepositoryState::Local {
3374 backend,
3375 environment,
3376 ..
3377 } => {
3378 backend
3379 .pull(
3380 branch.to_string(),
3381 remote.to_string(),
3382 askpass,
3383 environment.clone(),
3384 cx,
3385 )
3386 .await
3387 }
3388 RepositoryState::Remote { project_id, client } => {
3389 askpass_delegates.lock().insert(askpass_id, askpass);
3390 let _defer = util::defer(|| {
3391 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3392 debug_assert!(askpass_delegate.is_some());
3393 });
3394 let response = client
3395 .request(proto::Pull {
3396 project_id: project_id.0,
3397 repository_id: id.to_proto(),
3398 askpass_id,
3399 branch_name: branch.to_string(),
3400 remote_name: remote.to_string(),
3401 })
3402 .await
3403 .context("sending pull request")?;
3404
3405 Ok(RemoteCommandOutput {
3406 stdout: response.stdout,
3407 stderr: response.stderr,
3408 })
3409 }
3410 }
3411 },
3412 )
3413 }
3414
3415 fn spawn_set_index_text_job(
3416 &mut self,
3417 path: RepoPath,
3418 content: Option<String>,
3419 _cx: &mut App,
3420 ) -> oneshot::Receiver<anyhow::Result<()>> {
3421 let id = self.id;
3422
3423 self.send_keyed_job(
3424 Some(GitJobKey::WriteIndex(path.clone())),
3425 None,
3426 move |git_repo, _cx| async move {
3427 match git_repo {
3428 RepositoryState::Local {
3429 backend,
3430 environment,
3431 ..
3432 } => {
3433 backend
3434 .set_index_text(path, content, environment.clone())
3435 .await
3436 }
3437 RepositoryState::Remote { project_id, client } => {
3438 client
3439 .request(proto::SetIndexText {
3440 project_id: project_id.0,
3441 repository_id: id.to_proto(),
3442 path: path.as_ref().to_proto(),
3443 text: content,
3444 })
3445 .await?;
3446 Ok(())
3447 }
3448 }
3449 },
3450 )
3451 }
3452
3453 pub fn get_remotes(
3454 &mut self,
3455 branch_name: Option<String>,
3456 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
3457 let id = self.id;
3458 self.send_job(None, move |repo, _cx| async move {
3459 match repo {
3460 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
3461 RepositoryState::Remote { project_id, client } => {
3462 let response = client
3463 .request(proto::GetRemotes {
3464 project_id: project_id.0,
3465 repository_id: id.to_proto(),
3466 branch_name,
3467 })
3468 .await?;
3469
3470 let remotes = response
3471 .remotes
3472 .into_iter()
3473 .map(|remote| git::repository::Remote {
3474 name: remote.name.into(),
3475 })
3476 .collect();
3477
3478 Ok(remotes)
3479 }
3480 }
3481 })
3482 }
3483
3484 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
3485 let id = self.id;
3486 self.send_job(None, move |repo, cx| async move {
3487 match repo {
3488 RepositoryState::Local { backend, .. } => {
3489 let backend = backend.clone();
3490 cx.background_spawn(async move { backend.branches().await })
3491 .await
3492 }
3493 RepositoryState::Remote { project_id, client } => {
3494 let response = client
3495 .request(proto::GitGetBranches {
3496 project_id: project_id.0,
3497 repository_id: id.to_proto(),
3498 })
3499 .await?;
3500
3501 let branches = response
3502 .branches
3503 .into_iter()
3504 .map(|branch| proto_to_branch(&branch))
3505 .collect();
3506
3507 Ok(branches)
3508 }
3509 }
3510 })
3511 }
3512
3513 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
3514 let id = self.id;
3515 self.send_job(None, move |repo, _cx| async move {
3516 match repo {
3517 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
3518 RepositoryState::Remote { project_id, client } => {
3519 let response = client
3520 .request(proto::GitDiff {
3521 project_id: project_id.0,
3522 repository_id: id.to_proto(),
3523 diff_type: match diff_type {
3524 DiffType::HeadToIndex => {
3525 proto::git_diff::DiffType::HeadToIndex.into()
3526 }
3527 DiffType::HeadToWorktree => {
3528 proto::git_diff::DiffType::HeadToWorktree.into()
3529 }
3530 },
3531 })
3532 .await?;
3533
3534 Ok(response.diff)
3535 }
3536 }
3537 })
3538 }
3539
3540 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3541 let id = self.id;
3542 self.send_job(
3543 Some(format!("git switch -c {branch_name}").into()),
3544 move |repo, _cx| async move {
3545 match repo {
3546 RepositoryState::Local { backend, .. } => {
3547 backend.create_branch(branch_name).await
3548 }
3549 RepositoryState::Remote { project_id, client } => {
3550 client
3551 .request(proto::GitCreateBranch {
3552 project_id: project_id.0,
3553 repository_id: id.to_proto(),
3554 branch_name,
3555 })
3556 .await?;
3557
3558 Ok(())
3559 }
3560 }
3561 },
3562 )
3563 }
3564
3565 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3566 let id = self.id;
3567 self.send_job(
3568 Some(format!("git switch {branch_name}").into()),
3569 move |repo, _cx| async move {
3570 match repo {
3571 RepositoryState::Local { backend, .. } => {
3572 backend.change_branch(branch_name).await
3573 }
3574 RepositoryState::Remote { project_id, client } => {
3575 client
3576 .request(proto::GitChangeBranch {
3577 project_id: project_id.0,
3578 repository_id: id.to_proto(),
3579 branch_name,
3580 })
3581 .await?;
3582
3583 Ok(())
3584 }
3585 }
3586 },
3587 )
3588 }
3589
3590 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
3591 let id = self.id;
3592 self.send_job(None, move |repo, _cx| async move {
3593 match repo {
3594 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
3595 RepositoryState::Remote { project_id, client } => {
3596 let response = client
3597 .request(proto::CheckForPushedCommits {
3598 project_id: project_id.0,
3599 repository_id: id.to_proto(),
3600 })
3601 .await?;
3602
3603 let branches = response.pushed_to.into_iter().map(Into::into).collect();
3604
3605 Ok(branches)
3606 }
3607 }
3608 })
3609 }
3610
3611 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
3612 self.send_job(None, |repo, _cx| async move {
3613 match repo {
3614 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
3615 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3616 }
3617 })
3618 }
3619
3620 pub fn restore_checkpoint(
3621 &mut self,
3622 checkpoint: GitRepositoryCheckpoint,
3623 ) -> oneshot::Receiver<Result<()>> {
3624 self.send_job(None, move |repo, _cx| async move {
3625 match repo {
3626 RepositoryState::Local { backend, .. } => {
3627 backend.restore_checkpoint(checkpoint).await
3628 }
3629 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3630 }
3631 })
3632 }
3633
3634 pub(crate) fn apply_remote_update(
3635 &mut self,
3636 update: proto::UpdateRepository,
3637 cx: &mut Context<Self>,
3638 ) -> Result<()> {
3639 let conflicted_paths = TreeSet::from_ordered_entries(
3640 update
3641 .current_merge_conflicts
3642 .into_iter()
3643 .map(|path| RepoPath(Path::new(&path).into())),
3644 );
3645 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
3646 self.snapshot.merge_conflicts = conflicted_paths;
3647
3648 let edits = update
3649 .removed_statuses
3650 .into_iter()
3651 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
3652 .chain(
3653 update
3654 .updated_statuses
3655 .into_iter()
3656 .filter_map(|updated_status| {
3657 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
3658 }),
3659 )
3660 .collect::<Vec<_>>();
3661 self.snapshot.statuses_by_path.edit(edits, &());
3662 if update.is_last_update {
3663 self.snapshot.scan_id = update.scan_id;
3664 }
3665 cx.emit(RepositoryEvent::Updated { full_scan: true });
3666 Ok(())
3667 }
3668
3669 pub fn compare_checkpoints(
3670 &mut self,
3671 left: GitRepositoryCheckpoint,
3672 right: GitRepositoryCheckpoint,
3673 ) -> oneshot::Receiver<Result<bool>> {
3674 self.send_job(None, move |repo, _cx| async move {
3675 match repo {
3676 RepositoryState::Local { backend, .. } => {
3677 backend.compare_checkpoints(left, right).await
3678 }
3679 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3680 }
3681 })
3682 }
3683
3684 pub fn delete_checkpoint(
3685 &mut self,
3686 checkpoint: GitRepositoryCheckpoint,
3687 ) -> oneshot::Receiver<Result<()>> {
3688 self.send_job(None, move |repo, _cx| async move {
3689 match repo {
3690 RepositoryState::Local { backend, .. } => {
3691 backend.delete_checkpoint(checkpoint).await
3692 }
3693 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3694 }
3695 })
3696 }
3697
3698 pub fn diff_checkpoints(
3699 &mut self,
3700 base_checkpoint: GitRepositoryCheckpoint,
3701 target_checkpoint: GitRepositoryCheckpoint,
3702 ) -> oneshot::Receiver<Result<String>> {
3703 self.send_job(None, move |repo, _cx| async move {
3704 match repo {
3705 RepositoryState::Local { backend, .. } => {
3706 backend
3707 .diff_checkpoints(base_checkpoint, target_checkpoint)
3708 .await
3709 }
3710 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3711 }
3712 })
3713 }
3714
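/// Schedules a full rescan of the repository's git state, emitting events and forwarding the new snapshot downstream.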
3715 fn schedule_scan(
3716 &mut self,
3717 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
3718 cx: &mut Context<Self>,
3719 ) {
3720 self.paths_changed(
3721 vec![git::repository::WORK_DIRECTORY_REPO_PATH.clone()],
3722 updates_tx.clone(),
3723 cx,
3724 );
3725
3726 let this = cx.weak_entity();
3727 let _ = self.send_keyed_job(
3728 Some(GitJobKey::ReloadGitState),
3729 None,
3730 |state, mut cx| async move {
3731 let Some(this) = this.upgrade() else {
3732 return Ok(());
3733 };
3734 let RepositoryState::Local { backend, .. } = state else {
3735 bail!("not a local repository")
3736 };
3737 let (snapshot, events) = this
3738 .update(&mut cx, |this, _| {
3739 compute_snapshot(
3740 this.id,
3741 this.work_directory_abs_path.clone(),
3742 this.snapshot.clone(),
3743 backend.clone(),
3744 )
3745 })?
3746 .await?;
3747 this.update(&mut cx, |this, cx| {
3748 this.snapshot = snapshot.clone();
3749 for event in events {
3750 cx.emit(event);
3751 }
3752 })?;
3753 if let Some(updates_tx) = updates_tx {
3754 updates_tx
3755 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3756 .ok();
3757 }
3758 Ok(())
3759 },
3760 );
3761 }
3762
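/// Spawns a worker that resolves the repository backend and directory environment, then runs queued git jobs one at a time.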
3763 fn spawn_local_git_worker(
3764 work_directory_abs_path: Arc<Path>,
3765 dot_git_abs_path: Arc<Path>,
3766 project_environment: WeakEntity<ProjectEnvironment>,
3767 fs: Arc<dyn Fs>,
3768 cx: &mut Context<Self>,
3769 ) -> mpsc::UnboundedSender<GitJob> {
3770 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
3771
3772 cx.spawn(async move |_, cx| {
3773 let environment = project_environment
3774 .upgrade()
3775 .ok_or_else(|| anyhow!("missing project environment"))?
3776 .update(cx, |project_environment, cx| {
3777 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
3778 })?
3779 .await
3780 .unwrap_or_else(|| {
3781 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
3782 HashMap::default()
3783 });
3784 let backend = cx
3785 .background_spawn(async move {
3786 fs.open_repo(&dot_git_abs_path)
3787 .ok_or_else(|| anyhow!("failed to build repository"))
3788 })
3789 .await?;
3790
3791 if let Some(git_hosting_provider_registry) =
3792 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
3793 {
3794 git_hosting_providers::register_additional_providers(
3795 git_hosting_provider_registry,
3796 backend.clone(),
3797 );
3798 }
3799
3800 let state = RepositoryState::Local {
3801 backend,
3802 environment: Arc::new(environment),
3803 };
3804 let mut jobs = VecDeque::new();
3805 loop {
3806 while let Ok(Some(next_job)) = job_rx.try_next() {
3807 jobs.push_back(next_job);
3808 }
3809
3810 if let Some(job) = jobs.pop_front() {
3811 if let Some(current_key) = &job.key {
3812 if jobs
3813 .iter()
3814 .any(|other_job| other_job.key.as_ref() == Some(current_key))
3815 {
3816 continue;
3817 }
3818 }
3819 (job.job)(state.clone(), cx).await;
3820 } else if let Some(job) = job_rx.next().await {
3821 jobs.push_back(job);
3822 } else {
3823 break;
3824 }
3825 }
3826 anyhow::Ok(())
3827 })
3828 .detach_and_log_err(cx);
3829
3830 job_tx
3831 }
3832
3833 fn spawn_remote_git_worker(
3834 project_id: ProjectId,
3835 client: AnyProtoClient,
3836 cx: &mut Context<Self>,
3837 ) -> mpsc::UnboundedSender<GitJob> {
3838 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
3839
3840 cx.spawn(async move |_, cx| {
3841 let state = RepositoryState::Remote { project_id, client };
3842 let mut jobs = VecDeque::new();
3843 loop {
3844 while let Ok(Some(next_job)) = job_rx.try_next() {
3845 jobs.push_back(next_job);
3846 }
3847
3848 if let Some(job) = jobs.pop_front() {
3849 if let Some(current_key) = &job.key {
3850 if jobs
3851 .iter()
3852 .any(|other_job| other_job.key.as_ref() == Some(current_key))
3853 {
3854 continue;
3855 }
3856 }
3857 (job.job)(state.clone(), cx).await;
3858 } else if let Some(job) = job_rx.next().await {
3859 jobs.push_back(job);
3860 } else {
3861 break;
3862 }
3863 }
3864 anyhow::Ok(())
3865 })
3866 .detach_and_log_err(cx);
3867
3868 job_tx
3869 }
3870
3871 fn load_staged_text(
3872 &mut self,
3873 buffer_id: BufferId,
3874 repo_path: RepoPath,
3875 cx: &App,
3876 ) -> Task<Result<Option<String>>> {
3877 let rx = self.send_job(None, move |state, _| async move {
3878 match state {
3879 RepositoryState::Local { backend, .. } => {
3880 anyhow::Ok(backend.load_index_text(repo_path).await)
3881 }
3882 RepositoryState::Remote { project_id, client } => {
3883 let response = client
3884 .request(proto::OpenUnstagedDiff {
3885 project_id: project_id.to_proto(),
3886 buffer_id: buffer_id.to_proto(),
3887 })
3888 .await?;
3889 Ok(response.staged_text)
3890 }
3891 }
3892 });
3893 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
3894 }
3895
3896 fn load_committed_text(
3897 &mut self,
3898 buffer_id: BufferId,
3899 repo_path: RepoPath,
3900 cx: &App,
3901 ) -> Task<Result<DiffBasesChange>> {
3902 let rx = self.send_job(None, move |state, _| async move {
3903 match state {
3904 RepositoryState::Local { backend, .. } => {
3905 let committed_text = backend.load_committed_text(repo_path.clone()).await;
3906 let staged_text = backend.load_index_text(repo_path).await;
3907 let diff_bases_change = if committed_text == staged_text {
3908 DiffBasesChange::SetBoth(committed_text)
3909 } else {
3910 DiffBasesChange::SetEach {
3911 index: staged_text,
3912 head: committed_text,
3913 }
3914 };
3915 anyhow::Ok(diff_bases_change)
3916 }
3917 RepositoryState::Remote { project_id, client } => {
3918 use proto::open_uncommitted_diff_response::Mode;
3919
3920 let response = client
3921 .request(proto::OpenUncommittedDiff {
3922 project_id: project_id.to_proto(),
3923 buffer_id: buffer_id.to_proto(),
3924 })
3925 .await?;
3926 let mode =
3927 Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
3928 let bases = match mode {
3929 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
3930 Mode::IndexAndHead => DiffBasesChange::SetEach {
3931 head: response.committed_text,
3932 index: response.staged_text,
3933 },
3934 };
3935 Ok(bases)
3936 }
3937 }
3938 });
3939
3940 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
3941 }
3942
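/// Records paths whose status may have changed and schedules a job to refresh their statuses, updating the snapshot and notifying downstream.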
3943 fn paths_changed(
3944 &mut self,
3945 paths: Vec<RepoPath>,
3946 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
3947 cx: &mut Context<Self>,
3948 ) {
3949 self.paths_needing_status_update.extend(paths);
3950
3951 let this = cx.weak_entity();
3952 let _ = self.send_keyed_job(
3953 Some(GitJobKey::RefreshStatuses),
3954 None,
3955 |state, mut cx| async move {
3956 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
3957 (
3958 this.snapshot.clone(),
3959 mem::take(&mut this.paths_needing_status_update),
3960 )
3961 })?;
3962 let RepositoryState::Local { backend, .. } = state else {
3963 bail!("not a local repository")
3964 };
3965
3966 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
3967 let statuses = backend.status(&paths).await?;
3968
3969 let changed_path_statuses = cx
3970 .background_spawn(async move {
3971 let mut changed_path_statuses = Vec::new();
3972 let prev_statuses = prev_snapshot.statuses_by_path.clone();
3973 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
3974
3975 for (repo_path, status) in &*statuses.entries {
3976 changed_paths.remove(repo_path);
3977 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) {
3978 if &cursor.item().unwrap().status == status {
3979 continue;
3980 }
3981 }
3982
3983 changed_path_statuses.push(Edit::Insert(StatusEntry {
3984 repo_path: repo_path.clone(),
3985 status: *status,
3986 }));
3987 }
3988 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
3989 for path in changed_paths.iter() {
3990 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) {
3991 changed_path_statuses.push(Edit::Remove(PathKey(path.0.clone())));
3992 }
3993 }
3994 changed_path_statuses
3995 })
3996 .await;
3997
3998 this.update(&mut cx, |this, cx| {
3999 if !changed_path_statuses.is_empty() {
4000 this.snapshot
4001 .statuses_by_path
4002 .edit(changed_path_statuses, &());
4003 this.snapshot.scan_id += 1;
4004 if let Some(updates_tx) = updates_tx {
4005 updates_tx
4006 .unbounded_send(DownstreamUpdate::UpdateRepository(
4007 this.snapshot.clone(),
4008 ))
4009 .ok();
4010 }
4011 }
4012 cx.emit(RepositoryEvent::Updated { full_scan: false });
4013 })
4014 },
4015 );
4016 }
4017
    /// Returns the currently running git command, if any, along with when it started.
4019 pub fn current_job(&self) -> Option<JobInfo> {
4020 self.active_jobs.values().next().cloned()
4021 }
4022}
4023
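/// Builds a permalink for a file that lives inside the Cargo registry source
/// cache. `cargo publish` writes a `.cargo_vcs_info.json` next to the packaged
/// `Cargo.toml`; this function walks up from the file, reads that metadata plus
/// the manifest's `package.repository` field, and asks the hosting provider
/// registry to construct a URL pinned to the published commit.
///
/// The metadata file looks roughly like this (values are placeholders, matching
/// the `Deserialize` structs below):
///
/// ```json
/// {
///   "git": { "sha1": "<40-hex commit sha>" },
///   "path_in_vcs": "crates/example"
/// }
/// ```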
4024fn get_permalink_in_rust_registry_src(
4025 provider_registry: Arc<GitHostingProviderRegistry>,
4026 path: PathBuf,
4027 selection: Range<u32>,
4028) -> Result<url::Url> {
4029 #[derive(Deserialize)]
4030 struct CargoVcsGit {
4031 sha1: String,
4032 }
4033
4034 #[derive(Deserialize)]
4035 struct CargoVcsInfo {
4036 git: CargoVcsGit,
4037 path_in_vcs: String,
4038 }
4039
4040 #[derive(Deserialize)]
4041 struct CargoPackage {
4042 repository: String,
4043 }
4044
4045 #[derive(Deserialize)]
4046 struct CargoToml {
4047 package: CargoPackage,
4048 }
4049
4050 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4051 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4052 Some((dir, json))
4053 }) else {
4054 bail!("No .cargo_vcs_info.json found in parent directories")
4055 };
4056 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4057 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4058 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4059 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4060 .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
4061 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4062 let permalink = provider.build_permalink(
4063 remote,
4064 BuildPermalinkParams {
4065 sha: &cargo_vcs_info.git.sha1,
4066 path: &path.to_string_lossy(),
4067 selection: Some(selection),
4068 },
4069 );
4070 Ok(permalink)
4071}
4072
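/// Converts an optional blame result into its wire representation. `None`
/// becomes a response with an empty `blame_response`, which
/// `deserialize_blame_buffer_response` maps back to `None`, so the pair
/// round-trips (aside from entries whose SHAs fail to parse, which are dropped
/// on the way back).
///
/// A small sketch of that round trip (illustrative only):
///
/// ```ignore
/// let response = serialize_blame_buffer_response(None);
/// assert!(response.blame_response.is_none());
/// assert!(deserialize_blame_buffer_response(response).is_none());
/// ```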
4073fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4074 let Some(blame) = blame else {
4075 return proto::BlameBufferResponse {
4076 blame_response: None,
4077 };
4078 };
4079
4080 let entries = blame
4081 .entries
4082 .into_iter()
4083 .map(|entry| proto::BlameEntry {
4084 sha: entry.sha.as_bytes().into(),
4085 start_line: entry.range.start,
4086 end_line: entry.range.end,
4087 original_line_number: entry.original_line_number,
4088 author: entry.author.clone(),
4089 author_mail: entry.author_mail.clone(),
4090 author_time: entry.author_time,
4091 author_tz: entry.author_tz.clone(),
4092 committer: entry.committer_name.clone(),
4093 committer_mail: entry.committer_email.clone(),
4094 committer_time: entry.committer_time,
4095 committer_tz: entry.committer_tz.clone(),
4096 summary: entry.summary.clone(),
4097 previous: entry.previous.clone(),
4098 filename: entry.filename.clone(),
4099 })
4100 .collect::<Vec<_>>();
4101
4102 let messages = blame
4103 .messages
4104 .into_iter()
4105 .map(|(oid, message)| proto::CommitMessage {
4106 oid: oid.as_bytes().into(),
4107 message,
4108 })
4109 .collect::<Vec<_>>();
4110
4111 proto::BlameBufferResponse {
4112 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4113 entries,
4114 messages,
4115 remote_url: blame.remote_url,
4116 }),
4117 }
4118}
4119
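/// Inverse of [`serialize_blame_buffer_response`]. Entries and commit messages
/// whose SHA bytes don't parse as a valid `git::Oid` are silently dropped
/// rather than failing the whole response.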
4120fn deserialize_blame_buffer_response(
4121 response: proto::BlameBufferResponse,
4122) -> Option<git::blame::Blame> {
4123 let response = response.blame_response?;
4124 let entries = response
4125 .entries
4126 .into_iter()
4127 .filter_map(|entry| {
4128 Some(git::blame::BlameEntry {
4129 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4130 range: entry.start_line..entry.end_line,
4131 original_line_number: entry.original_line_number,
4132 committer_name: entry.committer,
4133 committer_time: entry.committer_time,
4134 committer_tz: entry.committer_tz,
4135 committer_email: entry.committer_mail,
4136 author: entry.author,
4137 author_mail: entry.author_mail,
4138 author_time: entry.author_time,
4139 author_tz: entry.author_tz,
4140 summary: entry.summary,
4141 previous: entry.previous,
4142 filename: entry.filename,
4143 })
4144 })
4145 .collect::<Vec<_>>();
4146
4147 let messages = response
4148 .messages
4149 .into_iter()
4150 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4151 .collect::<HashMap<_, _>>();
4152
4153 Some(Blame {
4154 entries,
4155 messages,
4156 remote_url: response.remote_url,
4157 })
4158}
4159
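/// Converts a [`git::repository::Branch`] into its wire representation.
/// `unix_timestamp` is derived from the most recent commit's timestamp, and an
/// upstream whose tracking status is gone serializes as a missing `tracking`
/// message.
///
/// A round-trip sketch with [`proto_to_branch`] (illustrative; assumes the
/// branch types support the comparisons shown):
///
/// ```ignore
/// let message = branch_to_proto(&branch);
/// let restored = proto_to_branch(&message);
/// assert_eq!(restored.name, branch.name);
/// // The round trip is lossy: `has_parent` is always restored as `true`, and
/// // a missing `tracking` message comes back as `UpstreamTracking::Gone`.
/// ```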
4160fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4161 proto::Branch {
4162 is_head: branch.is_head,
4163 name: branch.name.to_string(),
4164 unix_timestamp: branch
4165 .most_recent_commit
4166 .as_ref()
4167 .map(|commit| commit.commit_timestamp as u64),
4168 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4169 ref_name: upstream.ref_name.to_string(),
4170 tracking: upstream
4171 .tracking
4172 .status()
4173 .map(|upstream| proto::UpstreamTracking {
4174 ahead: upstream.ahead as u64,
4175 behind: upstream.behind as u64,
4176 }),
4177 }),
4178 most_recent_commit: branch
4179 .most_recent_commit
4180 .as_ref()
4181 .map(|commit| proto::CommitSummary {
4182 sha: commit.sha.to_string(),
4183 subject: commit.subject.to_string(),
4184 commit_timestamp: commit.commit_timestamp,
4185 }),
4186 }
4187}
4188
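/// Inverse of [`branch_to_proto`]. A missing `tracking` message is read back as
/// [`git::repository::UpstreamTracking::Gone`], and `has_parent` is not carried
/// over the wire, so it is always restored as `true`.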
4189fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
4190 git::repository::Branch {
4191 is_head: proto.is_head,
4192 name: proto.name.clone().into(),
4193 upstream: proto
4194 .upstream
4195 .as_ref()
4196 .map(|upstream| git::repository::Upstream {
4197 ref_name: upstream.ref_name.to_string().into(),
4198 tracking: upstream
4199 .tracking
4200 .as_ref()
4201 .map(|tracking| {
4202 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
4203 ahead: tracking.ahead as u32,
4204 behind: tracking.behind as u32,
4205 })
4206 })
4207 .unwrap_or(git::repository::UpstreamTracking::Gone),
4208 }),
4209 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
4210 git::repository::CommitSummary {
4211 sha: commit.sha.to_string().into(),
4212 subject: commit.subject.to_string().into(),
4213 commit_timestamp: commit.commit_timestamp,
4214 has_parent: true,
4215 }
4216 }),
4217 }
4218}
4219
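/// Recomputes a repository snapshot from the backend: the current branch, the
/// status sum-tree for the work directory, the merge message, and the merge
/// head SHAs. The scan id is bumped, an `Updated { full_scan: true }` event is
/// reported when the branch, statuses, or merge heads changed, and
/// `MergeHeadsChanged` is reported when the merge heads changed, which is also
/// when the cached merge-conflict set is recomputed.
///
/// A rough sketch of how a caller might apply the result (the surrounding
/// context is assumed, not taken from this file):
///
/// ```ignore
/// let (snapshot, events) =
///     compute_snapshot(id, work_dir_abs_path, prev_snapshot, backend).await?;
/// this.update(cx, |this, cx| {
///     this.snapshot = snapshot;
///     for event in events {
///         cx.emit(event);
///     }
/// })?;
/// ```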
4220async fn compute_snapshot(
4221 id: RepositoryId,
4222 work_directory_abs_path: Arc<Path>,
4223 prev_snapshot: RepositorySnapshot,
4224 backend: Arc<dyn GitRepository>,
4225) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
4226 let mut events = Vec::new();
4227 let branches = backend.branches().await?;
4228 let branch = branches.into_iter().find(|branch| branch.is_head);
4229 let statuses = backend.status(&[WORK_DIRECTORY_REPO_PATH.clone()]).await?;
4230 let merge_message = backend
4231 .merge_message()
4232 .await
        .and_then(|msg| Some(msg.lines().next()?.to_owned().into()));
4234 let merge_head_shas = backend
4235 .merge_head_shas()
4236 .into_iter()
4237 .map(SharedString::from)
4238 .collect();
4239
4240 let statuses_by_path = SumTree::from_iter(
4241 statuses
4242 .entries
4243 .iter()
4244 .map(|(repo_path, status)| StatusEntry {
4245 repo_path: repo_path.clone(),
4246 status: *status,
4247 }),
4248 &(),
4249 );
4250
4251 let merge_head_shas_changed = merge_head_shas != prev_snapshot.merge_head_shas;
4252
4253 if merge_head_shas_changed
4254 || branch != prev_snapshot.branch
4255 || statuses_by_path != prev_snapshot.statuses_by_path
4256 {
4257 events.push(RepositoryEvent::Updated { full_scan: true });
4258 }
4259
4260 let mut current_merge_conflicts = TreeSet::default();
4261 for (repo_path, status) in statuses.entries.iter() {
4262 if status.is_conflicted() {
4263 current_merge_conflicts.insert(repo_path.clone());
4264 }
4265 }
4266
    // Cache the set of merge-conflict paths so that staging or unstaging a
    // conflicted file doesn't remove it from the set; it is only recomputed
    // when the merge heads change (e.g. when the merge is committed).
4269 let mut merge_conflicts = prev_snapshot.merge_conflicts.clone();
4270 if merge_head_shas_changed {
4271 merge_conflicts = current_merge_conflicts;
4272 events.push(RepositoryEvent::MergeHeadsChanged);
4273 }
4274
4275 let snapshot = RepositorySnapshot {
4276 id,
4277 merge_message,
4278 statuses_by_path,
4279 work_directory_abs_path,
4280 scan_id: prev_snapshot.scan_id + 1,
4281 branch,
4282 merge_conflicts,
4283 merge_head_shas,
4284 };
4285
4286 Ok((snapshot, events))
4287}
4288
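/// Decodes a file status from its wire representation. Two encodings are
/// accepted: the structured `GitFileStatus` variant (untracked / ignored /
/// unmerged / tracked), and a legacy flat `GitStatus` code used when no variant
/// is present, in which Added / Modified / Deleted map to a tracked status with
/// an unmodified index and Conflict maps to a both-sides-updated unmerged
/// status.
///
/// For example (illustrative; assumes `FileStatus` supports the comparison
/// shown):
///
/// ```ignore
/// let status = status_from_proto(proto::GitStatus::Modified as i32, None).unwrap();
/// assert_eq!(
///     status,
///     FileStatus::Tracked(TrackedStatus {
///         worktree_status: StatusCode::Modified,
///         index_status: StatusCode::Unmodified,
///     })
/// );
/// ```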
4289fn status_from_proto(
4290 simple_status: i32,
4291 status: Option<proto::GitFileStatus>,
4292) -> anyhow::Result<FileStatus> {
4293 use proto::git_file_status::Variant;
4294
4295 let Some(variant) = status.and_then(|status| status.variant) else {
4296 let code = proto::GitStatus::from_i32(simple_status)
4297 .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?;
4298 let result = match code {
4299 proto::GitStatus::Added => TrackedStatus {
4300 worktree_status: StatusCode::Added,
4301 index_status: StatusCode::Unmodified,
4302 }
4303 .into(),
4304 proto::GitStatus::Modified => TrackedStatus {
4305 worktree_status: StatusCode::Modified,
4306 index_status: StatusCode::Unmodified,
4307 }
4308 .into(),
4309 proto::GitStatus::Conflict => UnmergedStatus {
4310 first_head: UnmergedStatusCode::Updated,
4311 second_head: UnmergedStatusCode::Updated,
4312 }
4313 .into(),
4314 proto::GitStatus::Deleted => TrackedStatus {
4315 worktree_status: StatusCode::Deleted,
4316 index_status: StatusCode::Unmodified,
4317 }
4318 .into(),
4319 _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")),
4320 };
4321 return Ok(result);
4322 };
4323
4324 let result = match variant {
4325 Variant::Untracked(_) => FileStatus::Untracked,
4326 Variant::Ignored(_) => FileStatus::Ignored,
4327 Variant::Unmerged(unmerged) => {
4328 let [first_head, second_head] =
4329 [unmerged.first_head, unmerged.second_head].map(|head| {
4330 let code = proto::GitStatus::from_i32(head)
4331 .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?;
4332 let result = match code {
4333 proto::GitStatus::Added => UnmergedStatusCode::Added,
4334 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
4335 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
4336 _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")),
4337 };
4338 Ok(result)
4339 });
4340 let [first_head, second_head] = [first_head?, second_head?];
4341 UnmergedStatus {
4342 first_head,
4343 second_head,
4344 }
4345 .into()
4346 }
4347 Variant::Tracked(tracked) => {
4348 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
4349 .map(|status| {
4350 let code = proto::GitStatus::from_i32(status)
4351 .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?;
4352 let result = match code {
4353 proto::GitStatus::Modified => StatusCode::Modified,
4354 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
4355 proto::GitStatus::Added => StatusCode::Added,
4356 proto::GitStatus::Deleted => StatusCode::Deleted,
4357 proto::GitStatus::Renamed => StatusCode::Renamed,
4358 proto::GitStatus::Copied => StatusCode::Copied,
4359 proto::GitStatus::Unmodified => StatusCode::Unmodified,
4360 _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")),
4361 };
4362 Ok(result)
4363 });
4364 let [index_status, worktree_status] = [index_status?, worktree_status?];
4365 TrackedStatus {
4366 index_status,
4367 worktree_status,
4368 }
4369 .into()
4370 }
4371 };
4372 Ok(result)
4373}
4374
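/// Encodes a file status for the wire. This always produces the structured
/// variant (the legacy flat code is never emitted here), so it round-trips
/// through [`status_from_proto`].
///
/// A sketch of that round trip (assumes `FileStatus` implements `PartialEq`
/// and `Debug`):
///
/// ```ignore
/// let original: FileStatus = UnmergedStatus {
///     first_head: UnmergedStatusCode::Updated,
///     second_head: UnmergedStatusCode::Added,
/// }
/// .into();
/// let wire = status_to_proto(original);
/// // The flat `simple_status` argument is ignored when a structured variant
/// // is present, so any value works here.
/// assert_eq!(status_from_proto(0, Some(wire)).unwrap(), original);
/// ```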
4375fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
4376 use proto::git_file_status::{Tracked, Unmerged, Variant};
4377
4378 let variant = match status {
4379 FileStatus::Untracked => Variant::Untracked(Default::default()),
4380 FileStatus::Ignored => Variant::Ignored(Default::default()),
4381 FileStatus::Unmerged(UnmergedStatus {
4382 first_head,
4383 second_head,
4384 }) => Variant::Unmerged(Unmerged {
4385 first_head: unmerged_status_to_proto(first_head),
4386 second_head: unmerged_status_to_proto(second_head),
4387 }),
4388 FileStatus::Tracked(TrackedStatus {
4389 index_status,
4390 worktree_status,
4391 }) => Variant::Tracked(Tracked {
4392 index_status: tracked_status_to_proto(index_status),
4393 worktree_status: tracked_status_to_proto(worktree_status),
4394 }),
4395 };
4396 proto::GitFileStatus {
4397 variant: Some(variant),
4398 }
4399}
4400
4401fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
4402 match code {
4403 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
4404 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
4405 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
4406 }
4407}
4408
4409fn tracked_status_to_proto(code: StatusCode) -> i32 {
4410 match code {
4411 StatusCode::Added => proto::GitStatus::Added as _,
4412 StatusCode::Deleted => proto::GitStatus::Deleted as _,
4413 StatusCode::Modified => proto::GitStatus::Modified as _,
4414 StatusCode::Renamed => proto::GitStatus::Renamed as _,
4415 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
4416 StatusCode::Copied => proto::GitStatus::Copied as _,
4417 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
4418 }
4419}