1pub mod git_traversal;
2
3use crate::{
4 ProjectEnvironment, ProjectItem, ProjectPath,
5 buffer_store::{BufferStore, BufferStoreEvent},
6 worktree_store::{WorktreeStore, WorktreeStoreEvent},
7};
8use anyhow::{Context as _, Result, anyhow, bail};
9use askpass::AskPassDelegate;
10use buffer_diff::{BufferDiff, BufferDiffEvent};
11use client::ProjectId;
12use collections::HashMap;
13use fs::Fs;
14use futures::{
15 FutureExt as _, StreamExt as _,
16 channel::{mpsc, oneshot},
17 future::{self, Shared},
18};
19use git::{
20 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
21 blame::Blame,
22 parse_git_remote_url,
23 repository::{
24 Branch, CommitDetails, CommitDiff, CommitFile, DiffType, GitRepository,
25 GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
26 UpstreamTrackingStatus,
27 },
28 status::{
29 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
30 },
31};
32use gpui::{
33 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
34 WeakEntity,
35};
36use language::{
37 Buffer, BufferEvent, Language, LanguageRegistry,
38 proto::{deserialize_version, serialize_version},
39};
40use parking_lot::Mutex;
41use rpc::{
42 AnyProtoClient, TypedEnvelope,
43 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
44};
45use serde::Deserialize;
46use std::{
47 cmp::Ordering,
48 collections::{BTreeSet, VecDeque},
49 future::Future,
50 mem,
51 ops::Range,
52 path::{Path, PathBuf},
53 sync::{
54 Arc,
55 atomic::{self, AtomicU64},
56 },
57 time::Instant,
58};
59use sum_tree::{Edit, SumTree, TreeSet};
60use text::{Bias, BufferId};
61use util::{ResultExt, debug_panic, post_inc};
62use worktree::{
63 File, PathKey, PathProgress, PathSummary, PathTarget, UpdatedGitRepositoriesSet, Worktree,
64};
65
66pub struct GitStore {
67 state: GitStoreState,
68 buffer_store: Entity<BufferStore>,
69 worktree_store: Entity<WorktreeStore>,
70 repositories: HashMap<RepositoryId, Entity<Repository>>,
71 active_repo_id: Option<RepositoryId>,
72 #[allow(clippy::type_complexity)]
73 loading_diffs:
74 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
75 diffs: HashMap<BufferId, Entity<BufferDiffState>>,
76 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
77 _subscriptions: Vec<Subscription>,
78}
79
80#[derive(Default)]
81struct SharedDiffs {
82 unstaged: Option<Entity<BufferDiff>>,
83 uncommitted: Option<Entity<BufferDiff>>,
84}
85
86#[derive(Default)]
87struct BufferDiffState {
88 unstaged_diff: Option<WeakEntity<BufferDiff>>,
89 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
90 recalculate_diff_task: Option<Task<Result<()>>>,
91 language: Option<Arc<Language>>,
92 language_registry: Option<Arc<LanguageRegistry>>,
93 diff_updated_futures: Vec<oneshot::Sender<()>>,
94 hunk_staging_operation_count: usize,
95
96 head_text: Option<Arc<String>>,
97 index_text: Option<Arc<String>>,
98 head_changed: bool,
99 index_changed: bool,
100 language_changed: bool,
101}
102
103#[derive(Clone, Debug)]
104enum DiffBasesChange {
105 SetIndex(Option<String>),
106 SetHead(Option<String>),
107 SetEach {
108 index: Option<String>,
109 head: Option<String>,
110 },
111 SetBoth(Option<String>),
112}
113
114#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
115enum DiffKind {
116 Unstaged,
117 Uncommitted,
118}
119
120enum GitStoreState {
121 Local {
122 next_repository_id: Arc<AtomicU64>,
123 downstream: Option<LocalDownstreamState>,
124 project_environment: Entity<ProjectEnvironment>,
125 fs: Arc<dyn Fs>,
126 },
127 Ssh {
128 upstream_client: AnyProtoClient,
129 upstream_project_id: ProjectId,
130 downstream: Option<(AnyProtoClient, ProjectId)>,
131 },
132 Remote {
133 upstream_client: AnyProtoClient,
134 upstream_project_id: ProjectId,
135 },
136}
137
138enum DownstreamUpdate {
139 UpdateRepository(RepositorySnapshot),
140 RemoveRepository(RepositoryId),
141}
142
143struct LocalDownstreamState {
144 client: AnyProtoClient,
145 project_id: ProjectId,
146 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
147 _task: Task<Result<()>>,
148}
149
150#[derive(Clone)]
151pub struct GitStoreCheckpoint {
152 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
153}
154
155#[derive(Clone, Debug, PartialEq, Eq)]
156pub struct StatusEntry {
157 pub repo_path: RepoPath,
158 pub status: FileStatus,
159}
160
161impl StatusEntry {
162 fn to_proto(&self) -> proto::StatusEntry {
163 let simple_status = match self.status {
164 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
165 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
166 FileStatus::Tracked(TrackedStatus {
167 index_status,
168 worktree_status,
169 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
170 worktree_status
171 } else {
172 index_status
173 }),
174 };
175
176 proto::StatusEntry {
177 repo_path: self.repo_path.as_ref().to_proto(),
178 simple_status,
179 status: Some(status_to_proto(self.status)),
180 }
181 }
182}
183
184impl TryFrom<proto::StatusEntry> for StatusEntry {
185 type Error = anyhow::Error;
186
187 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
188 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
189 let status = status_from_proto(value.simple_status, value.status)?;
190 Ok(Self { repo_path, status })
191 }
192}
193
194impl sum_tree::Item for StatusEntry {
195 type Summary = PathSummary<GitSummary>;
196
197 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
198 PathSummary {
199 max_path: self.repo_path.0.clone(),
200 item_summary: self.status.summary(),
201 }
202 }
203}
204
205impl sum_tree::KeyedItem for StatusEntry {
206 type Key = PathKey;
207
208 fn key(&self) -> Self::Key {
209 PathKey(self.repo_path.0.clone())
210 }
211}
212
213#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
214pub struct RepositoryId(pub u64);
215
216#[derive(Clone, Debug, PartialEq, Eq)]
217pub struct RepositorySnapshot {
218 pub id: RepositoryId,
219 pub merge_message: Option<SharedString>,
220 pub statuses_by_path: SumTree<StatusEntry>,
221 pub work_directory_abs_path: Arc<Path>,
222 pub branch: Option<Branch>,
223 pub merge_conflicts: TreeSet<RepoPath>,
224 pub merge_head_shas: Vec<SharedString>,
225 pub scan_id: u64,
226}
227
228type JobId = u64;
229
230#[derive(Clone, Debug, PartialEq, Eq)]
231pub struct JobInfo {
232 pub start: Instant,
233 pub message: SharedString,
234}
235
236pub struct Repository {
237 this: WeakEntity<Self>,
238 snapshot: RepositorySnapshot,
239 commit_message_buffer: Option<Entity<Buffer>>,
240 git_store: WeakEntity<GitStore>,
241 // For a local repository, holds paths that have had worktree events since the last status scan completed,
242 // and that should be examined during the next status scan.
243 paths_needing_status_update: BTreeSet<RepoPath>,
244 job_sender: mpsc::UnboundedSender<GitJob>,
245 active_jobs: HashMap<JobId, JobInfo>,
246 job_id: JobId,
247 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
248 latest_askpass_id: u64,
249}
250
251impl std::ops::Deref for Repository {
252 type Target = RepositorySnapshot;
253
254 fn deref(&self) -> &Self::Target {
255 &self.snapshot
256 }
257}
258
259#[derive(Clone)]
260pub enum RepositoryState {
261 Local {
262 backend: Arc<dyn GitRepository>,
263 environment: Arc<HashMap<String, String>>,
264 },
265 Remote {
266 project_id: ProjectId,
267 client: AnyProtoClient,
268 },
269}
270
271#[derive(Clone, Debug)]
272pub enum RepositoryEvent {
273 Updated { full_scan: bool },
274 MergeHeadsChanged,
275}
276
277#[derive(Clone, Debug)]
278pub struct JobsUpdated;
279
280#[derive(Debug)]
281pub enum GitStoreEvent {
282 ActiveRepositoryChanged(Option<RepositoryId>),
283 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
284 RepositoryAdded(RepositoryId),
285 RepositoryRemoved(RepositoryId),
286 IndexWriteError(anyhow::Error),
287 JobsUpdated,
288}
289
290impl EventEmitter<RepositoryEvent> for Repository {}
291impl EventEmitter<JobsUpdated> for Repository {}
292impl EventEmitter<GitStoreEvent> for GitStore {}
293
294pub struct GitJob {
295 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
296 key: Option<GitJobKey>,
297}
298
299#[derive(PartialEq, Eq)]
300enum GitJobKey {
301 WriteIndex(RepoPath),
302 BatchReadIndex,
303 RefreshStatuses,
304 ReloadGitState,
305}
306
307impl GitStore {
308 pub fn local(
309 worktree_store: &Entity<WorktreeStore>,
310 buffer_store: Entity<BufferStore>,
311 environment: Entity<ProjectEnvironment>,
312 fs: Arc<dyn Fs>,
313 cx: &mut Context<Self>,
314 ) -> Self {
315 Self::new(
316 worktree_store.clone(),
317 buffer_store,
318 GitStoreState::Local {
319 next_repository_id: Arc::new(AtomicU64::new(1)),
320 downstream: None,
321 project_environment: environment,
322 fs,
323 },
324 cx,
325 )
326 }
327
328 pub fn remote(
329 worktree_store: &Entity<WorktreeStore>,
330 buffer_store: Entity<BufferStore>,
331 upstream_client: AnyProtoClient,
332 project_id: ProjectId,
333 cx: &mut Context<Self>,
334 ) -> Self {
335 Self::new(
336 worktree_store.clone(),
337 buffer_store,
338 GitStoreState::Remote {
339 upstream_client,
340 upstream_project_id: project_id,
341 },
342 cx,
343 )
344 }
345
346 pub fn ssh(
347 worktree_store: &Entity<WorktreeStore>,
348 buffer_store: Entity<BufferStore>,
349 upstream_client: AnyProtoClient,
350 cx: &mut Context<Self>,
351 ) -> Self {
352 Self::new(
353 worktree_store.clone(),
354 buffer_store,
355 GitStoreState::Ssh {
356 upstream_client,
357 upstream_project_id: ProjectId(SSH_PROJECT_ID),
358 downstream: None,
359 },
360 cx,
361 )
362 }
363
364 fn new(
365 worktree_store: Entity<WorktreeStore>,
366 buffer_store: Entity<BufferStore>,
367 state: GitStoreState,
368 cx: &mut Context<Self>,
369 ) -> Self {
370 let _subscriptions = vec![
371 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
372 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
373 ];
374
375 GitStore {
376 state,
377 buffer_store,
378 worktree_store,
379 repositories: HashMap::default(),
380 active_repo_id: None,
381 _subscriptions,
382 loading_diffs: HashMap::default(),
383 shared_diffs: HashMap::default(),
384 diffs: HashMap::default(),
385 }
386 }
387
388 pub fn init(client: &AnyProtoClient) {
389 client.add_entity_request_handler(Self::handle_get_remotes);
390 client.add_entity_request_handler(Self::handle_get_branches);
391 client.add_entity_request_handler(Self::handle_change_branch);
392 client.add_entity_request_handler(Self::handle_create_branch);
393 client.add_entity_request_handler(Self::handle_git_init);
394 client.add_entity_request_handler(Self::handle_push);
395 client.add_entity_request_handler(Self::handle_pull);
396 client.add_entity_request_handler(Self::handle_fetch);
397 client.add_entity_request_handler(Self::handle_stage);
398 client.add_entity_request_handler(Self::handle_unstage);
399 client.add_entity_request_handler(Self::handle_commit);
400 client.add_entity_request_handler(Self::handle_reset);
401 client.add_entity_request_handler(Self::handle_show);
402 client.add_entity_request_handler(Self::handle_load_commit_diff);
403 client.add_entity_request_handler(Self::handle_checkout_files);
404 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
405 client.add_entity_request_handler(Self::handle_set_index_text);
406 client.add_entity_request_handler(Self::handle_askpass);
407 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
408 client.add_entity_request_handler(Self::handle_git_diff);
409 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
410 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
411 client.add_entity_message_handler(Self::handle_update_diff_bases);
412 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
413 client.add_entity_request_handler(Self::handle_blame_buffer);
414 client.add_entity_message_handler(Self::handle_update_repository);
415 client.add_entity_message_handler(Self::handle_remove_repository);
416 }
417
418 pub fn is_local(&self) -> bool {
419 matches!(self.state, GitStoreState::Local { .. })
420 }
421
422 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
423 match &mut self.state {
424 GitStoreState::Ssh {
425 downstream: downstream_client,
426 ..
427 } => {
428 for repo in self.repositories.values() {
429 let update = repo.read(cx).snapshot.initial_update(project_id);
430 for update in split_repository_update(update) {
431 client.send(update).log_err();
432 }
433 }
434 *downstream_client = Some((client, ProjectId(project_id)));
435 }
436 GitStoreState::Local {
437 downstream: downstream_client,
438 ..
439 } => {
440 let mut snapshots = HashMap::default();
441 let (updates_tx, mut updates_rx) = mpsc::unbounded();
442 for repo in self.repositories.values() {
443 updates_tx
444 .unbounded_send(DownstreamUpdate::UpdateRepository(
445 repo.read(cx).snapshot.clone(),
446 ))
447 .ok();
448 }
449 *downstream_client = Some(LocalDownstreamState {
450 client: client.clone(),
451 project_id: ProjectId(project_id),
452 updates_tx,
453 _task: cx.spawn(async move |this, cx| {
454 cx.background_spawn(async move {
455 while let Some(update) = updates_rx.next().await {
456 match update {
457 DownstreamUpdate::UpdateRepository(snapshot) => {
458 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
459 {
460 let update =
461 snapshot.build_update(old_snapshot, project_id);
462 *old_snapshot = snapshot;
463 for update in split_repository_update(update) {
464 client.send(update)?;
465 }
466 } else {
467 let update = snapshot.initial_update(project_id);
468 for update in split_repository_update(update) {
469 client.send(update)?;
470 }
471 snapshots.insert(snapshot.id, snapshot);
472 }
473 }
474 DownstreamUpdate::RemoveRepository(id) => {
475 client.send(proto::RemoveRepository {
476 project_id,
477 id: id.to_proto(),
478 })?;
479 }
480 }
481 }
482 anyhow::Ok(())
483 })
484 .await
485 .ok();
486 this.update(cx, |this, _| {
487 if let GitStoreState::Local {
488 downstream: downstream_client,
489 ..
490 } = &mut this.state
491 {
492 downstream_client.take();
493 } else {
494 unreachable!("unshared called on remote store");
495 }
496 })
497 }),
498 });
499 }
500 GitStoreState::Remote { .. } => {
501 debug_panic!("shared called on remote store");
502 }
503 }
504 }
505
506 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
507 match &mut self.state {
508 GitStoreState::Local {
509 downstream: downstream_client,
510 ..
511 } => {
512 downstream_client.take();
513 }
514 GitStoreState::Ssh {
515 downstream: downstream_client,
516 ..
517 } => {
518 downstream_client.take();
519 }
520 GitStoreState::Remote { .. } => {
521 debug_panic!("unshared called on remote store");
522 }
523 }
524 self.shared_diffs.clear();
525 }
526
527 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
528 self.shared_diffs.remove(peer_id);
529 }
530
531 pub fn active_repository(&self) -> Option<Entity<Repository>> {
532 self.active_repo_id
533 .as_ref()
534 .map(|id| self.repositories[&id].clone())
535 }
536
537 pub fn open_unstaged_diff(
538 &mut self,
539 buffer: Entity<Buffer>,
540 cx: &mut Context<Self>,
541 ) -> Task<Result<Entity<BufferDiff>>> {
542 let buffer_id = buffer.read(cx).remote_id();
543 if let Some(diff_state) = self.diffs.get(&buffer_id) {
544 if let Some(unstaged_diff) = diff_state
545 .read(cx)
546 .unstaged_diff
547 .as_ref()
548 .and_then(|weak| weak.upgrade())
549 {
550 if let Some(task) =
551 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
552 {
553 return cx.background_executor().spawn(async move {
554 task.await?;
555 Ok(unstaged_diff)
556 });
557 }
558 return Task::ready(Ok(unstaged_diff));
559 }
560 }
561
562 let Some((repo, repo_path)) =
563 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
564 else {
565 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
566 };
567
568 let task = self
569 .loading_diffs
570 .entry((buffer_id, DiffKind::Unstaged))
571 .or_insert_with(|| {
572 let staged_text = repo.update(cx, |repo, cx| {
573 repo.load_staged_text(buffer_id, repo_path, cx)
574 });
575 cx.spawn(async move |this, cx| {
576 Self::open_diff_internal(
577 this,
578 DiffKind::Unstaged,
579 staged_text.await.map(DiffBasesChange::SetIndex),
580 buffer,
581 cx,
582 )
583 .await
584 .map_err(Arc::new)
585 })
586 .shared()
587 })
588 .clone();
589
590 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
591 }
592
593 pub fn open_uncommitted_diff(
594 &mut self,
595 buffer: Entity<Buffer>,
596 cx: &mut Context<Self>,
597 ) -> Task<Result<Entity<BufferDiff>>> {
598 let buffer_id = buffer.read(cx).remote_id();
599
600 if let Some(diff_state) = self.diffs.get(&buffer_id) {
601 if let Some(uncommitted_diff) = diff_state
602 .read(cx)
603 .uncommitted_diff
604 .as_ref()
605 .and_then(|weak| weak.upgrade())
606 {
607 if let Some(task) =
608 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
609 {
610 return cx.background_executor().spawn(async move {
611 task.await?;
612 Ok(uncommitted_diff)
613 });
614 }
615 return Task::ready(Ok(uncommitted_diff));
616 }
617 }
618
619 let Some((repo, repo_path)) =
620 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
621 else {
622 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
623 };
624
625 let task = self
626 .loading_diffs
627 .entry((buffer_id, DiffKind::Uncommitted))
628 .or_insert_with(|| {
629 let changes = repo.update(cx, |repo, cx| {
630 repo.load_committed_text(buffer_id, repo_path, cx)
631 });
632
633 cx.spawn(async move |this, cx| {
634 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
635 .await
636 .map_err(Arc::new)
637 })
638 .shared()
639 })
640 .clone();
641
642 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
643 }
644
645 async fn open_diff_internal(
646 this: WeakEntity<Self>,
647 kind: DiffKind,
648 texts: Result<DiffBasesChange>,
649 buffer_entity: Entity<Buffer>,
650 cx: &mut AsyncApp,
651 ) -> Result<Entity<BufferDiff>> {
652 let diff_bases_change = match texts {
653 Err(e) => {
654 this.update(cx, |this, cx| {
655 let buffer = buffer_entity.read(cx);
656 let buffer_id = buffer.remote_id();
657 this.loading_diffs.remove(&(buffer_id, kind));
658 })?;
659 return Err(e);
660 }
661 Ok(change) => change,
662 };
663
664 this.update(cx, |this, cx| {
665 let buffer = buffer_entity.read(cx);
666 let buffer_id = buffer.remote_id();
667 let language = buffer.language().cloned();
668 let language_registry = buffer.language_registry();
669 let text_snapshot = buffer.text_snapshot();
670 this.loading_diffs.remove(&(buffer_id, kind));
671
672 let diff_state = this
673 .diffs
674 .entry(buffer_id)
675 .or_insert_with(|| cx.new(|_| BufferDiffState::default()));
676
677 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
678
679 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
680 diff_state.update(cx, |diff_state, cx| {
681 diff_state.language = language;
682 diff_state.language_registry = language_registry;
683
684 match kind {
685 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
686 DiffKind::Uncommitted => {
687 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
688 diff
689 } else {
690 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
691 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
692 unstaged_diff
693 };
694
695 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
696 diff_state.uncommitted_diff = Some(diff.downgrade())
697 }
698 }
699
700 let rx = diff_state.diff_bases_changed(text_snapshot, diff_bases_change, 0, cx);
701
702 anyhow::Ok(async move {
703 rx.await.ok();
704 Ok(diff)
705 })
706 })
707 })??
708 .await
709 }
710
711 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
712 let diff_state = self.diffs.get(&buffer_id)?;
713 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
714 }
715
716 pub fn get_uncommitted_diff(
717 &self,
718 buffer_id: BufferId,
719 cx: &App,
720 ) -> Option<Entity<BufferDiff>> {
721 let diff_state = self.diffs.get(&buffer_id)?;
722 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
723 }
724
725 pub fn project_path_git_status(
726 &self,
727 project_path: &ProjectPath,
728 cx: &App,
729 ) -> Option<FileStatus> {
730 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
731 Some(repo.read(cx).status_for_path(&repo_path)?.status)
732 }
733
734 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
735 let mut work_directory_abs_paths = Vec::new();
736 let mut checkpoints = Vec::new();
737 for repository in self.repositories.values() {
738 repository.update(cx, |repository, _| {
739 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
740 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
741 });
742 }
743
744 cx.background_executor().spawn(async move {
745 let checkpoints = future::try_join_all(checkpoints).await?;
746 Ok(GitStoreCheckpoint {
747 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
748 .into_iter()
749 .zip(checkpoints)
750 .collect(),
751 })
752 })
753 }
754
755 pub fn restore_checkpoint(
756 &self,
757 checkpoint: GitStoreCheckpoint,
758 cx: &mut App,
759 ) -> Task<Result<()>> {
760 let repositories_by_work_dir_abs_path = self
761 .repositories
762 .values()
763 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
764 .collect::<HashMap<_, _>>();
765
766 let mut tasks = Vec::new();
767 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
768 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
769 let restore = repository.update(cx, |repository, _| {
770 repository.restore_checkpoint(checkpoint)
771 });
772 tasks.push(async move { restore.await? });
773 }
774 }
775 cx.background_spawn(async move {
776 future::try_join_all(tasks).await?;
777 Ok(())
778 })
779 }
780
781 /// Compares two checkpoints, returning true if they are equal.
782 pub fn compare_checkpoints(
783 &self,
784 left: GitStoreCheckpoint,
785 mut right: GitStoreCheckpoint,
786 cx: &mut App,
787 ) -> Task<Result<bool>> {
788 let repositories_by_work_dir_abs_path = self
789 .repositories
790 .values()
791 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
792 .collect::<HashMap<_, _>>();
793
794 let mut tasks = Vec::new();
795 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
796 if let Some(right_checkpoint) = right
797 .checkpoints_by_work_dir_abs_path
798 .remove(&work_dir_abs_path)
799 {
800 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
801 {
802 let compare = repository.update(cx, |repository, _| {
803 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
804 });
805
806 tasks.push(async move { compare.await? });
807 }
808 } else {
809 return Task::ready(Ok(false));
810 }
811 }
812 cx.background_spawn(async move {
813 Ok(future::try_join_all(tasks)
814 .await?
815 .into_iter()
816 .all(|result| result))
817 })
818 }
819
820 pub fn delete_checkpoint(
821 &self,
822 checkpoint: GitStoreCheckpoint,
823 cx: &mut App,
824 ) -> Task<Result<()>> {
825 let repositories_by_work_directory_abs_path = self
826 .repositories
827 .values()
828 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
829 .collect::<HashMap<_, _>>();
830
831 let mut tasks = Vec::new();
832 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
833 if let Some(repository) =
834 repositories_by_work_directory_abs_path.get(&work_dir_abs_path)
835 {
836 let delete = repository.update(cx, |this, _| this.delete_checkpoint(checkpoint));
837 tasks.push(async move { delete.await? });
838 }
839 }
840 cx.background_spawn(async move {
841 future::try_join_all(tasks).await?;
842 Ok(())
843 })
844 }
845
846 /// Blames a buffer.
847 pub fn blame_buffer(
848 &self,
849 buffer: &Entity<Buffer>,
850 version: Option<clock::Global>,
851 cx: &mut App,
852 ) -> Task<Result<Option<Blame>>> {
853 let buffer = buffer.read(cx);
854 let Some((repo, repo_path)) =
855 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
856 else {
857 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
858 };
859 let content = match &version {
860 Some(version) => buffer.rope_for_version(version).clone(),
861 None => buffer.as_rope().clone(),
862 };
863 let version = version.unwrap_or(buffer.version());
864 let buffer_id = buffer.remote_id();
865
866 let rx = repo.update(cx, |repo, _| {
867 repo.send_job(None, move |state, _| async move {
868 match state {
869 RepositoryState::Local { backend, .. } => backend
870 .blame(repo_path.clone(), content)
871 .await
872 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
873 .map(Some),
874 RepositoryState::Remote { project_id, client } => {
875 let response = client
876 .request(proto::BlameBuffer {
877 project_id: project_id.to_proto(),
878 buffer_id: buffer_id.into(),
879 version: serialize_version(&version),
880 })
881 .await?;
882 Ok(deserialize_blame_buffer_response(response))
883 }
884 }
885 })
886 });
887
888 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
889 }
890
891 pub fn get_permalink_to_line(
892 &self,
893 buffer: &Entity<Buffer>,
894 selection: Range<u32>,
895 cx: &mut App,
896 ) -> Task<Result<url::Url>> {
897 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
898 return Task::ready(Err(anyhow!("buffer has no file")));
899 };
900
901 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
902 &(file.worktree.read(cx).id(), file.path.clone()).into(),
903 cx,
904 ) else {
905 // If we're not in a Git repo, check whether this is a Rust source
906 // file in the Cargo registry (presumably opened with go-to-definition
907 // from a normal Rust file). If so, we can put together a permalink
908 // using crate metadata.
909 if buffer
910 .read(cx)
911 .language()
912 .is_none_or(|lang| lang.name() != "Rust".into())
913 {
914 return Task::ready(Err(anyhow!("no permalink available")));
915 }
916 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
917 return Task::ready(Err(anyhow!("no permalink available")));
918 };
919 return cx.spawn(async move |cx| {
920 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
921 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
922 .map_err(|_| anyhow!("no permalink available"))
923 });
924
925 // TODO remote case
926 };
927
928 let buffer_id = buffer.read(cx).remote_id();
929 let branch = repo.read(cx).branch.clone();
930 let remote = branch
931 .as_ref()
932 .and_then(|b| b.upstream.as_ref())
933 .and_then(|b| b.remote_name())
934 .unwrap_or("origin")
935 .to_string();
936
937 let rx = repo.update(cx, |repo, _| {
938 repo.send_job(None, move |state, cx| async move {
939 match state {
940 RepositoryState::Local { backend, .. } => {
941 let origin_url = backend
942 .remote_url(&remote)
943 .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;
944
945 let sha = backend
946 .head_sha()
947 .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
948
949 let provider_registry =
950 cx.update(GitHostingProviderRegistry::default_global)?;
951
952 let (provider, remote) =
953 parse_git_remote_url(provider_registry, &origin_url)
954 .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;
955
956 let path = repo_path
957 .to_str()
958 .ok_or_else(|| anyhow!("failed to convert path to string"))?;
959
960 Ok(provider.build_permalink(
961 remote,
962 BuildPermalinkParams {
963 sha: &sha,
964 path,
965 selection: Some(selection),
966 },
967 ))
968 }
969 RepositoryState::Remote { project_id, client } => {
970 let response = client
971 .request(proto::GetPermalinkToLine {
972 project_id: project_id.to_proto(),
973 buffer_id: buffer_id.into(),
974 selection: Some(proto::Range {
975 start: selection.start as u64,
976 end: selection.end as u64,
977 }),
978 })
979 .await?;
980
981 url::Url::parse(&response.permalink).context("failed to parse permalink")
982 }
983 }
984 })
985 });
986 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
987 }
988
989 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
990 match &self.state {
991 GitStoreState::Local {
992 downstream: downstream_client,
993 ..
994 } => downstream_client
995 .as_ref()
996 .map(|state| (state.client.clone(), state.project_id)),
997 GitStoreState::Ssh {
998 downstream: downstream_client,
999 ..
1000 } => downstream_client.clone(),
1001 GitStoreState::Remote { .. } => None,
1002 }
1003 }
1004
1005 fn upstream_client(&self) -> Option<AnyProtoClient> {
1006 match &self.state {
1007 GitStoreState::Local { .. } => None,
1008 GitStoreState::Ssh {
1009 upstream_client, ..
1010 }
1011 | GitStoreState::Remote {
1012 upstream_client, ..
1013 } => Some(upstream_client.clone()),
1014 }
1015 }
1016
1017 fn on_worktree_store_event(
1018 &mut self,
1019 worktree_store: Entity<WorktreeStore>,
1020 event: &WorktreeStoreEvent,
1021 cx: &mut Context<Self>,
1022 ) {
1023 let GitStoreState::Local {
1024 project_environment,
1025 downstream,
1026 next_repository_id,
1027 fs,
1028 } = &self.state
1029 else {
1030 return;
1031 };
1032
1033 match event {
1034 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1035 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
1036 for (relative_path, _, _) in updated_entries.iter() {
1037 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1038 &(*worktree_id, relative_path.clone()).into(),
1039 cx,
1040 ) else {
1041 continue;
1042 };
1043 paths_by_git_repo.entry(repo).or_default().push(repo_path)
1044 }
1045
1046 for (repo, paths) in paths_by_git_repo {
1047 repo.update(cx, |repo, cx| {
1048 repo.paths_changed(
1049 paths,
1050 downstream
1051 .as_ref()
1052 .map(|downstream| downstream.updates_tx.clone()),
1053 cx,
1054 );
1055 });
1056 }
1057 }
1058 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1059 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1060 else {
1061 return;
1062 };
1063 if !worktree.read(cx).is_visible() {
1064 log::debug!(
1065 "not adding repositories for local worktree {:?} because it's not visible",
1066 worktree.read(cx).abs_path()
1067 );
1068 return;
1069 }
1070 self.update_repositories_from_worktree(
1071 project_environment.clone(),
1072 next_repository_id.clone(),
1073 downstream
1074 .as_ref()
1075 .map(|downstream| downstream.updates_tx.clone()),
1076 changed_repos.clone(),
1077 fs.clone(),
1078 cx,
1079 );
1080 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1081 }
1082 _ => {}
1083 }
1084 }
1085
1086 fn on_repository_event(
1087 &mut self,
1088 repo: Entity<Repository>,
1089 event: &RepositoryEvent,
1090 cx: &mut Context<Self>,
1091 ) {
1092 let id = repo.read(cx).id;
1093 cx.emit(GitStoreEvent::RepositoryUpdated(
1094 id,
1095 event.clone(),
1096 self.active_repo_id == Some(id),
1097 ))
1098 }
1099
1100 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1101 cx.emit(GitStoreEvent::JobsUpdated)
1102 }
1103
1104 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1105 fn update_repositories_from_worktree(
1106 &mut self,
1107 project_environment: Entity<ProjectEnvironment>,
1108 next_repository_id: Arc<AtomicU64>,
1109 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1110 updated_git_repositories: UpdatedGitRepositoriesSet,
1111 fs: Arc<dyn Fs>,
1112 cx: &mut Context<Self>,
1113 ) {
1114 let mut removed_ids = Vec::new();
1115 for update in updated_git_repositories.iter() {
1116 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1117 let existing_work_directory_abs_path =
1118 repo.read(cx).work_directory_abs_path.clone();
1119 Some(&existing_work_directory_abs_path)
1120 == update.old_work_directory_abs_path.as_ref()
1121 || Some(&existing_work_directory_abs_path)
1122 == update.new_work_directory_abs_path.as_ref()
1123 }) {
1124 if let Some(new_work_directory_abs_path) =
1125 update.new_work_directory_abs_path.clone()
1126 {
1127 existing.update(cx, |existing, cx| {
1128 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1129 existing.schedule_scan(updates_tx.clone(), cx);
1130 });
1131 } else {
1132 removed_ids.push(*id);
1133 }
1134 } else if let Some((work_directory_abs_path, dot_git_abs_path)) = update
1135 .new_work_directory_abs_path
1136 .clone()
1137 .zip(update.dot_git_abs_path.clone())
1138 {
1139 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1140 let git_store = cx.weak_entity();
1141 let repo = cx.new(|cx| {
1142 let mut repo = Repository::local(
1143 id,
1144 work_directory_abs_path,
1145 dot_git_abs_path,
1146 project_environment.downgrade(),
1147 fs.clone(),
1148 git_store,
1149 cx,
1150 );
1151 repo.schedule_scan(updates_tx.clone(), cx);
1152 repo
1153 });
1154 self._subscriptions
1155 .push(cx.subscribe(&repo, Self::on_repository_event));
1156 self._subscriptions
1157 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1158 self.repositories.insert(id, repo);
1159 cx.emit(GitStoreEvent::RepositoryAdded(id));
1160 self.active_repo_id.get_or_insert_with(|| {
1161 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1162 id
1163 });
1164 }
1165 }
1166
1167 for id in removed_ids {
1168 if self.active_repo_id == Some(id) {
1169 self.active_repo_id = None;
1170 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1171 }
1172 self.repositories.remove(&id);
1173 if let Some(updates_tx) = updates_tx.as_ref() {
1174 updates_tx
1175 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1176 .ok();
1177 }
1178 }
1179 }
1180
1181 fn on_buffer_store_event(
1182 &mut self,
1183 _: Entity<BufferStore>,
1184 event: &BufferStoreEvent,
1185 cx: &mut Context<Self>,
1186 ) {
1187 match event {
1188 BufferStoreEvent::BufferAdded(buffer) => {
1189 cx.subscribe(&buffer, |this, buffer, event, cx| {
1190 if let BufferEvent::LanguageChanged = event {
1191 let buffer_id = buffer.read(cx).remote_id();
1192 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1193 diff_state.update(cx, |diff_state, cx| {
1194 diff_state.buffer_language_changed(buffer, cx);
1195 });
1196 }
1197 }
1198 })
1199 .detach();
1200 }
1201 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1202 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1203 diffs.remove(buffer_id);
1204 }
1205 }
1206 BufferStoreEvent::BufferDropped(buffer_id) => {
1207 self.diffs.remove(&buffer_id);
1208 for diffs in self.shared_diffs.values_mut() {
1209 diffs.remove(buffer_id);
1210 }
1211 }
1212
1213 _ => {}
1214 }
1215 }
1216
1217 pub fn recalculate_buffer_diffs(
1218 &mut self,
1219 buffers: Vec<Entity<Buffer>>,
1220 cx: &mut Context<Self>,
1221 ) -> impl Future<Output = ()> + use<> {
1222 let mut futures = Vec::new();
1223 for buffer in buffers {
1224 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1225 let buffer = buffer.read(cx).text_snapshot();
1226 futures.push(diff_state.update(cx, |diff_state, cx| {
1227 diff_state.recalculate_diffs(
1228 buffer,
1229 diff_state.hunk_staging_operation_count,
1230 cx,
1231 )
1232 }));
1233 }
1234 }
1235 async move {
1236 futures::future::join_all(futures).await;
1237 }
1238 }
1239
1240 fn on_buffer_diff_event(
1241 &mut self,
1242 diff: Entity<buffer_diff::BufferDiff>,
1243 event: &BufferDiffEvent,
1244 cx: &mut Context<Self>,
1245 ) {
1246 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1247 let buffer_id = diff.read(cx).buffer_id;
1248 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1249 diff_state.update(cx, |diff_state, _| {
1250 diff_state.hunk_staging_operation_count += 1;
1251 });
1252 }
1253 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1254 let recv = repo.update(cx, |repo, cx| {
1255 log::debug!("updating index text for buffer {}", path.display());
1256 repo.spawn_set_index_text_job(
1257 path,
1258 new_index_text.as_ref().map(|rope| rope.to_string()),
1259 cx,
1260 )
1261 });
1262 let diff = diff.downgrade();
1263 cx.spawn(async move |this, cx| {
1264 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1265 diff.update(cx, |diff, cx| {
1266 diff.clear_pending_hunks(cx);
1267 })
1268 .ok();
1269 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1270 .ok();
1271 }
1272 })
1273 .detach();
1274 }
1275 }
1276 }
1277
1278 fn local_worktree_git_repos_changed(
1279 &mut self,
1280 worktree: Entity<Worktree>,
1281 changed_repos: &UpdatedGitRepositoriesSet,
1282 cx: &mut Context<Self>,
1283 ) {
1284 log::debug!("local worktree repos changed");
1285 debug_assert!(worktree.read(cx).is_local());
1286
1287 let mut diff_state_updates = HashMap::<Entity<Repository>, Vec<_>>::default();
1288 for (buffer_id, diff_state) in &self.diffs {
1289 let Some(buffer) = self.buffer_store.read(cx).get(*buffer_id) else {
1290 continue;
1291 };
1292 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1293 continue;
1294 };
1295 if file.worktree != worktree {
1296 continue;
1297 }
1298 let Some((repo, repo_path)) =
1299 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
1300 else {
1301 continue;
1302 };
1303 if !changed_repos.iter().any(|update| {
1304 update.old_work_directory_abs_path.as_ref()
1305 == Some(&repo.read(cx).work_directory_abs_path)
1306 || update.new_work_directory_abs_path.as_ref()
1307 == Some(&repo.read(cx).work_directory_abs_path)
1308 }) {
1309 continue;
1310 }
1311
1312 let diff_state = diff_state.read(cx);
1313 let has_unstaged_diff = diff_state
1314 .unstaged_diff
1315 .as_ref()
1316 .is_some_and(|diff| diff.is_upgradable());
1317 let has_uncommitted_diff = diff_state
1318 .uncommitted_diff
1319 .as_ref()
1320 .is_some_and(|set| set.is_upgradable());
1321
1322 let update = (
1323 buffer,
1324 repo_path,
1325 has_unstaged_diff.then(|| diff_state.index_text.clone()),
1326 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
1327 diff_state.hunk_staging_operation_count,
1328 );
1329 diff_state_updates.entry(repo).or_default().push(update);
1330 }
1331
1332 if diff_state_updates.is_empty() {
1333 return;
1334 }
1335
1336 for (repo, repo_diff_state_updates) in diff_state_updates.into_iter() {
1337 let git_store = cx.weak_entity();
1338
1339 let _ = repo.update(cx, |repo, _| {
1340 repo.send_keyed_job(
1341 Some(GitJobKey::BatchReadIndex),
1342 None,
1343 |state, mut cx| async move {
1344 let RepositoryState::Local { backend, .. } = state else {
1345 log::error!("tried to recompute diffs for a non-local repository");
1346 return;
1347 };
1348 let mut diff_bases_changes_by_buffer = Vec::new();
1349 for (
1350 buffer,
1351 repo_path,
1352 current_index_text,
1353 current_head_text,
1354 hunk_staging_operation_count,
1355 ) in &repo_diff_state_updates
1356 {
1357 let index_text = if current_index_text.is_some() {
1358 backend.load_index_text(repo_path.clone()).await
1359 } else {
1360 None
1361 };
1362 let head_text = if current_head_text.is_some() {
1363 backend.load_committed_text(repo_path.clone()).await
1364 } else {
1365 None
1366 };
1367
1368 // Avoid triggering a diff update if the base text has not changed.
1369 if let Some((current_index, current_head)) =
1370 current_index_text.as_ref().zip(current_head_text.as_ref())
1371 {
1372 if current_index.as_deref() == index_text.as_ref()
1373 && current_head.as_deref() == head_text.as_ref()
1374 {
1375 continue;
1376 }
1377 }
1378
1379 let diff_bases_change =
1380 match (current_index_text.is_some(), current_head_text.is_some()) {
1381 (true, true) => Some(if index_text == head_text {
1382 DiffBasesChange::SetBoth(head_text)
1383 } else {
1384 DiffBasesChange::SetEach {
1385 index: index_text,
1386 head: head_text,
1387 }
1388 }),
1389 (true, false) => Some(DiffBasesChange::SetIndex(index_text)),
1390 (false, true) => Some(DiffBasesChange::SetHead(head_text)),
1391 (false, false) => None,
1392 };
1393
1394 diff_bases_changes_by_buffer.push((
1395 buffer,
1396 diff_bases_change,
1397 *hunk_staging_operation_count,
1398 ))
1399 }
1400
1401 git_store
1402 .update(&mut cx, |git_store, cx| {
1403 for (buffer, diff_bases_change, hunk_staging_operation_count) in
1404 diff_bases_changes_by_buffer
1405 {
1406 let Some(diff_state) =
1407 git_store.diffs.get(&buffer.read(cx).remote_id())
1408 else {
1409 continue;
1410 };
1411 let Some(diff_bases_change) = diff_bases_change else {
1412 continue;
1413 };
1414
1415 let downstream_client = git_store.downstream_client();
1416 diff_state.update(cx, |diff_state, cx| {
1417 use proto::update_diff_bases::Mode;
1418
1419 let buffer = buffer.read(cx);
1420 if let Some((client, project_id)) = downstream_client {
1421 let (staged_text, committed_text, mode) =
1422 match diff_bases_change.clone() {
1423 DiffBasesChange::SetIndex(index) => {
1424 (index, None, Mode::IndexOnly)
1425 }
1426 DiffBasesChange::SetHead(head) => {
1427 (None, head, Mode::HeadOnly)
1428 }
1429 DiffBasesChange::SetEach { index, head } => {
1430 (index, head, Mode::IndexAndHead)
1431 }
1432 DiffBasesChange::SetBoth(text) => {
1433 (None, text, Mode::IndexMatchesHead)
1434 }
1435 };
1436 let message = proto::UpdateDiffBases {
1437 project_id: project_id.to_proto(),
1438 buffer_id: buffer.remote_id().to_proto(),
1439 staged_text,
1440 committed_text,
1441 mode: mode as i32,
1442 };
1443
1444 client.send(message).log_err();
1445 }
1446
1447 let _ = diff_state.diff_bases_changed(
1448 buffer.text_snapshot(),
1449 diff_bases_change,
1450 hunk_staging_operation_count,
1451 cx,
1452 );
1453 });
1454 }
1455 })
1456 .ok();
1457 },
1458 )
1459 });
1460 }
1461 }
1462
1463 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1464 &self.repositories
1465 }
1466
1467 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1468 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1469 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1470 Some(status.status)
1471 }
1472
1473 pub fn repository_and_path_for_buffer_id(
1474 &self,
1475 buffer_id: BufferId,
1476 cx: &App,
1477 ) -> Option<(Entity<Repository>, RepoPath)> {
1478 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1479 let project_path = buffer.read(cx).project_path(cx)?;
1480 self.repository_and_path_for_project_path(&project_path, cx)
1481 }
1482
1483 pub fn repository_and_path_for_project_path(
1484 &self,
1485 path: &ProjectPath,
1486 cx: &App,
1487 ) -> Option<(Entity<Repository>, RepoPath)> {
1488 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1489 self.repositories
1490 .values()
1491 .filter_map(|repo| {
1492 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1493 Some((repo.clone(), repo_path))
1494 })
1495 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1496 }
1497
1498 pub fn git_init(
1499 &self,
1500 path: Arc<Path>,
1501 fallback_branch_name: String,
1502 cx: &App,
1503 ) -> Task<Result<()>> {
1504 match &self.state {
1505 GitStoreState::Local { fs, .. } => {
1506 let fs = fs.clone();
1507 cx.background_executor()
1508 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1509 }
1510 GitStoreState::Ssh {
1511 upstream_client,
1512 upstream_project_id: project_id,
1513 ..
1514 }
1515 | GitStoreState::Remote {
1516 upstream_client,
1517 upstream_project_id: project_id,
1518 ..
1519 } => {
1520 let client = upstream_client.clone();
1521 let project_id = *project_id;
1522 cx.background_executor().spawn(async move {
1523 client
1524 .request(proto::GitInit {
1525 project_id: project_id.0,
1526 abs_path: path.to_string_lossy().to_string(),
1527 fallback_branch_name,
1528 })
1529 .await?;
1530 Ok(())
1531 })
1532 }
1533 }
1534 }
1535
1536 async fn handle_update_repository(
1537 this: Entity<Self>,
1538 envelope: TypedEnvelope<proto::UpdateRepository>,
1539 mut cx: AsyncApp,
1540 ) -> Result<()> {
1541 this.update(&mut cx, |this, cx| {
1542 let mut update = envelope.payload;
1543
1544 let id = RepositoryId::from_proto(update.id);
1545 let client = this
1546 .upstream_client()
1547 .context("no upstream client")?
1548 .clone();
1549
1550 let mut is_new = false;
1551 let repo = this.repositories.entry(id).or_insert_with(|| {
1552 is_new = true;
1553 let git_store = cx.weak_entity();
1554 cx.new(|cx| {
1555 Repository::remote(
1556 id,
1557 Path::new(&update.abs_path).into(),
1558 ProjectId(update.project_id),
1559 client,
1560 git_store,
1561 cx,
1562 )
1563 })
1564 });
1565 if is_new {
1566 this._subscriptions
1567 .push(cx.subscribe(&repo, Self::on_repository_event))
1568 }
1569
1570 repo.update(cx, {
1571 let update = update.clone();
1572 |repo, cx| repo.apply_remote_update(update, cx)
1573 })?;
1574
1575 this.active_repo_id.get_or_insert_with(|| {
1576 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1577 id
1578 });
1579
1580 if let Some((client, project_id)) = this.downstream_client() {
1581 update.project_id = project_id.to_proto();
1582 client.send(update).log_err();
1583 }
1584 Ok(())
1585 })?
1586 }
1587
1588 async fn handle_remove_repository(
1589 this: Entity<Self>,
1590 envelope: TypedEnvelope<proto::RemoveRepository>,
1591 mut cx: AsyncApp,
1592 ) -> Result<()> {
1593 this.update(&mut cx, |this, cx| {
1594 let mut update = envelope.payload;
1595 let id = RepositoryId::from_proto(update.id);
1596 this.repositories.remove(&id);
1597 if let Some((client, project_id)) = this.downstream_client() {
1598 update.project_id = project_id.to_proto();
1599 client.send(update).log_err();
1600 }
1601 if this.active_repo_id == Some(id) {
1602 this.active_repo_id = None;
1603 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1604 }
1605 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1606 })
1607 }
1608
1609 async fn handle_git_init(
1610 this: Entity<Self>,
1611 envelope: TypedEnvelope<proto::GitInit>,
1612 cx: AsyncApp,
1613 ) -> Result<proto::Ack> {
1614 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1615 let name = envelope.payload.fallback_branch_name;
1616 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1617 .await?;
1618
1619 Ok(proto::Ack {})
1620 }
1621
1622 async fn handle_fetch(
1623 this: Entity<Self>,
1624 envelope: TypedEnvelope<proto::Fetch>,
1625 mut cx: AsyncApp,
1626 ) -> Result<proto::RemoteMessageResponse> {
1627 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1628 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1629 let askpass_id = envelope.payload.askpass_id;
1630
1631 let askpass = make_remote_delegate(
1632 this,
1633 envelope.payload.project_id,
1634 repository_id,
1635 askpass_id,
1636 &mut cx,
1637 );
1638
1639 let remote_output = repository_handle
1640 .update(&mut cx, |repository_handle, cx| {
1641 repository_handle.fetch(askpass, cx)
1642 })?
1643 .await??;
1644
1645 Ok(proto::RemoteMessageResponse {
1646 stdout: remote_output.stdout,
1647 stderr: remote_output.stderr,
1648 })
1649 }
1650
1651 async fn handle_push(
1652 this: Entity<Self>,
1653 envelope: TypedEnvelope<proto::Push>,
1654 mut cx: AsyncApp,
1655 ) -> Result<proto::RemoteMessageResponse> {
1656 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1657 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1658
1659 let askpass_id = envelope.payload.askpass_id;
1660 let askpass = make_remote_delegate(
1661 this,
1662 envelope.payload.project_id,
1663 repository_id,
1664 askpass_id,
1665 &mut cx,
1666 );
1667
1668 let options = envelope
1669 .payload
1670 .options
1671 .as_ref()
1672 .map(|_| match envelope.payload.options() {
1673 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1674 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1675 });
1676
1677 let branch_name = envelope.payload.branch_name.into();
1678 let remote_name = envelope.payload.remote_name.into();
1679
1680 let remote_output = repository_handle
1681 .update(&mut cx, |repository_handle, cx| {
1682 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1683 })?
1684 .await??;
1685 Ok(proto::RemoteMessageResponse {
1686 stdout: remote_output.stdout,
1687 stderr: remote_output.stderr,
1688 })
1689 }
1690
1691 async fn handle_pull(
1692 this: Entity<Self>,
1693 envelope: TypedEnvelope<proto::Pull>,
1694 mut cx: AsyncApp,
1695 ) -> Result<proto::RemoteMessageResponse> {
1696 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1697 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1698 let askpass_id = envelope.payload.askpass_id;
1699 let askpass = make_remote_delegate(
1700 this,
1701 envelope.payload.project_id,
1702 repository_id,
1703 askpass_id,
1704 &mut cx,
1705 );
1706
1707 let branch_name = envelope.payload.branch_name.into();
1708 let remote_name = envelope.payload.remote_name.into();
1709
1710 let remote_message = repository_handle
1711 .update(&mut cx, |repository_handle, cx| {
1712 repository_handle.pull(branch_name, remote_name, askpass, cx)
1713 })?
1714 .await??;
1715
1716 Ok(proto::RemoteMessageResponse {
1717 stdout: remote_message.stdout,
1718 stderr: remote_message.stderr,
1719 })
1720 }
1721
1722 async fn handle_stage(
1723 this: Entity<Self>,
1724 envelope: TypedEnvelope<proto::Stage>,
1725 mut cx: AsyncApp,
1726 ) -> Result<proto::Ack> {
1727 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1728 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1729
1730 let entries = envelope
1731 .payload
1732 .paths
1733 .into_iter()
1734 .map(PathBuf::from)
1735 .map(RepoPath::new)
1736 .collect();
1737
1738 repository_handle
1739 .update(&mut cx, |repository_handle, cx| {
1740 repository_handle.stage_entries(entries, cx)
1741 })?
1742 .await?;
1743 Ok(proto::Ack {})
1744 }
1745
1746 async fn handle_unstage(
1747 this: Entity<Self>,
1748 envelope: TypedEnvelope<proto::Unstage>,
1749 mut cx: AsyncApp,
1750 ) -> Result<proto::Ack> {
1751 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1752 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1753
1754 let entries = envelope
1755 .payload
1756 .paths
1757 .into_iter()
1758 .map(PathBuf::from)
1759 .map(RepoPath::new)
1760 .collect();
1761
1762 repository_handle
1763 .update(&mut cx, |repository_handle, cx| {
1764 repository_handle.unstage_entries(entries, cx)
1765 })?
1766 .await?;
1767
1768 Ok(proto::Ack {})
1769 }
1770
1771 async fn handle_set_index_text(
1772 this: Entity<Self>,
1773 envelope: TypedEnvelope<proto::SetIndexText>,
1774 mut cx: AsyncApp,
1775 ) -> Result<proto::Ack> {
1776 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1777 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1778
1779 repository_handle
1780 .update(&mut cx, |repository_handle, cx| {
1781 repository_handle.spawn_set_index_text_job(
1782 RepoPath::from_str(&envelope.payload.path),
1783 envelope.payload.text,
1784 cx,
1785 )
1786 })?
1787 .await??;
1788 Ok(proto::Ack {})
1789 }
1790
1791 async fn handle_commit(
1792 this: Entity<Self>,
1793 envelope: TypedEnvelope<proto::Commit>,
1794 mut cx: AsyncApp,
1795 ) -> Result<proto::Ack> {
1796 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1797 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1798
1799 let message = SharedString::from(envelope.payload.message);
1800 let name = envelope.payload.name.map(SharedString::from);
1801 let email = envelope.payload.email.map(SharedString::from);
1802
1803 repository_handle
1804 .update(&mut cx, |repository_handle, cx| {
1805 repository_handle.commit(message, name.zip(email), cx)
1806 })?
1807 .await??;
1808 Ok(proto::Ack {})
1809 }
1810
1811 async fn handle_get_remotes(
1812 this: Entity<Self>,
1813 envelope: TypedEnvelope<proto::GetRemotes>,
1814 mut cx: AsyncApp,
1815 ) -> Result<proto::GetRemotesResponse> {
1816 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1817 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1818
1819 let branch_name = envelope.payload.branch_name;
1820
1821 let remotes = repository_handle
1822 .update(&mut cx, |repository_handle, _| {
1823 repository_handle.get_remotes(branch_name)
1824 })?
1825 .await??;
1826
1827 Ok(proto::GetRemotesResponse {
1828 remotes: remotes
1829 .into_iter()
1830 .map(|remotes| proto::get_remotes_response::Remote {
1831 name: remotes.name.to_string(),
1832 })
1833 .collect::<Vec<_>>(),
1834 })
1835 }
1836
1837 async fn handle_get_branches(
1838 this: Entity<Self>,
1839 envelope: TypedEnvelope<proto::GitGetBranches>,
1840 mut cx: AsyncApp,
1841 ) -> Result<proto::GitBranchesResponse> {
1842 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1843 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1844
1845 let branches = repository_handle
1846 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1847 .await??;
1848
1849 Ok(proto::GitBranchesResponse {
1850 branches: branches
1851 .into_iter()
1852 .map(|branch| branch_to_proto(&branch))
1853 .collect::<Vec<_>>(),
1854 })
1855 }
1856 async fn handle_create_branch(
1857 this: Entity<Self>,
1858 envelope: TypedEnvelope<proto::GitCreateBranch>,
1859 mut cx: AsyncApp,
1860 ) -> Result<proto::Ack> {
1861 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1862 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1863 let branch_name = envelope.payload.branch_name;
1864
1865 repository_handle
1866 .update(&mut cx, |repository_handle, _| {
1867 repository_handle.create_branch(branch_name)
1868 })?
1869 .await??;
1870
1871 Ok(proto::Ack {})
1872 }
1873
1874 async fn handle_change_branch(
1875 this: Entity<Self>,
1876 envelope: TypedEnvelope<proto::GitChangeBranch>,
1877 mut cx: AsyncApp,
1878 ) -> Result<proto::Ack> {
1879 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1880 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1881 let branch_name = envelope.payload.branch_name;
1882
1883 repository_handle
1884 .update(&mut cx, |repository_handle, _| {
1885 repository_handle.change_branch(branch_name)
1886 })?
1887 .await??;
1888
1889 Ok(proto::Ack {})
1890 }
1891
1892 async fn handle_show(
1893 this: Entity<Self>,
1894 envelope: TypedEnvelope<proto::GitShow>,
1895 mut cx: AsyncApp,
1896 ) -> Result<proto::GitCommitDetails> {
1897 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1898 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1899
1900 let commit = repository_handle
1901 .update(&mut cx, |repository_handle, _| {
1902 repository_handle.show(envelope.payload.commit)
1903 })?
1904 .await??;
1905 Ok(proto::GitCommitDetails {
1906 sha: commit.sha.into(),
1907 message: commit.message.into(),
1908 commit_timestamp: commit.commit_timestamp,
1909 author_email: commit.author_email.into(),
1910 author_name: commit.author_name.into(),
1911 })
1912 }
1913
1914 async fn handle_load_commit_diff(
1915 this: Entity<Self>,
1916 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1917 mut cx: AsyncApp,
1918 ) -> Result<proto::LoadCommitDiffResponse> {
1919 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1920 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1921
1922 let commit_diff = repository_handle
1923 .update(&mut cx, |repository_handle, _| {
1924 repository_handle.load_commit_diff(envelope.payload.commit)
1925 })?
1926 .await??;
1927 Ok(proto::LoadCommitDiffResponse {
1928 files: commit_diff
1929 .files
1930 .into_iter()
1931 .map(|file| proto::CommitFile {
1932 path: file.path.to_string(),
1933 old_text: file.old_text,
1934 new_text: file.new_text,
1935 })
1936 .collect(),
1937 })
1938 }
1939
1940 async fn handle_reset(
1941 this: Entity<Self>,
1942 envelope: TypedEnvelope<proto::GitReset>,
1943 mut cx: AsyncApp,
1944 ) -> Result<proto::Ack> {
1945 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1946 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1947
1948 let mode = match envelope.payload.mode() {
1949 git_reset::ResetMode::Soft => ResetMode::Soft,
1950 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1951 };
1952
1953 repository_handle
1954 .update(&mut cx, |repository_handle, cx| {
1955 repository_handle.reset(envelope.payload.commit, mode, cx)
1956 })?
1957 .await??;
1958 Ok(proto::Ack {})
1959 }
1960
1961 async fn handle_checkout_files(
1962 this: Entity<Self>,
1963 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
1964 mut cx: AsyncApp,
1965 ) -> Result<proto::Ack> {
1966 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1967 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1968 let paths = envelope
1969 .payload
1970 .paths
1971 .iter()
1972 .map(|s| RepoPath::from_str(s))
1973 .collect();
1974
1975 repository_handle
1976 .update(&mut cx, |repository_handle, cx| {
1977 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
1978 })?
1979 .await??;
1980 Ok(proto::Ack {})
1981 }
1982
1983 async fn handle_open_commit_message_buffer(
1984 this: Entity<Self>,
1985 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
1986 mut cx: AsyncApp,
1987 ) -> Result<proto::OpenBufferResponse> {
1988 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1989 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1990 let buffer = repository
1991 .update(&mut cx, |repository, cx| {
1992 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
1993 })?
1994 .await?;
1995
1996 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
1997 this.update(&mut cx, |this, cx| {
1998 this.buffer_store.update(cx, |buffer_store, cx| {
1999 buffer_store
2000 .create_buffer_for_peer(
2001 &buffer,
2002 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2003 cx,
2004 )
2005 .detach_and_log_err(cx);
2006 })
2007 })?;
2008
2009 Ok(proto::OpenBufferResponse {
2010 buffer_id: buffer_id.to_proto(),
2011 })
2012 }
2013
2014 async fn handle_askpass(
2015 this: Entity<Self>,
2016 envelope: TypedEnvelope<proto::AskPassRequest>,
2017 mut cx: AsyncApp,
2018 ) -> Result<proto::AskPassResponse> {
2019 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2020 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2021
2022 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2023 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2024 debug_panic!("no askpass found");
2025 return Err(anyhow::anyhow!("no askpass found"));
2026 };
2027
2028 let response = askpass.ask_password(envelope.payload.prompt).await?;
2029
2030 delegates
2031 .lock()
2032 .insert(envelope.payload.askpass_id, askpass);
2033
2034 Ok(proto::AskPassResponse { response })
2035 }
2036
2037 async fn handle_check_for_pushed_commits(
2038 this: Entity<Self>,
2039 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2040 mut cx: AsyncApp,
2041 ) -> Result<proto::CheckForPushedCommitsResponse> {
2042 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2043 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2044
2045 let branches = repository_handle
2046 .update(&mut cx, |repository_handle, _| {
2047 repository_handle.check_for_pushed_commits()
2048 })?
2049 .await??;
2050 Ok(proto::CheckForPushedCommitsResponse {
2051 pushed_to: branches
2052 .into_iter()
                .map(|branch_name| branch_name.to_string())
2054 .collect(),
2055 })
2056 }
2057
2058 async fn handle_git_diff(
2059 this: Entity<Self>,
2060 envelope: TypedEnvelope<proto::GitDiff>,
2061 mut cx: AsyncApp,
2062 ) -> Result<proto::GitDiffResponse> {
2063 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2064 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2065 let diff_type = match envelope.payload.diff_type() {
2066 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2067 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2068 };
2069
2070 let mut diff = repository_handle
2071 .update(&mut cx, |repository_handle, cx| {
2072 repository_handle.diff(diff_type, cx)
2073 })?
2074 .await??;
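        // Cap the diff sent back over the wire at roughly one megabyte; truncation is
        // by character count, so the exact byte length can differ slightly.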
2075 const ONE_MB: usize = 1_000_000;
2076 if diff.len() > ONE_MB {
2077 diff = diff.chars().take(ONE_MB).collect()
2078 }
2079
2080 Ok(proto::GitDiffResponse { diff })
2081 }
2082
2083 async fn handle_open_unstaged_diff(
2084 this: Entity<Self>,
2085 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2086 mut cx: AsyncApp,
2087 ) -> Result<proto::OpenUnstagedDiffResponse> {
2088 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2089 let diff = this
2090 .update(&mut cx, |this, cx| {
2091 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2092 Some(this.open_unstaged_diff(buffer, cx))
2093 })?
2094 .ok_or_else(|| anyhow!("no such buffer"))?
2095 .await?;
2096 this.update(&mut cx, |this, _| {
2097 let shared_diffs = this
2098 .shared_diffs
2099 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2100 .or_default();
2101 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2102 })?;
2103 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2104 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2105 }
2106
2107 async fn handle_open_uncommitted_diff(
2108 this: Entity<Self>,
2109 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2110 mut cx: AsyncApp,
2111 ) -> Result<proto::OpenUncommittedDiffResponse> {
2112 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2113 let diff = this
2114 .update(&mut cx, |this, cx| {
2115 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2116 Some(this.open_uncommitted_diff(buffer, cx))
2117 })?
2118 .ok_or_else(|| anyhow!("no such buffer"))?
2119 .await?;
2120 this.update(&mut cx, |this, _| {
2121 let shared_diffs = this
2122 .shared_diffs
2123 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2124 .or_default();
2125 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2126 })?;
2127 diff.read_with(&cx, |diff, cx| {
2128 use proto::open_uncommitted_diff_response::Mode;
2129
2130 let unstaged_diff = diff.secondary_diff();
2131 let index_snapshot = unstaged_diff.and_then(|diff| {
2132 let diff = diff.read(cx);
2133 diff.base_text_exists().then(|| diff.base_text())
2134 });
2135
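            // Pick the response mode: if the index snapshot is the same text as HEAD's,
            // the client can reuse the committed text for the staged side as well.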
2136 let mode;
2137 let staged_text;
2138 let committed_text;
2139 if diff.base_text_exists() {
2140 let committed_snapshot = diff.base_text();
2141 committed_text = Some(committed_snapshot.text());
2142 if let Some(index_text) = index_snapshot {
2143 if index_text.remote_id() == committed_snapshot.remote_id() {
2144 mode = Mode::IndexMatchesHead;
2145 staged_text = None;
2146 } else {
2147 mode = Mode::IndexAndHead;
2148 staged_text = Some(index_text.text());
2149 }
2150 } else {
2151 mode = Mode::IndexAndHead;
2152 staged_text = None;
2153 }
2154 } else {
2155 mode = Mode::IndexAndHead;
2156 committed_text = None;
2157 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2158 }
2159
2160 proto::OpenUncommittedDiffResponse {
2161 committed_text,
2162 staged_text,
2163 mode: mode.into(),
2164 }
2165 })
2166 }
2167
2168 async fn handle_update_diff_bases(
2169 this: Entity<Self>,
2170 request: TypedEnvelope<proto::UpdateDiffBases>,
2171 mut cx: AsyncApp,
2172 ) -> Result<()> {
2173 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2174 this.update(&mut cx, |this, cx| {
2175 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2176 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2177 let buffer = buffer.read(cx).text_snapshot();
2178 diff_state.update(cx, |diff_state, cx| {
2179 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2180 })
2181 }
2182 }
2183 })
2184 }
2185
2186 async fn handle_blame_buffer(
2187 this: Entity<Self>,
2188 envelope: TypedEnvelope<proto::BlameBuffer>,
2189 mut cx: AsyncApp,
2190 ) -> Result<proto::BlameBufferResponse> {
2191 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2192 let version = deserialize_version(&envelope.payload.version);
2193 let buffer = this.read_with(&cx, |this, cx| {
2194 this.buffer_store.read(cx).get_existing(buffer_id)
2195 })??;
2196 buffer
2197 .update(&mut cx, |buffer, _| {
2198 buffer.wait_for_version(version.clone())
2199 })?
2200 .await?;
2201 let blame = this
2202 .update(&mut cx, |this, cx| {
2203 this.blame_buffer(&buffer, Some(version), cx)
2204 })?
2205 .await?;
2206 Ok(serialize_blame_buffer_response(blame))
2207 }
2208
2209 async fn handle_get_permalink_to_line(
2210 this: Entity<Self>,
2211 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2212 mut cx: AsyncApp,
2213 ) -> Result<proto::GetPermalinkToLineResponse> {
2214 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2215 // let version = deserialize_version(&envelope.payload.version);
2216 let selection = {
2217 let proto_selection = envelope
2218 .payload
2219 .selection
2220 .context("no selection to get permalink for defined")?;
2221 proto_selection.start as u32..proto_selection.end as u32
2222 };
2223 let buffer = this.read_with(&cx, |this, cx| {
2224 this.buffer_store.read(cx).get_existing(buffer_id)
2225 })??;
2226 let permalink = this
2227 .update(&mut cx, |this, cx| {
2228 this.get_permalink_to_line(&buffer, selection, cx)
2229 })?
2230 .await?;
2231 Ok(proto::GetPermalinkToLineResponse {
2232 permalink: permalink.to_string(),
2233 })
2234 }
2235
2236 fn repository_for_request(
2237 this: &Entity<Self>,
2238 id: RepositoryId,
2239 cx: &mut AsyncApp,
2240 ) -> Result<Entity<Repository>> {
2241 this.update(cx, |this, _| {
2242 this.repositories
2243 .get(&id)
2244 .context("missing repository handle")
2245 .cloned()
2246 })?
2247 }
2248
2249 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2250 self.repositories
2251 .iter()
2252 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2253 .collect()
2254 }
2255}
2256
2257impl BufferDiffState {
2258 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2259 self.language = buffer.read(cx).language().cloned();
2260 self.language_changed = true;
2261 let _ = self.recalculate_diffs(
2262 buffer.read(cx).text_snapshot(),
2263 self.hunk_staging_operation_count,
2264 cx,
2265 );
2266 }
2267
2268 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2269 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2270 }
2271
2272 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2273 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2274 }
2275
2276 fn handle_base_texts_updated(
2277 &mut self,
2278 buffer: text::BufferSnapshot,
2279 message: proto::UpdateDiffBases,
2280 cx: &mut Context<Self>,
2281 ) {
2282 use proto::update_diff_bases::Mode;
2283
2284 let Some(mode) = Mode::from_i32(message.mode) else {
2285 return;
2286 };
2287
2288 let diff_bases_change = match mode {
2289 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2290 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2291 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2292 Mode::IndexAndHead => DiffBasesChange::SetEach {
2293 index: message.staged_text,
2294 head: message.committed_text,
2295 },
2296 };
2297
2298 let _ = self.diff_bases_changed(
2299 buffer,
2300 diff_bases_change,
2301 self.hunk_staging_operation_count,
2302 cx,
2303 );
2304 }
2305
2306 pub fn wait_for_recalculation(&mut self) -> Option<oneshot::Receiver<()>> {
2307 if self.diff_updated_futures.is_empty() {
2308 return None;
2309 }
2310 let (tx, rx) = oneshot::channel();
2311 self.diff_updated_futures.push(tx);
2312 Some(rx)
2313 }
2314
2315 fn diff_bases_changed(
2316 &mut self,
2317 buffer: text::BufferSnapshot,
2318 diff_bases_change: DiffBasesChange,
2319 prev_hunk_staging_operation_count: usize,
2320 cx: &mut Context<Self>,
2321 ) -> oneshot::Receiver<()> {
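        // Record the new base texts (normalizing their line endings) and note which of
        // them changed, then kick off a diff recalculation.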
2322 match diff_bases_change {
2323 DiffBasesChange::SetIndex(index) => {
2324 self.index_text = index.map(|mut index| {
2325 text::LineEnding::normalize(&mut index);
2326 Arc::new(index)
2327 });
2328 self.index_changed = true;
2329 }
2330 DiffBasesChange::SetHead(head) => {
2331 self.head_text = head.map(|mut head| {
2332 text::LineEnding::normalize(&mut head);
2333 Arc::new(head)
2334 });
2335 self.head_changed = true;
2336 }
2337 DiffBasesChange::SetBoth(text) => {
2338 let text = text.map(|mut text| {
2339 text::LineEnding::normalize(&mut text);
2340 Arc::new(text)
2341 });
2342 self.head_text = text.clone();
2343 self.index_text = text;
2344 self.head_changed = true;
2345 self.index_changed = true;
2346 }
2347 DiffBasesChange::SetEach { index, head } => {
2348 self.index_text = index.map(|mut index| {
2349 text::LineEnding::normalize(&mut index);
2350 Arc::new(index)
2351 });
2352 self.index_changed = true;
2353 self.head_text = head.map(|mut head| {
2354 text::LineEnding::normalize(&mut head);
2355 Arc::new(head)
2356 });
2357 self.head_changed = true;
2358 }
2359 }
2360
2361 self.recalculate_diffs(buffer, prev_hunk_staging_operation_count, cx)
2362 }
2363
2364 fn recalculate_diffs(
2365 &mut self,
2366 buffer: text::BufferSnapshot,
2367 prev_hunk_staging_operation_count: usize,
2368 cx: &mut Context<Self>,
2369 ) -> oneshot::Receiver<()> {
2370 log::debug!("recalculate diffs");
2371 let (tx, rx) = oneshot::channel();
2372 self.diff_updated_futures.push(tx);
2373
2374 let language = self.language.clone();
2375 let language_registry = self.language_registry.clone();
2376 let unstaged_diff = self.unstaged_diff();
2377 let uncommitted_diff = self.uncommitted_diff();
2378 let head = self.head_text.clone();
2379 let index = self.index_text.clone();
2380 let index_changed = self.index_changed;
2381 let head_changed = self.head_changed;
2382 let language_changed = self.language_changed;
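        // `DiffBasesChange::SetBoth` stores one shared `Arc` for both base texts, so
        // pointer equality here means the unstaged diff can be reused as the
        // uncommitted diff below instead of diffing twice.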
2383 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2384 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2385 (None, None) => true,
2386 _ => false,
2387 };
2388 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2389 let mut new_unstaged_diff = None;
2390 if let Some(unstaged_diff) = &unstaged_diff {
2391 new_unstaged_diff = Some(
2392 BufferDiff::update_diff(
2393 unstaged_diff.clone(),
2394 buffer.clone(),
2395 index,
2396 index_changed,
2397 language_changed,
2398 language.clone(),
2399 language_registry.clone(),
2400 cx,
2401 )
2402 .await?,
2403 );
2404 }
2405
2406 let mut new_uncommitted_diff = None;
2407 if let Some(uncommitted_diff) = &uncommitted_diff {
2408 new_uncommitted_diff = if index_matches_head {
2409 new_unstaged_diff.clone()
2410 } else {
2411 Some(
2412 BufferDiff::update_diff(
2413 uncommitted_diff.clone(),
2414 buffer.clone(),
2415 head,
2416 head_changed,
2417 language_changed,
2418 language.clone(),
2419 language_registry.clone(),
2420 cx,
2421 )
2422 .await?,
2423 )
2424 }
2425 }
2426
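            // If hunks were staged or unstaged while this recalculation was running,
            // the snapshots computed above are stale; bail out rather than overwrite
            // newer state.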
2427 if this.update(cx, |this, _| {
2428 this.hunk_staging_operation_count > prev_hunk_staging_operation_count
2429 })? {
                log::debug!("discarding stale diff recalculation");
2431 return Ok(());
2432 }
2433
2434 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2435 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2436 {
2437 unstaged_diff.update(cx, |diff, cx| {
2438 if language_changed {
2439 diff.language_changed(cx);
2440 }
2441 diff.set_snapshot(new_unstaged_diff, &buffer, None, cx)
2442 })?
2443 } else {
2444 None
2445 };
2446
2447 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2448 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2449 {
2450 uncommitted_diff.update(cx, |diff, cx| {
2451 if language_changed {
2452 diff.language_changed(cx);
2453 }
2454 diff.set_snapshot(new_uncommitted_diff, &buffer, unstaged_changed_range, cx);
2455 })?;
2456 }
2457
2458 if let Some(this) = this.upgrade() {
2459 this.update(cx, |this, _| {
2460 this.index_changed = false;
2461 this.head_changed = false;
2462 this.language_changed = false;
2463 for tx in this.diff_updated_futures.drain(..) {
2464 tx.send(()).ok();
2465 }
2466 })?;
2467 }
2468
2469 Ok(())
2470 }));
2471
2472 rx
2473 }
2474}
2475
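// Builds an askpass delegate that forwards credential prompts from a local git
// operation to the downstream client over RPC, correlating responses by `askpass_id`.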
2476fn make_remote_delegate(
2477 this: Entity<GitStore>,
2478 project_id: u64,
2479 repository_id: RepositoryId,
2480 askpass_id: u64,
2481 cx: &mut AsyncApp,
2482) -> AskPassDelegate {
2483 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2484 this.update(cx, |this, cx| {
2485 let Some((client, _)) = this.downstream_client() else {
2486 return;
2487 };
2488 let response = client.request(proto::AskPassRequest {
2489 project_id,
2490 repository_id: repository_id.to_proto(),
2491 askpass_id,
2492 prompt,
2493 });
2494 cx.spawn(async move |_, _| {
2495 tx.send(response.await?.response).ok();
2496 anyhow::Ok(())
2497 })
2498 .detach_and_log_err(cx);
2499 })
2500 .log_err();
2501 })
2502}
2503
2504impl RepositoryId {
2505 pub fn to_proto(self) -> u64 {
2506 self.0
2507 }
2508
2509 pub fn from_proto(id: u64) -> Self {
2510 RepositoryId(id)
2511 }
2512}
2513
2514impl RepositorySnapshot {
2515 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2516 Self {
2517 id,
2518 merge_message: None,
2519 statuses_by_path: Default::default(),
2520 work_directory_abs_path,
2521 branch: None,
2522 merge_conflicts: Default::default(),
2523 merge_head_shas: Default::default(),
2524 scan_id: 0,
2525 }
2526 }
2527
2528 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2529 proto::UpdateRepository {
2530 branch_summary: self.branch.as_ref().map(branch_to_proto),
2531 updated_statuses: self
2532 .statuses_by_path
2533 .iter()
2534 .map(|entry| entry.to_proto())
2535 .collect(),
2536 removed_statuses: Default::default(),
2537 current_merge_conflicts: self
2538 .merge_conflicts
2539 .iter()
2540 .map(|repo_path| repo_path.to_proto())
2541 .collect(),
2542 project_id,
2543 id: self.id.to_proto(),
2544 abs_path: self.work_directory_abs_path.to_proto(),
2545 entry_ids: vec![self.id.to_proto()],
2546 scan_id: self.scan_id,
2547 is_last_update: true,
2548 }
2549 }
2550
2551 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2552 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2553 let mut removed_statuses: Vec<String> = Vec::new();
2554
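        // Walk the old and new status trees in lockstep (both are ordered by repo path),
        // emitting updates for new or changed entries and removals for entries that are
        // no longer present.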
2555 let mut new_statuses = self.statuses_by_path.iter().peekable();
2556 let mut old_statuses = old.statuses_by_path.iter().peekable();
2557
2558 let mut current_new_entry = new_statuses.next();
2559 let mut current_old_entry = old_statuses.next();
2560 loop {
2561 match (current_new_entry, current_old_entry) {
2562 (Some(new_entry), Some(old_entry)) => {
2563 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2564 Ordering::Less => {
2565 updated_statuses.push(new_entry.to_proto());
2566 current_new_entry = new_statuses.next();
2567 }
2568 Ordering::Equal => {
2569 if new_entry.status != old_entry.status {
2570 updated_statuses.push(new_entry.to_proto());
2571 }
2572 current_old_entry = old_statuses.next();
2573 current_new_entry = new_statuses.next();
2574 }
2575 Ordering::Greater => {
2576 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2577 current_old_entry = old_statuses.next();
2578 }
2579 }
2580 }
2581 (None, Some(old_entry)) => {
2582 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2583 current_old_entry = old_statuses.next();
2584 }
2585 (Some(new_entry), None) => {
2586 updated_statuses.push(new_entry.to_proto());
2587 current_new_entry = new_statuses.next();
2588 }
2589 (None, None) => break,
2590 }
2591 }
2592
2593 proto::UpdateRepository {
2594 branch_summary: self.branch.as_ref().map(branch_to_proto),
2595 updated_statuses,
2596 removed_statuses,
2597 current_merge_conflicts: self
2598 .merge_conflicts
2599 .iter()
2600 .map(|path| path.as_ref().to_proto())
2601 .collect(),
2602 project_id,
2603 id: self.id.to_proto(),
2604 abs_path: self.work_directory_abs_path.to_proto(),
2605 entry_ids: vec![],
2606 scan_id: self.scan_id,
2607 is_last_update: true,
2608 }
2609 }
2610
2611 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2612 self.statuses_by_path.iter().cloned()
2613 }
2614
2615 pub fn status_summary(&self) -> GitSummary {
2616 self.statuses_by_path.summary().item_summary
2617 }
2618
2619 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2620 self.statuses_by_path
2621 .get(&PathKey(path.0.clone()), &())
2622 .cloned()
2623 }
2624
2625 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2626 abs_path
2627 .strip_prefix(&self.work_directory_abs_path)
2628 .map(RepoPath::from)
2629 .ok()
2630 }
2631
2632 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2633 self.statuses_by_path
2634 .get(&PathKey(repo_path.0.clone()), &())
2635 .map_or(false, |entry| entry.status.is_conflicted())
2636 }
2637
2638 /// This is the name that will be displayed in the repository selector for this repository.
2639 pub fn display_name(&self) -> SharedString {
2640 self.work_directory_abs_path
2641 .file_name()
2642 .unwrap_or_default()
2643 .to_string_lossy()
2644 .to_string()
2645 .into()
2646 }
2647}
2648
2649impl Repository {
2650 fn local(
2651 id: RepositoryId,
2652 work_directory_abs_path: Arc<Path>,
2653 dot_git_abs_path: Arc<Path>,
2654 project_environment: WeakEntity<ProjectEnvironment>,
2655 fs: Arc<dyn Fs>,
2656 git_store: WeakEntity<GitStore>,
2657 cx: &mut Context<Self>,
2658 ) -> Self {
2659 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2660 Repository {
2661 this: cx.weak_entity(),
2662 git_store,
2663 snapshot,
2664 commit_message_buffer: None,
2665 askpass_delegates: Default::default(),
2666 paths_needing_status_update: Default::default(),
2667 latest_askpass_id: 0,
2668 job_sender: Repository::spawn_local_git_worker(
2669 work_directory_abs_path,
2670 dot_git_abs_path,
2671 project_environment,
2672 fs,
2673 cx,
2674 ),
2675 job_id: 0,
2676 active_jobs: Default::default(),
2677 }
2678 }
2679
2680 fn remote(
2681 id: RepositoryId,
2682 work_directory_abs_path: Arc<Path>,
2683 project_id: ProjectId,
2684 client: AnyProtoClient,
2685 git_store: WeakEntity<GitStore>,
2686 cx: &mut Context<Self>,
2687 ) -> Self {
2688 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2689 Self {
2690 this: cx.weak_entity(),
2691 snapshot,
2692 commit_message_buffer: None,
2693 git_store,
2694 paths_needing_status_update: Default::default(),
2695 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2696 askpass_delegates: Default::default(),
2697 latest_askpass_id: 0,
2698 active_jobs: Default::default(),
2699 job_id: 0,
2700 }
2701 }
2702
2703 pub fn git_store(&self) -> Option<Entity<GitStore>> {
2704 self.git_store.upgrade()
2705 }
2706
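    /// Enqueues a job on this repository's serialized git worker and returns a
    /// receiver for its result. Jobs run one at a time, in submission order; when a
    /// `status` message is provided, the job is surfaced via `current_job` while it
    /// runs.
    ///
    /// A minimal, illustrative sketch of a caller (shapes only, not compiled):
    ///
    /// ```ignore
    /// let rx = repository.send_job(Some("git example".into()), |state, _cx| async move {
    ///     match state {
    ///         RepositoryState::Local { backend, .. } => {
    ///             // run the operation directly against the local `backend`
    ///             Ok(())
    ///         }
    ///         RepositoryState::Remote { project_id, client } => {
    ///             // proxy the operation over RPC using `client` and `project_id`
    ///             Ok(())
    ///         }
    ///     }
    /// });
    /// ```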
2707 pub fn send_job<F, Fut, R>(
2708 &mut self,
2709 status: Option<SharedString>,
2710 job: F,
2711 ) -> oneshot::Receiver<R>
2712 where
2713 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2714 Fut: Future<Output = R> + 'static,
2715 R: Send + 'static,
2716 {
2717 self.send_keyed_job(None, status, job)
2718 }
2719
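    /// Like [`Self::send_job`], but tagged with a `GitJobKey`. When the worker pops a
    /// keyed job and a newer job with the same key is already waiting in the queue,
    /// the older job is skipped (see the worker loops below).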
2720 fn send_keyed_job<F, Fut, R>(
2721 &mut self,
2722 key: Option<GitJobKey>,
2723 status: Option<SharedString>,
2724 job: F,
2725 ) -> oneshot::Receiver<R>
2726 where
2727 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2728 Fut: Future<Output = R> + 'static,
2729 R: Send + 'static,
2730 {
2731 let (result_tx, result_rx) = futures::channel::oneshot::channel();
2732 let job_id = post_inc(&mut self.job_id);
2733 let this = self.this.clone();
2734 self.job_sender
2735 .unbounded_send(GitJob {
2736 key,
2737 job: Box::new(move |state, cx: &mut AsyncApp| {
2738 let job = job(state, cx.clone());
2739 cx.spawn(async move |cx| {
2740 if let Some(s) = status.clone() {
2741 this.update(cx, |this, cx| {
2742 this.active_jobs.insert(
2743 job_id,
2744 JobInfo {
2745 start: Instant::now(),
2746 message: s.clone(),
2747 },
2748 );
2749
2750 cx.notify();
2751 })
2752 .ok();
2753 }
2754 let result = job.await;
2755
2756 this.update(cx, |this, cx| {
2757 this.active_jobs.remove(&job_id);
2758 cx.notify();
2759 })
2760 .ok();
2761
2762 result_tx.send(result).ok();
2763 })
2764 }),
2765 })
2766 .ok();
2767 result_rx
2768 }
2769
2770 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
2771 let Some(git_store) = self.git_store.upgrade() else {
2772 return;
2773 };
2774 let entity = cx.entity();
2775 git_store.update(cx, |git_store, cx| {
2776 let Some((&id, _)) = git_store
2777 .repositories
2778 .iter()
2779 .find(|(_, handle)| *handle == &entity)
2780 else {
2781 return;
2782 };
2783 git_store.active_repo_id = Some(id);
2784 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
2785 });
2786 }
2787
2788 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
2789 self.snapshot.status()
2790 }
2791
2792 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
2793 let git_store = self.git_store.upgrade()?;
2794 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2795 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
2796 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
2797 Some(ProjectPath {
2798 worktree_id: worktree.read(cx).id(),
2799 path: relative_path.into(),
2800 })
2801 }
2802
2803 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
2804 let git_store = self.git_store.upgrade()?;
2805 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2806 let abs_path = worktree_store.absolutize(path, cx)?;
2807 self.snapshot.abs_path_to_repo_path(&abs_path)
2808 }
2809
2810 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
2811 other
2812 .read(cx)
2813 .snapshot
2814 .work_directory_abs_path
2815 .starts_with(&self.snapshot.work_directory_abs_path)
2816 }
2817
2818 pub fn open_commit_buffer(
2819 &mut self,
2820 languages: Option<Arc<LanguageRegistry>>,
2821 buffer_store: Entity<BufferStore>,
2822 cx: &mut Context<Self>,
2823 ) -> Task<Result<Entity<Buffer>>> {
2824 let id = self.id;
2825 if let Some(buffer) = self.commit_message_buffer.clone() {
2826 return Task::ready(Ok(buffer));
2827 }
2828 let this = cx.weak_entity();
2829
2830 let rx = self.send_job(None, move |state, mut cx| async move {
2831 let Some(this) = this.upgrade() else {
2832 bail!("git store was dropped");
2833 };
2834 match state {
2835 RepositoryState::Local { .. } => {
2836 this.update(&mut cx, |_, cx| {
2837 Self::open_local_commit_buffer(languages, buffer_store, cx)
2838 })?
2839 .await
2840 }
2841 RepositoryState::Remote { project_id, client } => {
2842 let request = client.request(proto::OpenCommitMessageBuffer {
2843 project_id: project_id.0,
2844 repository_id: id.to_proto(),
2845 });
2846 let response = request.await.context("requesting to open commit buffer")?;
2847 let buffer_id = BufferId::new(response.buffer_id)?;
2848 let buffer = buffer_store
2849 .update(&mut cx, |buffer_store, cx| {
2850 buffer_store.wait_for_remote_buffer(buffer_id, cx)
2851 })?
2852 .await?;
2853 if let Some(language_registry) = languages {
2854 let git_commit_language =
2855 language_registry.language_for_name("Git Commit").await?;
2856 buffer.update(&mut cx, |buffer, cx| {
2857 buffer.set_language(Some(git_commit_language), cx);
2858 })?;
2859 }
2860 this.update(&mut cx, |this, _| {
2861 this.commit_message_buffer = Some(buffer.clone());
2862 })?;
2863 Ok(buffer)
2864 }
2865 }
2866 });
2867
2868 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
2869 }
2870
2871 fn open_local_commit_buffer(
2872 language_registry: Option<Arc<LanguageRegistry>>,
2873 buffer_store: Entity<BufferStore>,
2874 cx: &mut Context<Self>,
2875 ) -> Task<Result<Entity<Buffer>>> {
2876 cx.spawn(async move |repository, cx| {
2877 let buffer = buffer_store
2878 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
2879 .await?;
2880
2881 if let Some(language_registry) = language_registry {
2882 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
2883 buffer.update(cx, |buffer, cx| {
2884 buffer.set_language(Some(git_commit_language), cx);
2885 })?;
2886 }
2887
2888 repository.update(cx, |repository, _| {
2889 repository.commit_message_buffer = Some(buffer.clone());
2890 })?;
2891 Ok(buffer)
2892 })
2893 }
2894
2895 pub fn checkout_files(
2896 &mut self,
2897 commit: &str,
2898 paths: Vec<RepoPath>,
2899 _cx: &mut App,
2900 ) -> oneshot::Receiver<Result<()>> {
2901 let commit = commit.to_string();
2902 let id = self.id;
2903
2904 self.send_job(
2905 Some(format!("git checkout {}", commit).into()),
2906 move |git_repo, _| async move {
2907 match git_repo {
2908 RepositoryState::Local {
2909 backend,
2910 environment,
2911 ..
2912 } => {
2913 backend
2914 .checkout_files(commit, paths, environment.clone())
2915 .await
2916 }
2917 RepositoryState::Remote { project_id, client } => {
2918 client
2919 .request(proto::GitCheckoutFiles {
2920 project_id: project_id.0,
2921 repository_id: id.to_proto(),
2922 commit,
2923 paths: paths
2924 .into_iter()
2925 .map(|p| p.to_string_lossy().to_string())
2926 .collect(),
2927 })
2928 .await?;
2929
2930 Ok(())
2931 }
2932 }
2933 },
2934 )
2935 }
2936
2937 pub fn reset(
2938 &mut self,
2939 commit: String,
2940 reset_mode: ResetMode,
2941 _cx: &mut App,
2942 ) -> oneshot::Receiver<Result<()>> {
2944 let id = self.id;
2945
2946 self.send_job(None, move |git_repo, _| async move {
2947 match git_repo {
2948 RepositoryState::Local {
2949 backend,
2950 environment,
2951 ..
2952 } => backend.reset(commit, reset_mode, environment).await,
2953 RepositoryState::Remote { project_id, client } => {
2954 client
2955 .request(proto::GitReset {
2956 project_id: project_id.0,
2957 repository_id: id.to_proto(),
2958 commit,
2959 mode: match reset_mode {
2960 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
2961 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
2962 },
2963 })
2964 .await?;
2965
2966 Ok(())
2967 }
2968 }
2969 })
2970 }
2971
2972 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
2973 let id = self.id;
2974 self.send_job(None, move |git_repo, _cx| async move {
2975 match git_repo {
2976 RepositoryState::Local { backend, .. } => backend.show(commit).await,
2977 RepositoryState::Remote { project_id, client } => {
2978 let resp = client
2979 .request(proto::GitShow {
2980 project_id: project_id.0,
2981 repository_id: id.to_proto(),
2982 commit,
2983 })
2984 .await?;
2985
2986 Ok(CommitDetails {
2987 sha: resp.sha.into(),
2988 message: resp.message.into(),
2989 commit_timestamp: resp.commit_timestamp,
2990 author_email: resp.author_email.into(),
2991 author_name: resp.author_name.into(),
2992 })
2993 }
2994 }
2995 })
2996 }
2997
2998 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
2999 let id = self.id;
3000 self.send_job(None, move |git_repo, cx| async move {
3001 match git_repo {
3002 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3003 RepositoryState::Remote {
3004 client, project_id, ..
3005 } => {
3006 let response = client
3007 .request(proto::LoadCommitDiff {
3008 project_id: project_id.0,
3009 repository_id: id.to_proto(),
3010 commit,
3011 })
3012 .await?;
3013 Ok(CommitDiff {
3014 files: response
3015 .files
3016 .into_iter()
3017 .map(|file| CommitFile {
3018 path: Path::new(&file.path).into(),
3019 old_text: file.old_text,
3020 new_text: file.new_text,
3021 })
3022 .collect(),
3023 })
3024 }
3025 }
3026 })
3027 }
3028
3029 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3030 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3031 }
3032
3033 pub fn stage_entries(
3034 &self,
3035 entries: Vec<RepoPath>,
3036 cx: &mut Context<Self>,
3037 ) -> Task<anyhow::Result<()>> {
3038 if entries.is_empty() {
3039 return Task::ready(Ok(()));
3040 }
3041 let id = self.id;
3042
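        // Save any open buffers for these paths (that still exist on disk) first, so
        // staging picks up the latest buffer contents.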
3043 let mut save_futures = Vec::new();
3044 if let Some(buffer_store) = self.buffer_store(cx) {
3045 buffer_store.update(cx, |buffer_store, cx| {
3046 for path in &entries {
3047 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3048 continue;
3049 };
3050 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3051 if buffer
3052 .read(cx)
3053 .file()
3054 .map_or(false, |file| file.disk_state().exists())
3055 {
3056 save_futures.push(buffer_store.save_buffer(buffer, cx));
3057 }
3058 }
3059 }
3060 })
3061 }
3062
3063 cx.spawn(async move |this, cx| {
3064 for save_future in save_futures {
3065 save_future.await?;
3066 }
3067
3068 this.update(cx, |this, _| {
3069 this.send_job(None, move |git_repo, _cx| async move {
3070 match git_repo {
3071 RepositoryState::Local {
3072 backend,
3073 environment,
3074 ..
3075 } => backend.stage_paths(entries, environment.clone()).await,
3076 RepositoryState::Remote { project_id, client } => {
3077 client
3078 .request(proto::Stage {
3079 project_id: project_id.0,
3080 repository_id: id.to_proto(),
3081 paths: entries
3082 .into_iter()
3083 .map(|repo_path| repo_path.as_ref().to_proto())
3084 .collect(),
3085 })
3086 .await
3087 .context("sending stage request")?;
3088
3089 Ok(())
3090 }
3091 }
3092 })
3093 })?
3094 .await??;
3095
3096 Ok(())
3097 })
3098 }
3099
3100 pub fn unstage_entries(
3101 &self,
3102 entries: Vec<RepoPath>,
3103 cx: &mut Context<Self>,
3104 ) -> Task<anyhow::Result<()>> {
3105 if entries.is_empty() {
3106 return Task::ready(Ok(()));
3107 }
3108 let id = self.id;
3109
3110 let mut save_futures = Vec::new();
3111 if let Some(buffer_store) = self.buffer_store(cx) {
3112 buffer_store.update(cx, |buffer_store, cx| {
3113 for path in &entries {
3114 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3115 continue;
3116 };
3117 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3118 if buffer
3119 .read(cx)
3120 .file()
3121 .map_or(false, |file| file.disk_state().exists())
3122 {
3123 save_futures.push(buffer_store.save_buffer(buffer, cx));
3124 }
3125 }
3126 }
3127 })
3128 }
3129
3130 cx.spawn(async move |this, cx| {
3131 for save_future in save_futures {
3132 save_future.await?;
3133 }
3134
3135 this.update(cx, |this, _| {
3136 this.send_job(None, move |git_repo, _cx| async move {
3137 match git_repo {
3138 RepositoryState::Local {
3139 backend,
3140 environment,
3141 ..
3142 } => backend.unstage_paths(entries, environment).await,
3143 RepositoryState::Remote { project_id, client } => {
3144 client
3145 .request(proto::Unstage {
3146 project_id: project_id.0,
3147 repository_id: id.to_proto(),
3148 paths: entries
3149 .into_iter()
3150 .map(|repo_path| repo_path.as_ref().to_proto())
3151 .collect(),
3152 })
3153 .await
3154 .context("sending unstage request")?;
3155
3156 Ok(())
3157 }
3158 }
3159 })
3160 })?
3161 .await??;
3162
3163 Ok(())
3164 })
3165 }
3166
3167 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3168 let to_stage = self
3169 .cached_status()
3170 .filter(|entry| !entry.status.staging().is_fully_staged())
3171 .map(|entry| entry.repo_path.clone())
3172 .collect();
3173 self.stage_entries(to_stage, cx)
3174 }
3175
3176 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3177 let to_unstage = self
3178 .cached_status()
3179 .filter(|entry| entry.status.staging().has_staged())
3180 .map(|entry| entry.repo_path.clone())
3181 .collect();
3182 self.unstage_entries(to_unstage, cx)
3183 }
3184
3185 pub fn commit(
3186 &mut self,
3187 message: SharedString,
3188 name_and_email: Option<(SharedString, SharedString)>,
3189 _cx: &mut App,
3190 ) -> oneshot::Receiver<Result<()>> {
3191 let id = self.id;
3192
3193 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3194 match git_repo {
3195 RepositoryState::Local {
3196 backend,
3197 environment,
3198 ..
3199 } => backend.commit(message, name_and_email, environment).await,
3200 RepositoryState::Remote { project_id, client } => {
3201 let (name, email) = name_and_email.unzip();
3202 client
3203 .request(proto::Commit {
3204 project_id: project_id.0,
3205 repository_id: id.to_proto(),
3206 message: String::from(message),
3207 name: name.map(String::from),
3208 email: email.map(String::from),
3209 })
3210 .await
3211 .context("sending commit request")?;
3212
3213 Ok(())
3214 }
3215 }
3216 })
3217 }
3218
3219 pub fn fetch(
3220 &mut self,
3221 askpass: AskPassDelegate,
3222 _cx: &mut App,
3223 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3224 let askpass_delegates = self.askpass_delegates.clone();
3225 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3226 let id = self.id;
3227
3228 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3229 match git_repo {
3230 RepositoryState::Local {
3231 backend,
3232 environment,
3233 ..
3234 } => backend.fetch(askpass, environment, cx).await,
3235 RepositoryState::Remote { project_id, client } => {
3236 askpass_delegates.lock().insert(askpass_id, askpass);
3237 let _defer = util::defer(|| {
3238 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3239 debug_assert!(askpass_delegate.is_some());
3240 });
3241
3242 let response = client
3243 .request(proto::Fetch {
3244 project_id: project_id.0,
3245 repository_id: id.to_proto(),
3246 askpass_id,
3247 })
3248 .await
3249 .context("sending fetch request")?;
3250
3251 Ok(RemoteCommandOutput {
3252 stdout: response.stdout,
3253 stderr: response.stderr,
3254 })
3255 }
3256 }
3257 })
3258 }
3259
3260 pub fn push(
3261 &mut self,
3262 branch: SharedString,
3263 remote: SharedString,
3264 options: Option<PushOptions>,
3265 askpass: AskPassDelegate,
3266 cx: &mut Context<Self>,
3267 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3268 let askpass_delegates = self.askpass_delegates.clone();
3269 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3270 let id = self.id;
3271
3272 let args = options
3273 .map(|option| match option {
3274 PushOptions::SetUpstream => " --set-upstream",
3275 PushOptions::Force => " --force",
3276 })
3277 .unwrap_or("");
3278
3279 let updates_tx = self
3280 .git_store()
3281 .and_then(|git_store| match &git_store.read(cx).state {
3282 GitStoreState::Local { downstream, .. } => downstream
3283 .as_ref()
3284 .map(|downstream| downstream.updates_tx.clone()),
3285 _ => None,
3286 });
3287
3288 let this = cx.weak_entity();
3289 self.send_job(
3290 Some(format!("git push {} {} {}", args, branch, remote).into()),
3291 move |git_repo, mut cx| async move {
3292 match git_repo {
3293 RepositoryState::Local {
3294 backend,
3295 environment,
3296 ..
3297 } => {
3298 let result = backend
3299 .push(
3300 branch.to_string(),
3301 remote.to_string(),
3302 options,
3303 askpass,
3304 environment.clone(),
3305 cx.clone(),
3306 )
3307 .await;
3308 if result.is_ok() {
3309 let branches = backend.branches().await?;
3310 let branch = branches.into_iter().find(|branch| branch.is_head);
3311 log::info!("head branch after scan is {branch:?}");
3312 let snapshot = this.update(&mut cx, |this, cx| {
3313 this.snapshot.branch = branch;
3314 let snapshot = this.snapshot.clone();
3315 cx.emit(RepositoryEvent::Updated { full_scan: false });
3316 snapshot
3317 })?;
3318 if let Some(updates_tx) = updates_tx {
3319 updates_tx
3320 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3321 .ok();
3322 }
3323 }
3324 result
3325 }
3326 RepositoryState::Remote { project_id, client } => {
3327 askpass_delegates.lock().insert(askpass_id, askpass);
3328 let _defer = util::defer(|| {
3329 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3330 debug_assert!(askpass_delegate.is_some());
3331 });
3332 let response = client
3333 .request(proto::Push {
3334 project_id: project_id.0,
3335 repository_id: id.to_proto(),
3336 askpass_id,
3337 branch_name: branch.to_string(),
3338 remote_name: remote.to_string(),
3339 options: options.map(|options| match options {
3340 PushOptions::Force => proto::push::PushOptions::Force,
3341 PushOptions::SetUpstream => {
3342 proto::push::PushOptions::SetUpstream
3343 }
3344 }
3345 as i32),
3346 })
3347 .await
3348 .context("sending push request")?;
3349
3350 Ok(RemoteCommandOutput {
3351 stdout: response.stdout,
3352 stderr: response.stderr,
3353 })
3354 }
3355 }
3356 },
3357 )
3358 }
3359
3360 pub fn pull(
3361 &mut self,
3362 branch: SharedString,
3363 remote: SharedString,
3364 askpass: AskPassDelegate,
3365 _cx: &mut App,
3366 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3367 let askpass_delegates = self.askpass_delegates.clone();
3368 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3369 let id = self.id;
3370
3371 self.send_job(
3372 Some(format!("git pull {} {}", remote, branch).into()),
3373 move |git_repo, cx| async move {
3374 match git_repo {
3375 RepositoryState::Local {
3376 backend,
3377 environment,
3378 ..
3379 } => {
3380 backend
3381 .pull(
3382 branch.to_string(),
3383 remote.to_string(),
3384 askpass,
3385 environment.clone(),
3386 cx,
3387 )
3388 .await
3389 }
3390 RepositoryState::Remote { project_id, client } => {
3391 askpass_delegates.lock().insert(askpass_id, askpass);
3392 let _defer = util::defer(|| {
3393 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3394 debug_assert!(askpass_delegate.is_some());
3395 });
3396 let response = client
3397 .request(proto::Pull {
3398 project_id: project_id.0,
3399 repository_id: id.to_proto(),
3400 askpass_id,
3401 branch_name: branch.to_string(),
3402 remote_name: remote.to_string(),
3403 })
3404 .await
3405 .context("sending pull request")?;
3406
3407 Ok(RemoteCommandOutput {
3408 stdout: response.stdout,
3409 stderr: response.stderr,
3410 })
3411 }
3412 }
3413 },
3414 )
3415 }
3416
3417 fn spawn_set_index_text_job(
3418 &mut self,
3419 path: RepoPath,
3420 content: Option<String>,
3421 _cx: &mut App,
3422 ) -> oneshot::Receiver<anyhow::Result<()>> {
3423 let id = self.id;
3424
3425 self.send_keyed_job(
3426 Some(GitJobKey::WriteIndex(path.clone())),
3427 None,
3428 move |git_repo, _cx| async move {
3429 match git_repo {
3430 RepositoryState::Local {
3431 backend,
3432 environment,
3433 ..
3434 } => {
3435 backend
3436 .set_index_text(path, content, environment.clone())
3437 .await
3438 }
3439 RepositoryState::Remote { project_id, client } => {
3440 client
3441 .request(proto::SetIndexText {
3442 project_id: project_id.0,
3443 repository_id: id.to_proto(),
3444 path: path.as_ref().to_proto(),
3445 text: content,
3446 })
3447 .await?;
3448 Ok(())
3449 }
3450 }
3451 },
3452 )
3453 }
3454
3455 pub fn get_remotes(
3456 &mut self,
3457 branch_name: Option<String>,
3458 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
3459 let id = self.id;
3460 self.send_job(None, move |repo, _cx| async move {
3461 match repo {
3462 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
3463 RepositoryState::Remote { project_id, client } => {
3464 let response = client
3465 .request(proto::GetRemotes {
3466 project_id: project_id.0,
3467 repository_id: id.to_proto(),
3468 branch_name,
3469 })
3470 .await?;
3471
3472 let remotes = response
3473 .remotes
3474 .into_iter()
3475 .map(|remotes| git::repository::Remote {
3476 name: remotes.name.into(),
3477 })
3478 .collect();
3479
3480 Ok(remotes)
3481 }
3482 }
3483 })
3484 }
3485
3486 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
3487 let id = self.id;
3488 self.send_job(None, move |repo, cx| async move {
3489 match repo {
3490 RepositoryState::Local { backend, .. } => {
3491 let backend = backend.clone();
3492 cx.background_spawn(async move { backend.branches().await })
3493 .await
3494 }
3495 RepositoryState::Remote { project_id, client } => {
3496 let response = client
3497 .request(proto::GitGetBranches {
3498 project_id: project_id.0,
3499 repository_id: id.to_proto(),
3500 })
3501 .await?;
3502
3503 let branches = response
3504 .branches
3505 .into_iter()
3506 .map(|branch| proto_to_branch(&branch))
3507 .collect();
3508
3509 Ok(branches)
3510 }
3511 }
3512 })
3513 }
3514
3515 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
3516 let id = self.id;
3517 self.send_job(None, move |repo, _cx| async move {
3518 match repo {
3519 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
3520 RepositoryState::Remote { project_id, client } => {
3521 let response = client
3522 .request(proto::GitDiff {
3523 project_id: project_id.0,
3524 repository_id: id.to_proto(),
3525 diff_type: match diff_type {
3526 DiffType::HeadToIndex => {
3527 proto::git_diff::DiffType::HeadToIndex.into()
3528 }
3529 DiffType::HeadToWorktree => {
3530 proto::git_diff::DiffType::HeadToWorktree.into()
3531 }
3532 },
3533 })
3534 .await?;
3535
3536 Ok(response.diff)
3537 }
3538 }
3539 })
3540 }
3541
3542 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3543 let id = self.id;
3544 self.send_job(
3545 Some(format!("git switch -c {branch_name}").into()),
3546 move |repo, _cx| async move {
3547 match repo {
3548 RepositoryState::Local { backend, .. } => {
3549 backend.create_branch(branch_name).await
3550 }
3551 RepositoryState::Remote { project_id, client } => {
3552 client
3553 .request(proto::GitCreateBranch {
3554 project_id: project_id.0,
3555 repository_id: id.to_proto(),
3556 branch_name,
3557 })
3558 .await?;
3559
3560 Ok(())
3561 }
3562 }
3563 },
3564 )
3565 }
3566
3567 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3568 let id = self.id;
3569 self.send_job(
3570 Some(format!("git switch {branch_name}").into()),
3571 move |repo, _cx| async move {
3572 match repo {
3573 RepositoryState::Local { backend, .. } => {
3574 backend.change_branch(branch_name).await
3575 }
3576 RepositoryState::Remote { project_id, client } => {
3577 client
3578 .request(proto::GitChangeBranch {
3579 project_id: project_id.0,
3580 repository_id: id.to_proto(),
3581 branch_name,
3582 })
3583 .await?;
3584
3585 Ok(())
3586 }
3587 }
3588 },
3589 )
3590 }
3591
3592 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
3593 let id = self.id;
3594 self.send_job(None, move |repo, _cx| async move {
3595 match repo {
3596 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
3597 RepositoryState::Remote { project_id, client } => {
3598 let response = client
3599 .request(proto::CheckForPushedCommits {
3600 project_id: project_id.0,
3601 repository_id: id.to_proto(),
3602 })
3603 .await?;
3604
3605 let branches = response.pushed_to.into_iter().map(Into::into).collect();
3606
3607 Ok(branches)
3608 }
3609 }
3610 })
3611 }
3612
3613 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
3614 self.send_job(None, |repo, _cx| async move {
3615 match repo {
3616 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
3617 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3618 }
3619 })
3620 }
3621
3622 pub fn restore_checkpoint(
3623 &mut self,
3624 checkpoint: GitRepositoryCheckpoint,
3625 ) -> oneshot::Receiver<Result<()>> {
3626 self.send_job(None, move |repo, _cx| async move {
3627 match repo {
3628 RepositoryState::Local { backend, .. } => {
3629 backend.restore_checkpoint(checkpoint).await
3630 }
3631 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3632 }
3633 })
3634 }
3635
3636 pub(crate) fn apply_remote_update(
3637 &mut self,
3638 update: proto::UpdateRepository,
3639 cx: &mut Context<Self>,
3640 ) -> Result<()> {
3641 let conflicted_paths = TreeSet::from_ordered_entries(
3642 update
3643 .current_merge_conflicts
3644 .into_iter()
3645 .map(|path| RepoPath(Path::new(&path).into())),
3646 );
3647 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
3648 self.snapshot.merge_conflicts = conflicted_paths;
3649
3650 let edits = update
3651 .removed_statuses
3652 .into_iter()
3653 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
3654 .chain(
3655 update
3656 .updated_statuses
3657 .into_iter()
3658 .filter_map(|updated_status| {
3659 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
3660 }),
3661 )
3662 .collect::<Vec<_>>();
3663 self.snapshot.statuses_by_path.edit(edits, &());
3664 if update.is_last_update {
3665 self.snapshot.scan_id = update.scan_id;
3666 }
3667 cx.emit(RepositoryEvent::Updated { full_scan: true });
3668 Ok(())
3669 }
3670
3671 pub fn compare_checkpoints(
3672 &mut self,
3673 left: GitRepositoryCheckpoint,
3674 right: GitRepositoryCheckpoint,
3675 ) -> oneshot::Receiver<Result<bool>> {
3676 self.send_job(None, move |repo, _cx| async move {
3677 match repo {
3678 RepositoryState::Local { backend, .. } => {
3679 backend.compare_checkpoints(left, right).await
3680 }
3681 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3682 }
3683 })
3684 }
3685
3686 pub fn delete_checkpoint(
3687 &mut self,
3688 checkpoint: GitRepositoryCheckpoint,
3689 ) -> oneshot::Receiver<Result<()>> {
3690 self.send_job(None, move |repo, _cx| async move {
3691 match repo {
3692 RepositoryState::Local { backend, .. } => {
3693 backend.delete_checkpoint(checkpoint).await
3694 }
3695 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3696 }
3697 })
3698 }
3699
3700 pub fn diff_checkpoints(
3701 &mut self,
3702 base_checkpoint: GitRepositoryCheckpoint,
3703 target_checkpoint: GitRepositoryCheckpoint,
3704 ) -> oneshot::Receiver<Result<String>> {
3705 self.send_job(None, move |repo, _cx| async move {
3706 match repo {
3707 RepositoryState::Local { backend, .. } => {
3708 backend
3709 .diff_checkpoints(base_checkpoint, target_checkpoint)
3710 .await
3711 }
3712 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3713 }
3714 })
3715 }
3716
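    /// Schedules a full refresh of this repository's state: the work directory path is
    /// queued for a status refresh, and a keyed `ReloadGitState` job rebuilds the
    /// snapshot from the backend; overlapping rescans coalesce on the job key.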
3717 fn schedule_scan(
3718 &mut self,
3719 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
3720 cx: &mut Context<Self>,
3721 ) {
3722 self.paths_changed(
3723 vec![git::repository::WORK_DIRECTORY_REPO_PATH.clone()],
3724 updates_tx.clone(),
3725 cx,
3726 );
3727
3728 let this = cx.weak_entity();
3729 let _ = self.send_keyed_job(
3730 Some(GitJobKey::ReloadGitState),
3731 None,
3732 |state, mut cx| async move {
3733 let Some(this) = this.upgrade() else {
3734 return Ok(());
3735 };
3736 let RepositoryState::Local { backend, .. } = state else {
3737 bail!("not a local repository")
3738 };
3739 let (snapshot, events) = this
3740 .update(&mut cx, |this, _| {
3741 compute_snapshot(
3742 this.id,
3743 this.work_directory_abs_path.clone(),
3744 this.snapshot.clone(),
3745 backend.clone(),
3746 )
3747 })?
3748 .await?;
3749 this.update(&mut cx, |this, cx| {
3750 this.snapshot = snapshot.clone();
3751 for event in events {
3752 cx.emit(event);
3753 }
3754 })?;
3755 if let Some(updates_tx) = updates_tx {
3756 updates_tx
3757 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3758 .ok();
3759 }
3760 Ok(())
3761 },
3762 );
3763 }
3764
3765 fn spawn_local_git_worker(
3766 work_directory_abs_path: Arc<Path>,
3767 dot_git_abs_path: Arc<Path>,
3768 project_environment: WeakEntity<ProjectEnvironment>,
3769 fs: Arc<dyn Fs>,
3770 cx: &mut Context<Self>,
3771 ) -> mpsc::UnboundedSender<GitJob> {
3772 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
3773
3774 cx.spawn(async move |_, cx| {
3775 let environment = project_environment
3776 .upgrade()
3777 .ok_or_else(|| anyhow!("missing project environment"))?
3778 .update(cx, |project_environment, cx| {
3779 project_environment.get_environment(Some(work_directory_abs_path.clone()), cx)
3780 })?
3781 .await
3782 .unwrap_or_else(|| {
3783 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
3784 HashMap::default()
3785 });
3786 let backend = cx
3787 .background_spawn(async move {
3788 fs.open_repo(&dot_git_abs_path)
3789 .ok_or_else(|| anyhow!("failed to build repository"))
3790 })
3791 .await?;
3792
3793 if let Some(git_hosting_provider_registry) =
3794 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
3795 {
3796 git_hosting_providers::register_additional_providers(
3797 git_hosting_provider_registry,
3798 backend.clone(),
3799 );
3800 }
3801
3802 let state = RepositoryState::Local {
3803 backend,
3804 environment: Arc::new(environment),
3805 };
3806 let mut jobs = VecDeque::new();
3807 loop {
3808 while let Ok(Some(next_job)) = job_rx.try_next() {
3809 jobs.push_back(next_job);
3810 }
3811
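                // Coalesce keyed jobs: if a newer job with the same key is already
                // queued, skip this older one and let the newer job do the work.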
3812 if let Some(job) = jobs.pop_front() {
3813 if let Some(current_key) = &job.key {
3814 if jobs
3815 .iter()
3816 .any(|other_job| other_job.key.as_ref() == Some(current_key))
3817 {
3818 continue;
3819 }
3820 }
3821 (job.job)(state.clone(), cx).await;
3822 } else if let Some(job) = job_rx.next().await {
3823 jobs.push_back(job);
3824 } else {
3825 break;
3826 }
3827 }
3828 anyhow::Ok(())
3829 })
3830 .detach_and_log_err(cx);
3831
3832 job_tx
3833 }
3834
3835 fn spawn_remote_git_worker(
3836 project_id: ProjectId,
3837 client: AnyProtoClient,
3838 cx: &mut Context<Self>,
3839 ) -> mpsc::UnboundedSender<GitJob> {
3840 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
3841
3842 cx.spawn(async move |_, cx| {
3843 let state = RepositoryState::Remote { project_id, client };
3844 let mut jobs = VecDeque::new();
3845 loop {
3846 while let Ok(Some(next_job)) = job_rx.try_next() {
3847 jobs.push_back(next_job);
3848 }
3849
3850 if let Some(job) = jobs.pop_front() {
3851 if let Some(current_key) = &job.key {
3852 if jobs
3853 .iter()
3854 .any(|other_job| other_job.key.as_ref() == Some(current_key))
3855 {
3856 continue;
3857 }
3858 }
3859 (job.job)(state.clone(), cx).await;
3860 } else if let Some(job) = job_rx.next().await {
3861 jobs.push_back(job);
3862 } else {
3863 break;
3864 }
3865 }
3866 anyhow::Ok(())
3867 })
3868 .detach_and_log_err(cx);
3869
3870 job_tx
3871 }
3872
3873 fn load_staged_text(
3874 &mut self,
3875 buffer_id: BufferId,
3876 repo_path: RepoPath,
3877 cx: &App,
3878 ) -> Task<Result<Option<String>>> {
3879 let rx = self.send_job(None, move |state, _| async move {
3880 match state {
3881 RepositoryState::Local { backend, .. } => {
3882 anyhow::Ok(backend.load_index_text(repo_path).await)
3883 }
3884 RepositoryState::Remote { project_id, client } => {
3885 let response = client
3886 .request(proto::OpenUnstagedDiff {
3887 project_id: project_id.to_proto(),
3888 buffer_id: buffer_id.to_proto(),
3889 })
3890 .await?;
3891 Ok(response.staged_text)
3892 }
3893 }
3894 });
3895 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
3896 }
3897
3898 fn load_committed_text(
3899 &mut self,
3900 buffer_id: BufferId,
3901 repo_path: RepoPath,
3902 cx: &App,
3903 ) -> Task<Result<DiffBasesChange>> {
3904 let rx = self.send_job(None, move |state, _| async move {
3905 match state {
3906 RepositoryState::Local { backend, .. } => {
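                    // When the index and HEAD agree, report `SetBoth` so the diff state
                    // stores a single shared base text and can detect the match cheaply.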
3907 let committed_text = backend.load_committed_text(repo_path.clone()).await;
3908 let staged_text = backend.load_index_text(repo_path).await;
3909 let diff_bases_change = if committed_text == staged_text {
3910 DiffBasesChange::SetBoth(committed_text)
3911 } else {
3912 DiffBasesChange::SetEach {
3913 index: staged_text,
3914 head: committed_text,
3915 }
3916 };
3917 anyhow::Ok(diff_bases_change)
3918 }
3919 RepositoryState::Remote { project_id, client } => {
3920 use proto::open_uncommitted_diff_response::Mode;
3921
3922 let response = client
3923 .request(proto::OpenUncommittedDiff {
3924 project_id: project_id.to_proto(),
3925 buffer_id: buffer_id.to_proto(),
3926 })
3927 .await?;
3928 let mode =
3929 Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
3930 let bases = match mode {
3931 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
3932 Mode::IndexAndHead => DiffBasesChange::SetEach {
3933 head: response.committed_text,
3934 index: response.staged_text,
3935 },
3936 };
3937 Ok(bases)
3938 }
3939 }
3940 });
3941
3942 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
3943 }
3944
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        self.paths_needing_status_update.extend(paths);

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local { backend, .. } = state else {
                    bail!("not a local repository")
                };

                let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
                let statuses = backend.status(&paths).await?;

                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());

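                        // First pass: record an edit for every path whose
                        // status is new or has changed since the last snapshot.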
                        for (repo_path, status) in &*statuses.entries {
                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) {
                                if &cursor.item().unwrap().status == status {
                                    continue;
                                }
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                            }));
                        }
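
                        // Second pass: remove entries for refreshed paths that
                        // no longer appear in the status output.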
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());
                        for path in changed_paths.iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) {
                                changed_path_statuses.push(Edit::Remove(PathKey(path.0.clone())));
                            }
                        }
                        changed_path_statuses
                    })
                    .await;

                this.update(&mut cx, |this, cx| {
                    if !changed_path_statuses.is_empty() {
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, &());
                        this.snapshot.scan_id += 1;
                        if let Some(updates_tx) = updates_tx {
                            updates_tx
                                .unbounded_send(DownstreamUpdate::UpdateRepository(
                                    this.snapshot.clone(),
                                ))
                                .ok();
                        }
                    }
                    cx.emit(RepositoryEvent::Updated { full_scan: false });
                })
            },
        );
    }

    /// Returns the currently running git command and when it started, if any.
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }
}

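/// Builds a permalink for a file inside the Cargo registry source cache by
/// reading the `.cargo_vcs_info.json` and `Cargo.toml` that ship with the
/// crate, then resolving the manifest's repository URL against the registered
/// git hosting providers.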
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams {
            sha: &cargo_vcs_info.git.sha1,
            path: &path.to_string_lossy(),
            selection: Some(selection),
        },
    );
    Ok(permalink)
}

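/// Converts a `Blame` result into its protobuf representation; `None` maps to
/// a response carrying no blame data.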
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author.clone(),
            author_mail: entry.author_mail.clone(),
            author_time: entry.author_time,
            author_tz: entry.author_tz.clone(),
            committer: entry.committer_name.clone(),
            committer_mail: entry.committer_email.clone(),
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz.clone(),
            summary: entry.summary.clone(),
            previous: entry.previous.clone(),
            filename: entry.filename.clone(),
        })
        .collect::<Vec<_>>();

    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse {
            entries,
            messages,
            remote_url: blame.remote_url,
        }),
    }
}

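/// Rebuilds a `Blame` from its protobuf representation, dropping entries and
/// commit messages whose object ids fail to parse.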
fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame {
        entries,
        messages,
        remote_url: response.remote_url,
    })
}

fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        name: branch.name.to_string(),
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
            }),
    }
}

fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        name: proto.name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
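                // Not carried over the wire; assume the commit has a parent.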
                has_parent: true,
            }
        }),
    }
}

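/// Queries the backend for the current branch, statuses, and merge state, and
/// builds a fresh `RepositorySnapshot`, along with the events describing how
/// it differs from `prev_snapshot`.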
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    let statuses = backend.status(&[WORK_DIRECTORY_REPO_PATH.clone()]).await?;
    let merge_message = backend
        .merge_message()
        .await
        .and_then(|msg| Some(msg.lines().next()?.to_owned().into()));
    let merge_head_shas = backend
        .merge_head_shas()
        .into_iter()
        .map(SharedString::from)
        .collect();

    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        &(),
    );

    let merge_head_shas_changed = merge_head_shas != prev_snapshot.merge_head_shas;

    if merge_head_shas_changed
        || branch != prev_snapshot.branch
        || statuses_by_path != prev_snapshot.statuses_by_path
    {
        events.push(RepositoryEvent::Updated { full_scan: true });
    }

    let mut current_merge_conflicts = TreeSet::default();
    for (repo_path, status) in statuses.entries.iter() {
        if status.is_conflicted() {
            current_merge_conflicts.insert(repo_path.clone());
        }
    }

    // Cache merge conflict paths so they don't change from staging/unstaging,
    // until the merge heads change (at commit time, etc.).
    let mut merge_conflicts = prev_snapshot.merge_conflicts.clone();
    if merge_head_shas_changed {
        merge_conflicts = current_merge_conflicts;
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    let snapshot = RepositorySnapshot {
        id,
        merge_message,
        statuses_by_path,
        work_directory_abs_path,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        merge_conflicts,
        merge_head_shas,
    };

    Ok((snapshot, events))
}

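/// Converts a protobuf file status into a `FileStatus`, falling back to the
/// legacy single-code `simple_status` when no structured variant is present.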
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

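/// Converts a `FileStatus` into its structured protobuf representation.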
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}