1pub mod git_traversal;
2
3use crate::{
4 ProjectEnvironment, ProjectItem, ProjectPath,
5 buffer_store::{BufferStore, BufferStoreEvent},
6 worktree_store::{WorktreeStore, WorktreeStoreEvent},
7};
8use anyhow::{Context as _, Result, anyhow, bail};
9use askpass::AskPassDelegate;
10use buffer_diff::{BufferDiff, BufferDiffEvent};
11use client::ProjectId;
12use collections::HashMap;
13use fs::Fs;
14use futures::{
15 FutureExt as _, StreamExt as _,
16 channel::{mpsc, oneshot},
17 future::{self, Shared},
18};
19use git::{
20 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
21 blame::Blame,
22 parse_git_remote_url,
23 repository::{
24 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, GitRepository,
25 GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
26 UpstreamTrackingStatus,
27 },
28 status::{
29 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
30 },
31};
32use gpui::{
33 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
34 WeakEntity,
35};
36use language::{
37 Buffer, BufferEvent, Language, LanguageRegistry,
38 proto::{deserialize_version, serialize_version},
39};
40use parking_lot::Mutex;
41use postage::stream::Stream as _;
42use rpc::{
43 AnyProtoClient, TypedEnvelope,
44 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
45};
46use serde::Deserialize;
47use std::{
48 cmp::Ordering,
49 collections::{BTreeSet, VecDeque},
50 future::Future,
51 mem,
52 ops::Range,
53 path::{Path, PathBuf},
54 sync::{
55 Arc,
56 atomic::{self, AtomicU64},
57 },
58 time::Instant,
59};
60use sum_tree::{Edit, SumTree, TreeSet};
61use text::{Bias, BufferId};
62use util::{ResultExt, debug_panic, post_inc};
63use worktree::{
64 File, PathKey, PathProgress, PathSummary, PathTarget, UpdatedGitRepositoriesSet,
65 UpdatedGitRepository, Worktree,
66};
67
68pub struct GitStore {
69 state: GitStoreState,
70 buffer_store: Entity<BufferStore>,
71 worktree_store: Entity<WorktreeStore>,
72 repositories: HashMap<RepositoryId, Entity<Repository>>,
73 active_repo_id: Option<RepositoryId>,
74 #[allow(clippy::type_complexity)]
75 loading_diffs:
76 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
77 diffs: HashMap<BufferId, Entity<BufferDiffState>>,
78 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
79 _subscriptions: Vec<Subscription>,
80}
81
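/// Diffs for a single buffer that have been shared with a remote peer.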
82#[derive(Default)]
83struct SharedDiffs {
84 unstaged: Option<Entity<BufferDiff>>,
85 uncommitted: Option<Entity<BufferDiff>>,
86}
87
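/// Per-buffer state for the unstaged and uncommitted diffs of that buffer, along
/// with everything needed to recalculate them when the buffer or its git bases change.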
88struct BufferDiffState {
89 unstaged_diff: Option<WeakEntity<BufferDiff>>,
90 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
91 recalculate_diff_task: Option<Task<Result<()>>>,
92 language: Option<Arc<Language>>,
93 language_registry: Option<Arc<LanguageRegistry>>,
94 recalculating_tx: postage::watch::Sender<bool>,
95
96 /// These operation counts are used to ensure that head and index text
97 /// values read from the git repository are up-to-date with any hunk staging
98 /// operations that have been performed on the BufferDiff.
99 ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, `hunk_staging_operation_count_as_of_write` is updated to
    /// reflect the operation count that prompted the write.
104 hunk_staging_operation_count: usize,
105 hunk_staging_operation_count_as_of_write: usize,
106
107 head_text: Option<Arc<String>>,
108 index_text: Option<Arc<String>>,
109 head_changed: bool,
110 index_changed: bool,
111 language_changed: bool,
112}
113
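/// Describes how the base texts used for a buffer's diffs have changed: the index
/// text, the HEAD text, each independently, or both to the same value.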
114#[derive(Clone, Debug)]
115enum DiffBasesChange {
116 SetIndex(Option<String>),
117 SetHead(Option<String>),
118 SetEach {
119 index: Option<String>,
120 head: Option<String>,
121 },
122 SetBoth(Option<String>),
123}
124
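/// The kind of diff maintained for a buffer: against the index (unstaged) or
/// against HEAD (uncommitted).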
125#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
126enum DiffKind {
127 Unstaged,
128 Uncommitted,
129}
130
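/// The role this git store plays in the project: a local store that accesses
/// repositories through the filesystem, the client side of an SSH project, or a
/// remote (collaborative) store that forwards git operations upstream.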
131enum GitStoreState {
132 Local {
133 next_repository_id: Arc<AtomicU64>,
134 downstream: Option<LocalDownstreamState>,
135 project_environment: Entity<ProjectEnvironment>,
136 fs: Arc<dyn Fs>,
137 },
138 Ssh {
139 upstream_client: AnyProtoClient,
140 upstream_project_id: ProjectId,
141 downstream: Option<(AnyProtoClient, ProjectId)>,
142 },
143 Remote {
144 upstream_client: AnyProtoClient,
145 upstream_project_id: ProjectId,
146 },
147}
148
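/// A repository update to be forwarded to the downstream client.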
149enum DownstreamUpdate {
150 UpdateRepository(RepositorySnapshot),
151 RemoveRepository(RepositoryId),
152}
153
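/// State used by a shared local store to stream repository updates to the
/// downstream client.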
154struct LocalDownstreamState {
155 client: AnyProtoClient,
156 project_id: ProjectId,
157 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
158 _task: Task<Result<()>>,
159}
160
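/// A checkpoint of the state of every repository in a [`GitStore`], keyed by each
/// repository's working directory path.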
161#[derive(Clone)]
162pub struct GitStoreCheckpoint {
163 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
164}
165
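/// The git status of a single path within a repository.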
166#[derive(Clone, Debug, PartialEq, Eq)]
167pub struct StatusEntry {
168 pub repo_path: RepoPath,
169 pub status: FileStatus,
170}
171
172impl StatusEntry {
173 fn to_proto(&self) -> proto::StatusEntry {
174 let simple_status = match self.status {
175 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
176 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
177 FileStatus::Tracked(TrackedStatus {
178 index_status,
179 worktree_status,
180 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
181 worktree_status
182 } else {
183 index_status
184 }),
185 };
186
187 proto::StatusEntry {
188 repo_path: self.repo_path.as_ref().to_proto(),
189 simple_status,
190 status: Some(status_to_proto(self.status)),
191 }
192 }
193}
194
195impl TryFrom<proto::StatusEntry> for StatusEntry {
196 type Error = anyhow::Error;
197
198 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
199 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
200 let status = status_from_proto(value.simple_status, value.status)?;
201 Ok(Self { repo_path, status })
202 }
203}
204
205impl sum_tree::Item for StatusEntry {
206 type Summary = PathSummary<GitSummary>;
207
208 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
209 PathSummary {
210 max_path: self.repo_path.0.clone(),
211 item_summary: self.status.summary(),
212 }
213 }
214}
215
216impl sum_tree::KeyedItem for StatusEntry {
217 type Key = PathKey;
218
219 fn key(&self) -> Self::Key {
220 PathKey(self.repo_path.0.clone())
221 }
222}
223
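/// Identifies a repository within a [`GitStore`].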
224#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
225pub struct RepositoryId(pub u64);
226
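/// An immutable snapshot of a repository's state: its working directory, branch,
/// head commit, merge state, and per-path statuses.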
227#[derive(Clone, Debug, PartialEq, Eq)]
228pub struct RepositorySnapshot {
229 pub id: RepositoryId,
230 pub merge_message: Option<SharedString>,
231 pub statuses_by_path: SumTree<StatusEntry>,
232 pub work_directory_abs_path: Arc<Path>,
233 pub branch: Option<Branch>,
234 pub head_commit: Option<CommitDetails>,
235 pub merge_conflicts: TreeSet<RepoPath>,
236 pub merge_head_shas: Vec<SharedString>,
237 pub scan_id: u64,
238}
239
240type JobId = u64;
241
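/// Information about a git job that is currently running.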
242#[derive(Clone, Debug, PartialEq, Eq)]
243pub struct JobInfo {
244 pub start: Instant,
245 pub message: SharedString,
246}
247
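/// A single git repository tracked by the [`GitStore`], combining its latest
/// snapshot with the machinery for running git jobs against it.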
248pub struct Repository {
249 this: WeakEntity<Self>,
250 snapshot: RepositorySnapshot,
251 commit_message_buffer: Option<Entity<Buffer>>,
252 git_store: WeakEntity<GitStore>,
    /// For a local repository, holds paths that have had worktree events since the
    /// last status scan completed, and that should be examined during the next
    /// status scan.
255 paths_needing_status_update: BTreeSet<RepoPath>,
256 job_sender: mpsc::UnboundedSender<GitJob>,
257 active_jobs: HashMap<JobId, JobInfo>,
258 job_id: JobId,
259 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
260 latest_askpass_id: u64,
261}
262
263impl std::ops::Deref for Repository {
264 type Target = RepositorySnapshot;
265
266 fn deref(&self) -> &Self::Target {
267 &self.snapshot
268 }
269}
270
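/// How git operations are performed for a repository: directly against a local
/// backend, or by sending requests to a remote project over RPC.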
271#[derive(Clone)]
272pub enum RepositoryState {
273 Local {
274 backend: Arc<dyn GitRepository>,
275 environment: Arc<HashMap<String, String>>,
276 },
277 Remote {
278 project_id: ProjectId,
279 client: AnyProtoClient,
280 },
281}
282
283#[derive(Clone, Debug)]
284pub enum RepositoryEvent {
285 Updated { full_scan: bool },
286 MergeHeadsChanged,
287}
288
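/// Emitted by a [`Repository`] when its active jobs change.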
289#[derive(Clone, Debug)]
290pub struct JobsUpdated;
291
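/// Events emitted by the [`GitStore`]. The boolean in `RepositoryUpdated` indicates
/// whether the updated repository is the active one.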
292#[derive(Debug)]
293pub enum GitStoreEvent {
294 ActiveRepositoryChanged(Option<RepositoryId>),
295 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
296 RepositoryAdded(RepositoryId),
297 RepositoryRemoved(RepositoryId),
298 IndexWriteError(anyhow::Error),
299 JobsUpdated,
300}
301
302impl EventEmitter<RepositoryEvent> for Repository {}
303impl EventEmitter<JobsUpdated> for Repository {}
304impl EventEmitter<GitStoreEvent> for GitStore {}
305
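/// A unit of work to run against a repository's state on its job queue.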
306pub struct GitJob {
307 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
308 key: Option<GitJobKey>,
309}
310
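/// Identifies a class of git jobs (e.g. writing the index for a given path).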
311#[derive(PartialEq, Eq)]
312enum GitJobKey {
313 WriteIndex(RepoPath),
314 ReloadBufferDiffBases,
315 RefreshStatuses,
316 ReloadGitState,
317}
318
319impl GitStore {
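    /// Creates a git store for a local project, accessing repositories directly
    /// through the filesystem.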
320 pub fn local(
321 worktree_store: &Entity<WorktreeStore>,
322 buffer_store: Entity<BufferStore>,
323 environment: Entity<ProjectEnvironment>,
324 fs: Arc<dyn Fs>,
325 cx: &mut Context<Self>,
326 ) -> Self {
327 Self::new(
328 worktree_store.clone(),
329 buffer_store,
330 GitStoreState::Local {
331 next_repository_id: Arc::new(AtomicU64::new(1)),
332 downstream: None,
333 project_environment: environment,
334 fs,
335 },
336 cx,
337 )
338 }
339
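    /// Creates a git store for a remote (collaborative) project, forwarding git
    /// operations to the project host.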
340 pub fn remote(
341 worktree_store: &Entity<WorktreeStore>,
342 buffer_store: Entity<BufferStore>,
343 upstream_client: AnyProtoClient,
344 project_id: ProjectId,
345 cx: &mut Context<Self>,
346 ) -> Self {
347 Self::new(
348 worktree_store.clone(),
349 buffer_store,
350 GitStoreState::Remote {
351 upstream_client,
352 upstream_project_id: project_id,
353 },
354 cx,
355 )
356 }
357
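    /// Creates a git store for an SSH project, forwarding git operations to the
    /// remote host over the given client.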
358 pub fn ssh(
359 worktree_store: &Entity<WorktreeStore>,
360 buffer_store: Entity<BufferStore>,
361 upstream_client: AnyProtoClient,
362 cx: &mut Context<Self>,
363 ) -> Self {
364 Self::new(
365 worktree_store.clone(),
366 buffer_store,
367 GitStoreState::Ssh {
368 upstream_client,
369 upstream_project_id: ProjectId(SSH_PROJECT_ID),
370 downstream: None,
371 },
372 cx,
373 )
374 }
375
376 fn new(
377 worktree_store: Entity<WorktreeStore>,
378 buffer_store: Entity<BufferStore>,
379 state: GitStoreState,
380 cx: &mut Context<Self>,
381 ) -> Self {
382 let _subscriptions = vec![
383 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
384 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
385 ];
386
387 GitStore {
388 state,
389 buffer_store,
390 worktree_store,
391 repositories: HashMap::default(),
392 active_repo_id: None,
393 _subscriptions,
394 loading_diffs: HashMap::default(),
395 shared_diffs: HashMap::default(),
396 diffs: HashMap::default(),
397 }
398 }
399
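    /// Registers the RPC handlers for git-related messages.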
400 pub fn init(client: &AnyProtoClient) {
401 client.add_entity_request_handler(Self::handle_get_remotes);
402 client.add_entity_request_handler(Self::handle_get_branches);
403 client.add_entity_request_handler(Self::handle_change_branch);
404 client.add_entity_request_handler(Self::handle_create_branch);
405 client.add_entity_request_handler(Self::handle_git_init);
406 client.add_entity_request_handler(Self::handle_push);
407 client.add_entity_request_handler(Self::handle_pull);
408 client.add_entity_request_handler(Self::handle_fetch);
409 client.add_entity_request_handler(Self::handle_stage);
410 client.add_entity_request_handler(Self::handle_unstage);
411 client.add_entity_request_handler(Self::handle_commit);
412 client.add_entity_request_handler(Self::handle_reset);
413 client.add_entity_request_handler(Self::handle_show);
414 client.add_entity_request_handler(Self::handle_load_commit_diff);
415 client.add_entity_request_handler(Self::handle_checkout_files);
416 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
417 client.add_entity_request_handler(Self::handle_set_index_text);
418 client.add_entity_request_handler(Self::handle_askpass);
419 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
420 client.add_entity_request_handler(Self::handle_git_diff);
421 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
422 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
423 client.add_entity_message_handler(Self::handle_update_diff_bases);
424 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
425 client.add_entity_request_handler(Self::handle_blame_buffer);
426 client.add_entity_message_handler(Self::handle_update_repository);
427 client.add_entity_message_handler(Self::handle_remove_repository);
428 }
429
430 pub fn is_local(&self) -> bool {
431 matches!(self.state, GitStoreState::Local { .. })
432 }
433
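    /// Starts sharing repository state with a downstream client, sending an initial
    /// snapshot of each repository followed by incremental updates.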
434 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
435 match &mut self.state {
436 GitStoreState::Ssh {
437 downstream: downstream_client,
438 ..
439 } => {
440 for repo in self.repositories.values() {
441 let update = repo.read(cx).snapshot.initial_update(project_id);
442 for update in split_repository_update(update) {
443 client.send(update).log_err();
444 }
445 }
446 *downstream_client = Some((client, ProjectId(project_id)));
447 }
448 GitStoreState::Local {
449 downstream: downstream_client,
450 ..
451 } => {
452 let mut snapshots = HashMap::default();
453 let (updates_tx, mut updates_rx) = mpsc::unbounded();
454 for repo in self.repositories.values() {
455 updates_tx
456 .unbounded_send(DownstreamUpdate::UpdateRepository(
457 repo.read(cx).snapshot.clone(),
458 ))
459 .ok();
460 }
461 *downstream_client = Some(LocalDownstreamState {
462 client: client.clone(),
463 project_id: ProjectId(project_id),
464 updates_tx,
465 _task: cx.spawn(async move |this, cx| {
466 cx.background_spawn(async move {
467 while let Some(update) = updates_rx.next().await {
468 match update {
469 DownstreamUpdate::UpdateRepository(snapshot) => {
470 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
471 {
472 let update =
473 snapshot.build_update(old_snapshot, project_id);
474 *old_snapshot = snapshot;
475 for update in split_repository_update(update) {
476 client.send(update)?;
477 }
478 } else {
479 let update = snapshot.initial_update(project_id);
480 for update in split_repository_update(update) {
481 client.send(update)?;
482 }
483 snapshots.insert(snapshot.id, snapshot);
484 }
485 }
486 DownstreamUpdate::RemoveRepository(id) => {
487 client.send(proto::RemoveRepository {
488 project_id,
489 id: id.to_proto(),
490 })?;
491 }
492 }
493 }
494 anyhow::Ok(())
495 })
496 .await
497 .ok();
498 this.update(cx, |this, _| {
499 if let GitStoreState::Local {
500 downstream: downstream_client,
501 ..
502 } = &mut this.state
503 {
504 downstream_client.take();
505 } else {
506 unreachable!("unshared called on remote store");
507 }
508 })
509 }),
510 });
511 }
512 GitStoreState::Remote { .. } => {
513 debug_panic!("shared called on remote store");
514 }
515 }
516 }
517
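    /// Stops sharing repository state with the downstream client.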
518 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
519 match &mut self.state {
520 GitStoreState::Local {
521 downstream: downstream_client,
522 ..
523 } => {
524 downstream_client.take();
525 }
526 GitStoreState::Ssh {
527 downstream: downstream_client,
528 ..
529 } => {
530 downstream_client.take();
531 }
532 GitStoreState::Remote { .. } => {
533 debug_panic!("unshared called on remote store");
534 }
535 }
536 self.shared_diffs.clear();
537 }
538
539 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
540 self.shared_diffs.remove(peer_id);
541 }
542
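    /// Returns the currently active repository, if any.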
543 pub fn active_repository(&self) -> Option<Entity<Repository>> {
544 self.active_repo_id
545 .as_ref()
546 .map(|id| self.repositories[&id].clone())
547 }
548
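    /// Returns a diff of the buffer's content against the git index, creating it
    /// (and loading the staged text) if it doesn't already exist. The returned task
    /// resolves once the diff has been calculated.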
549 pub fn open_unstaged_diff(
550 &mut self,
551 buffer: Entity<Buffer>,
552 cx: &mut Context<Self>,
553 ) -> Task<Result<Entity<BufferDiff>>> {
554 let buffer_id = buffer.read(cx).remote_id();
555 if let Some(diff_state) = self.diffs.get(&buffer_id) {
556 if let Some(unstaged_diff) = diff_state
557 .read(cx)
558 .unstaged_diff
559 .as_ref()
560 .and_then(|weak| weak.upgrade())
561 {
562 if let Some(task) =
563 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
564 {
565 return cx.background_executor().spawn(async move {
566 task.await;
567 Ok(unstaged_diff)
568 });
569 }
570 return Task::ready(Ok(unstaged_diff));
571 }
572 }
573
574 let Some((repo, repo_path)) =
575 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
576 else {
577 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
578 };
579
580 let task = self
581 .loading_diffs
582 .entry((buffer_id, DiffKind::Unstaged))
583 .or_insert_with(|| {
584 let staged_text = repo.update(cx, |repo, cx| {
585 repo.load_staged_text(buffer_id, repo_path, cx)
586 });
587 cx.spawn(async move |this, cx| {
588 Self::open_diff_internal(
589 this,
590 DiffKind::Unstaged,
591 staged_text.await.map(DiffBasesChange::SetIndex),
592 buffer,
593 cx,
594 )
595 .await
596 .map_err(Arc::new)
597 })
598 .shared()
599 })
600 .clone();
601
602 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
603 }
604
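    /// Returns a diff of the buffer's content against its HEAD version, creating it
    /// (and loading the committed text) if it doesn't already exist. The returned
    /// task resolves once the diff has been calculated.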
605 pub fn open_uncommitted_diff(
606 &mut self,
607 buffer: Entity<Buffer>,
608 cx: &mut Context<Self>,
609 ) -> Task<Result<Entity<BufferDiff>>> {
610 let buffer_id = buffer.read(cx).remote_id();
611
612 if let Some(diff_state) = self.diffs.get(&buffer_id) {
613 if let Some(uncommitted_diff) = diff_state
614 .read(cx)
615 .uncommitted_diff
616 .as_ref()
617 .and_then(|weak| weak.upgrade())
618 {
619 if let Some(task) =
620 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
621 {
622 return cx.background_executor().spawn(async move {
623 task.await;
624 Ok(uncommitted_diff)
625 });
626 }
627 return Task::ready(Ok(uncommitted_diff));
628 }
629 }
630
631 let Some((repo, repo_path)) =
632 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
633 else {
634 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
635 };
636
637 let task = self
638 .loading_diffs
639 .entry((buffer_id, DiffKind::Uncommitted))
640 .or_insert_with(|| {
641 let changes = repo.update(cx, |repo, cx| {
642 repo.load_committed_text(buffer_id, repo_path, cx)
643 });
644
645 cx.spawn(async move |this, cx| {
646 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
647 .await
648 .map_err(Arc::new)
649 })
650 .shared()
651 })
652 .clone();
653
654 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
655 }
656
657 async fn open_diff_internal(
658 this: WeakEntity<Self>,
659 kind: DiffKind,
660 texts: Result<DiffBasesChange>,
661 buffer_entity: Entity<Buffer>,
662 cx: &mut AsyncApp,
663 ) -> Result<Entity<BufferDiff>> {
664 let diff_bases_change = match texts {
665 Err(e) => {
666 this.update(cx, |this, cx| {
667 let buffer = buffer_entity.read(cx);
668 let buffer_id = buffer.remote_id();
669 this.loading_diffs.remove(&(buffer_id, kind));
670 })?;
671 return Err(e);
672 }
673 Ok(change) => change,
674 };
675
676 this.update(cx, |this, cx| {
677 let buffer = buffer_entity.read(cx);
678 let buffer_id = buffer.remote_id();
679 let language = buffer.language().cloned();
680 let language_registry = buffer.language_registry();
681 let text_snapshot = buffer.text_snapshot();
682 this.loading_diffs.remove(&(buffer_id, kind));
683
684 let diff_state = this
685 .diffs
686 .entry(buffer_id)
687 .or_insert_with(|| cx.new(|_| BufferDiffState::default()));
688
689 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
690
691 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
692 diff_state.update(cx, |diff_state, cx| {
693 diff_state.language = language;
694 diff_state.language_registry = language_registry;
695
696 match kind {
697 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
698 DiffKind::Uncommitted => {
699 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
700 diff
701 } else {
702 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
703 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
704 unstaged_diff
705 };
706
707 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
708 diff_state.uncommitted_diff = Some(diff.downgrade())
709 }
710 }
711
712 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
713 let rx = diff_state.wait_for_recalculation();
714
715 anyhow::Ok(async move {
716 if let Some(rx) = rx {
717 rx.await;
718 }
719 Ok(diff)
720 })
721 })
722 })??
723 .await
724 }
725
726 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
727 let diff_state = self.diffs.get(&buffer_id)?;
728 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
729 }
730
731 pub fn get_uncommitted_diff(
732 &self,
733 buffer_id: BufferId,
734 cx: &App,
735 ) -> Option<Entity<BufferDiff>> {
736 let diff_state = self.diffs.get(&buffer_id)?;
737 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
738 }
739
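    /// Returns the git status of the file at the given project path, if it belongs
    /// to a repository.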
740 pub fn project_path_git_status(
741 &self,
742 project_path: &ProjectPath,
743 cx: &App,
744 ) -> Option<FileStatus> {
745 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
746 Some(repo.read(cx).status_for_path(&repo_path)?.status)
747 }
748
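    /// Captures a checkpoint of every repository in the store, which can later be
    /// restored with [`Self::restore_checkpoint`] or compared with
    /// [`Self::compare_checkpoints`].
    ///
    /// Illustrative sketch only (not a doctest); assumes a `git_store: Entity<GitStore>`
    /// and an async context with a `cx`:
    ///
    /// ```ignore
    /// let checkpoint = git_store
    ///     .update(cx, |store, cx| store.checkpoint(cx))?
    ///     .await?;
    /// // ...modify the working copies...
    /// git_store
    ///     .update(cx, |store, cx| store.restore_checkpoint(checkpoint, cx))?
    ///     .await?;
    /// ```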
749 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
750 let mut work_directory_abs_paths = Vec::new();
751 let mut checkpoints = Vec::new();
752 for repository in self.repositories.values() {
753 repository.update(cx, |repository, _| {
754 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
755 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
756 });
757 }
758
759 cx.background_executor().spawn(async move {
760 let checkpoints = future::try_join_all(checkpoints).await?;
761 Ok(GitStoreCheckpoint {
762 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
763 .into_iter()
764 .zip(checkpoints)
765 .collect(),
766 })
767 })
768 }
769
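    /// Restores each repository recorded in the checkpoint to its checkpointed state.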
770 pub fn restore_checkpoint(
771 &self,
772 checkpoint: GitStoreCheckpoint,
773 cx: &mut App,
774 ) -> Task<Result<()>> {
775 let repositories_by_work_dir_abs_path = self
776 .repositories
777 .values()
778 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
779 .collect::<HashMap<_, _>>();
780
781 let mut tasks = Vec::new();
782 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
783 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
784 let restore = repository.update(cx, |repository, _| {
785 repository.restore_checkpoint(checkpoint)
786 });
787 tasks.push(async move { restore.await? });
788 }
789 }
790 cx.background_spawn(async move {
791 future::try_join_all(tasks).await?;
792 Ok(())
793 })
794 }
795
796 /// Compares two checkpoints, returning true if they are equal.
797 pub fn compare_checkpoints(
798 &self,
799 left: GitStoreCheckpoint,
800 mut right: GitStoreCheckpoint,
801 cx: &mut App,
802 ) -> Task<Result<bool>> {
803 let repositories_by_work_dir_abs_path = self
804 .repositories
805 .values()
806 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
807 .collect::<HashMap<_, _>>();
808
809 let mut tasks = Vec::new();
810 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
811 if let Some(right_checkpoint) = right
812 .checkpoints_by_work_dir_abs_path
813 .remove(&work_dir_abs_path)
814 {
815 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
816 {
817 let compare = repository.update(cx, |repository, _| {
818 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
819 });
820
821 tasks.push(async move { compare.await? });
822 }
823 } else {
824 return Task::ready(Ok(false));
825 }
826 }
827 cx.background_spawn(async move {
828 Ok(future::try_join_all(tasks)
829 .await?
830 .into_iter()
831 .all(|result| result))
832 })
833 }
834
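    /// Discards the given checkpoint, deleting its saved state in each corresponding
    /// repository.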
835 pub fn delete_checkpoint(
836 &self,
837 checkpoint: GitStoreCheckpoint,
838 cx: &mut App,
839 ) -> Task<Result<()>> {
840 let repositories_by_work_directory_abs_path = self
841 .repositories
842 .values()
843 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
844 .collect::<HashMap<_, _>>();
845
846 let mut tasks = Vec::new();
847 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
848 if let Some(repository) =
849 repositories_by_work_directory_abs_path.get(&work_dir_abs_path)
850 {
851 let delete = repository.update(cx, |this, _| this.delete_checkpoint(checkpoint));
852 tasks.push(async move { delete.await? });
853 }
854 }
855 cx.background_spawn(async move {
856 future::try_join_all(tasks).await?;
857 Ok(())
858 })
859 }
860
861 /// Blames a buffer.
862 pub fn blame_buffer(
863 &self,
864 buffer: &Entity<Buffer>,
865 version: Option<clock::Global>,
866 cx: &mut App,
867 ) -> Task<Result<Option<Blame>>> {
868 let buffer = buffer.read(cx);
869 let Some((repo, repo_path)) =
870 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
871 else {
872 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
873 };
874 let content = match &version {
875 Some(version) => buffer.rope_for_version(version).clone(),
876 None => buffer.as_rope().clone(),
877 };
878 let version = version.unwrap_or(buffer.version());
879 let buffer_id = buffer.remote_id();
880
881 let rx = repo.update(cx, |repo, _| {
882 repo.send_job(None, move |state, _| async move {
883 match state {
884 RepositoryState::Local { backend, .. } => backend
885 .blame(repo_path.clone(), content)
886 .await
887 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
888 .map(Some),
889 RepositoryState::Remote { project_id, client } => {
890 let response = client
891 .request(proto::BlameBuffer {
892 project_id: project_id.to_proto(),
893 buffer_id: buffer_id.into(),
894 version: serialize_version(&version),
895 })
896 .await?;
897 Ok(deserialize_blame_buffer_response(response))
898 }
899 }
900 })
901 });
902
903 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
904 }
905
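    /// Builds a permalink URL to the given line range of the buffer's file on its
    /// git hosting provider.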
906 pub fn get_permalink_to_line(
907 &self,
908 buffer: &Entity<Buffer>,
909 selection: Range<u32>,
910 cx: &mut App,
911 ) -> Task<Result<url::Url>> {
912 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
913 return Task::ready(Err(anyhow!("buffer has no file")));
914 };
915
916 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
917 &(file.worktree.read(cx).id(), file.path.clone()).into(),
918 cx,
919 ) else {
920 // If we're not in a Git repo, check whether this is a Rust source
921 // file in the Cargo registry (presumably opened with go-to-definition
922 // from a normal Rust file). If so, we can put together a permalink
923 // using crate metadata.
924 if buffer
925 .read(cx)
926 .language()
927 .is_none_or(|lang| lang.name() != "Rust".into())
928 {
929 return Task::ready(Err(anyhow!("no permalink available")));
930 }
931 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
932 return Task::ready(Err(anyhow!("no permalink available")));
933 };
934 return cx.spawn(async move |cx| {
935 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
936 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
937 .map_err(|_| anyhow!("no permalink available"))
938 });
939
940 // TODO remote case
941 };
942
943 let buffer_id = buffer.read(cx).remote_id();
944 let branch = repo.read(cx).branch.clone();
945 let remote = branch
946 .as_ref()
947 .and_then(|b| b.upstream.as_ref())
948 .and_then(|b| b.remote_name())
949 .unwrap_or("origin")
950 .to_string();
951
952 let rx = repo.update(cx, |repo, _| {
953 repo.send_job(None, move |state, cx| async move {
954 match state {
955 RepositoryState::Local { backend, .. } => {
956 let origin_url = backend
957 .remote_url(&remote)
958 .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;
959
960 let sha = backend
961 .head_sha()
962 .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
963
964 let provider_registry =
965 cx.update(GitHostingProviderRegistry::default_global)?;
966
967 let (provider, remote) =
968 parse_git_remote_url(provider_registry, &origin_url)
969 .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;
970
971 let path = repo_path
972 .to_str()
973 .ok_or_else(|| anyhow!("failed to convert path to string"))?;
974
975 Ok(provider.build_permalink(
976 remote,
977 BuildPermalinkParams {
978 sha: &sha,
979 path,
980 selection: Some(selection),
981 },
982 ))
983 }
984 RepositoryState::Remote { project_id, client } => {
985 let response = client
986 .request(proto::GetPermalinkToLine {
987 project_id: project_id.to_proto(),
988 buffer_id: buffer_id.into(),
989 selection: Some(proto::Range {
990 start: selection.start as u64,
991 end: selection.end as u64,
992 }),
993 })
994 .await?;
995
996 url::Url::parse(&response.permalink).context("failed to parse permalink")
997 }
998 }
999 })
1000 });
1001 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1002 }
1003
1004 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1005 match &self.state {
1006 GitStoreState::Local {
1007 downstream: downstream_client,
1008 ..
1009 } => downstream_client
1010 .as_ref()
1011 .map(|state| (state.client.clone(), state.project_id)),
1012 GitStoreState::Ssh {
1013 downstream: downstream_client,
1014 ..
1015 } => downstream_client.clone(),
1016 GitStoreState::Remote { .. } => None,
1017 }
1018 }
1019
1020 fn upstream_client(&self) -> Option<AnyProtoClient> {
1021 match &self.state {
1022 GitStoreState::Local { .. } => None,
1023 GitStoreState::Ssh {
1024 upstream_client, ..
1025 }
1026 | GitStoreState::Remote {
1027 upstream_client, ..
1028 } => Some(upstream_client.clone()),
1029 }
1030 }
1031
1032 fn on_worktree_store_event(
1033 &mut self,
1034 worktree_store: Entity<WorktreeStore>,
1035 event: &WorktreeStoreEvent,
1036 cx: &mut Context<Self>,
1037 ) {
1038 let GitStoreState::Local {
1039 project_environment,
1040 downstream,
1041 next_repository_id,
1042 fs,
1043 } = &self.state
1044 else {
1045 return;
1046 };
1047
1048 match event {
1049 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1050 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
1051 for (relative_path, _, _) in updated_entries.iter() {
1052 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1053 &(*worktree_id, relative_path.clone()).into(),
1054 cx,
1055 ) else {
1056 continue;
1057 };
1058 paths_by_git_repo.entry(repo).or_default().push(repo_path)
1059 }
1060
1061 for (repo, paths) in paths_by_git_repo {
1062 repo.update(cx, |repo, cx| {
1063 repo.paths_changed(
1064 paths,
1065 downstream
1066 .as_ref()
1067 .map(|downstream| downstream.updates_tx.clone()),
1068 cx,
1069 );
1070 });
1071 }
1072 }
1073 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1074 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1075 else {
1076 return;
1077 };
1078 if !worktree.read(cx).is_visible() {
1079 log::debug!(
1080 "not adding repositories for local worktree {:?} because it's not visible",
1081 worktree.read(cx).abs_path()
1082 );
1083 return;
1084 }
1085 self.update_repositories_from_worktree(
1086 project_environment.clone(),
1087 next_repository_id.clone(),
1088 downstream
1089 .as_ref()
1090 .map(|downstream| downstream.updates_tx.clone()),
1091 changed_repos.clone(),
1092 fs.clone(),
1093 cx,
1094 );
1095 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1096 }
1097 _ => {}
1098 }
1099 }
1100
1101 fn on_repository_event(
1102 &mut self,
1103 repo: Entity<Repository>,
1104 event: &RepositoryEvent,
1105 cx: &mut Context<Self>,
1106 ) {
1107 let id = repo.read(cx).id;
1108 cx.emit(GitStoreEvent::RepositoryUpdated(
1109 id,
1110 event.clone(),
1111 self.active_repo_id == Some(id),
1112 ))
1113 }
1114
1115 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1116 cx.emit(GitStoreEvent::JobsUpdated)
1117 }
1118
    /// Updates our list of repositories and schedules git scans in response to a
    /// notification from a worktree.
1120 fn update_repositories_from_worktree(
1121 &mut self,
1122 project_environment: Entity<ProjectEnvironment>,
1123 next_repository_id: Arc<AtomicU64>,
1124 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1125 updated_git_repositories: UpdatedGitRepositoriesSet,
1126 fs: Arc<dyn Fs>,
1127 cx: &mut Context<Self>,
1128 ) {
1129 let mut removed_ids = Vec::new();
1130 for update in updated_git_repositories.iter() {
1131 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1132 let existing_work_directory_abs_path =
1133 repo.read(cx).work_directory_abs_path.clone();
1134 Some(&existing_work_directory_abs_path)
1135 == update.old_work_directory_abs_path.as_ref()
1136 || Some(&existing_work_directory_abs_path)
1137 == update.new_work_directory_abs_path.as_ref()
1138 }) {
1139 if let Some(new_work_directory_abs_path) =
1140 update.new_work_directory_abs_path.clone()
1141 {
1142 existing.update(cx, |existing, cx| {
1143 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1144 existing.schedule_scan(updates_tx.clone(), cx);
1145 });
1146 } else {
1147 removed_ids.push(*id);
1148 }
1149 } else if let UpdatedGitRepository {
1150 new_work_directory_abs_path: Some(work_directory_abs_path),
1151 dot_git_abs_path: Some(dot_git_abs_path),
1152 repository_dir_abs_path: Some(repository_dir_abs_path),
1153 common_dir_abs_path: Some(common_dir_abs_path),
1154 ..
1155 } = update
1156 {
1157 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1158 let git_store = cx.weak_entity();
1159 let repo = cx.new(|cx| {
1160 let mut repo = Repository::local(
1161 id,
1162 work_directory_abs_path.clone(),
1163 dot_git_abs_path.clone(),
1164 repository_dir_abs_path.clone(),
1165 common_dir_abs_path.clone(),
1166 project_environment.downgrade(),
1167 fs.clone(),
1168 git_store,
1169 cx,
1170 );
1171 repo.schedule_scan(updates_tx.clone(), cx);
1172 repo
1173 });
1174 self._subscriptions
1175 .push(cx.subscribe(&repo, Self::on_repository_event));
1176 self._subscriptions
1177 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1178 self.repositories.insert(id, repo);
1179 cx.emit(GitStoreEvent::RepositoryAdded(id));
1180 self.active_repo_id.get_or_insert_with(|| {
1181 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1182 id
1183 });
1184 }
1185 }
1186
1187 for id in removed_ids {
1188 if self.active_repo_id == Some(id) {
1189 self.active_repo_id = None;
1190 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1191 }
1192 self.repositories.remove(&id);
1193 if let Some(updates_tx) = updates_tx.as_ref() {
1194 updates_tx
1195 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1196 .ok();
1197 }
1198 }
1199 }
1200
1201 fn on_buffer_store_event(
1202 &mut self,
1203 _: Entity<BufferStore>,
1204 event: &BufferStoreEvent,
1205 cx: &mut Context<Self>,
1206 ) {
1207 match event {
1208 BufferStoreEvent::BufferAdded(buffer) => {
1209 cx.subscribe(&buffer, |this, buffer, event, cx| {
1210 if let BufferEvent::LanguageChanged = event {
1211 let buffer_id = buffer.read(cx).remote_id();
1212 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1213 diff_state.update(cx, |diff_state, cx| {
1214 diff_state.buffer_language_changed(buffer, cx);
1215 });
1216 }
1217 }
1218 })
1219 .detach();
1220 }
1221 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1222 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1223 diffs.remove(buffer_id);
1224 }
1225 }
1226 BufferStoreEvent::BufferDropped(buffer_id) => {
1227 self.diffs.remove(&buffer_id);
1228 for diffs in self.shared_diffs.values_mut() {
1229 diffs.remove(buffer_id);
1230 }
1231 }
1232
1233 _ => {}
1234 }
1235 }
1236
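    /// Recalculates the diffs for the given buffers, returning a future that resolves
    /// once all recalculations have completed.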
1237 pub fn recalculate_buffer_diffs(
1238 &mut self,
1239 buffers: Vec<Entity<Buffer>>,
1240 cx: &mut Context<Self>,
1241 ) -> impl Future<Output = ()> + use<> {
1242 let mut futures = Vec::new();
1243 for buffer in buffers {
1244 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1245 let buffer = buffer.read(cx).text_snapshot();
1246 diff_state.update(cx, |diff_state, cx| {
1247 diff_state.recalculate_diffs(buffer, cx);
1248 futures.extend(diff_state.wait_for_recalculation());
1249 });
1250 }
1251 }
1252 async move {
1253 futures::future::join_all(futures).await;
1254 }
1255 }
1256
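    /// When hunks are staged or unstaged in a buffer's diff, schedules a job to write
    /// the new index text to disk. If the write fails, the pending hunk state is
    /// cleared and an `IndexWriteError` event is emitted.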
1257 fn on_buffer_diff_event(
1258 &mut self,
1259 diff: Entity<buffer_diff::BufferDiff>,
1260 event: &BufferDiffEvent,
1261 cx: &mut Context<Self>,
1262 ) {
1263 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1264 let buffer_id = diff.read(cx).buffer_id;
1265 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1266 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1267 diff_state.hunk_staging_operation_count += 1;
1268 diff_state.hunk_staging_operation_count
1269 });
1270 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1271 let recv = repo.update(cx, |repo, cx| {
1272 log::debug!("hunks changed for {}", path.display());
1273 repo.spawn_set_index_text_job(
1274 path,
1275 new_index_text.as_ref().map(|rope| rope.to_string()),
1276 Some(hunk_staging_operation_count),
1277 cx,
1278 )
1279 });
1280 let diff = diff.downgrade();
1281 cx.spawn(async move |this, cx| {
1282 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1283 diff.update(cx, |diff, cx| {
1284 diff.clear_pending_hunks(cx);
1285 })
1286 .ok();
1287 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1288 .ok();
1289 }
1290 })
1291 .detach();
1292 }
1293 }
1294 }
1295 }
1296
1297 fn local_worktree_git_repos_changed(
1298 &mut self,
1299 worktree: Entity<Worktree>,
1300 changed_repos: &UpdatedGitRepositoriesSet,
1301 cx: &mut Context<Self>,
1302 ) {
1303 log::debug!("local worktree repos changed");
1304 debug_assert!(worktree.read(cx).is_local());
1305
1306 for repository in self.repositories.values() {
1307 repository.update(cx, |repository, cx| {
1308 let repo_abs_path = &repository.work_directory_abs_path;
1309 if changed_repos.iter().any(|update| {
1310 update.old_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
1311 || update.new_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
1312 }) {
1313 repository.reload_buffer_diff_bases(cx);
1314 }
1315 });
1316 }
1317 }
1318
1319 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1320 &self.repositories
1321 }
1322
1323 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1324 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1325 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1326 Some(status.status)
1327 }
1328
1329 pub fn repository_and_path_for_buffer_id(
1330 &self,
1331 buffer_id: BufferId,
1332 cx: &App,
1333 ) -> Option<(Entity<Repository>, RepoPath)> {
1334 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1335 let project_path = buffer.read(cx).project_path(cx)?;
1336 self.repository_and_path_for_project_path(&project_path, cx)
1337 }
1338
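    /// Returns the repository containing the given project path, along with that
    /// path expressed relative to the repository's working directory. If multiple
    /// repositories contain the path, the innermost one wins.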
1339 pub fn repository_and_path_for_project_path(
1340 &self,
1341 path: &ProjectPath,
1342 cx: &App,
1343 ) -> Option<(Entity<Repository>, RepoPath)> {
1344 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1345 self.repositories
1346 .values()
1347 .filter_map(|repo| {
1348 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1349 Some((repo.clone(), repo_path))
1350 })
1351 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1352 }
1353
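    /// Initializes a new git repository at the given path, either locally or by
    /// forwarding the request upstream.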
1354 pub fn git_init(
1355 &self,
1356 path: Arc<Path>,
1357 fallback_branch_name: String,
1358 cx: &App,
1359 ) -> Task<Result<()>> {
1360 match &self.state {
1361 GitStoreState::Local { fs, .. } => {
1362 let fs = fs.clone();
1363 cx.background_executor()
1364 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1365 }
1366 GitStoreState::Ssh {
1367 upstream_client,
1368 upstream_project_id: project_id,
1369 ..
1370 }
1371 | GitStoreState::Remote {
1372 upstream_client,
1373 upstream_project_id: project_id,
1374 ..
1375 } => {
1376 let client = upstream_client.clone();
1377 let project_id = *project_id;
1378 cx.background_executor().spawn(async move {
1379 client
1380 .request(proto::GitInit {
1381 project_id: project_id.0,
1382 abs_path: path.to_string_lossy().to_string(),
1383 fallback_branch_name,
1384 })
1385 .await?;
1386 Ok(())
1387 })
1388 }
1389 }
1390 }
1391
1392 async fn handle_update_repository(
1393 this: Entity<Self>,
1394 envelope: TypedEnvelope<proto::UpdateRepository>,
1395 mut cx: AsyncApp,
1396 ) -> Result<()> {
1397 this.update(&mut cx, |this, cx| {
1398 let mut update = envelope.payload;
1399
1400 let id = RepositoryId::from_proto(update.id);
1401 let client = this
1402 .upstream_client()
1403 .context("no upstream client")?
1404 .clone();
1405
1406 let mut is_new = false;
1407 let repo = this.repositories.entry(id).or_insert_with(|| {
1408 is_new = true;
1409 let git_store = cx.weak_entity();
1410 cx.new(|cx| {
1411 Repository::remote(
1412 id,
1413 Path::new(&update.abs_path).into(),
1414 ProjectId(update.project_id),
1415 client,
1416 git_store,
1417 cx,
1418 )
1419 })
1420 });
1421 if is_new {
1422 this._subscriptions
1423 .push(cx.subscribe(&repo, Self::on_repository_event))
1424 }
1425
1426 repo.update(cx, {
1427 let update = update.clone();
1428 |repo, cx| repo.apply_remote_update(update, cx)
1429 })?;
1430
1431 this.active_repo_id.get_or_insert_with(|| {
1432 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1433 id
1434 });
1435
1436 if let Some((client, project_id)) = this.downstream_client() {
1437 update.project_id = project_id.to_proto();
1438 client.send(update).log_err();
1439 }
1440 Ok(())
1441 })?
1442 }
1443
1444 async fn handle_remove_repository(
1445 this: Entity<Self>,
1446 envelope: TypedEnvelope<proto::RemoveRepository>,
1447 mut cx: AsyncApp,
1448 ) -> Result<()> {
1449 this.update(&mut cx, |this, cx| {
1450 let mut update = envelope.payload;
1451 let id = RepositoryId::from_proto(update.id);
1452 this.repositories.remove(&id);
1453 if let Some((client, project_id)) = this.downstream_client() {
1454 update.project_id = project_id.to_proto();
1455 client.send(update).log_err();
1456 }
1457 if this.active_repo_id == Some(id) {
1458 this.active_repo_id = None;
1459 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1460 }
1461 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1462 })
1463 }
1464
1465 async fn handle_git_init(
1466 this: Entity<Self>,
1467 envelope: TypedEnvelope<proto::GitInit>,
1468 cx: AsyncApp,
1469 ) -> Result<proto::Ack> {
1470 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1471 let name = envelope.payload.fallback_branch_name;
1472 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1473 .await?;
1474
1475 Ok(proto::Ack {})
1476 }
1477
1478 async fn handle_fetch(
1479 this: Entity<Self>,
1480 envelope: TypedEnvelope<proto::Fetch>,
1481 mut cx: AsyncApp,
1482 ) -> Result<proto::RemoteMessageResponse> {
1483 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1484 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1485 let askpass_id = envelope.payload.askpass_id;
1486
1487 let askpass = make_remote_delegate(
1488 this,
1489 envelope.payload.project_id,
1490 repository_id,
1491 askpass_id,
1492 &mut cx,
1493 );
1494
1495 let remote_output = repository_handle
1496 .update(&mut cx, |repository_handle, cx| {
1497 repository_handle.fetch(askpass, cx)
1498 })?
1499 .await??;
1500
1501 Ok(proto::RemoteMessageResponse {
1502 stdout: remote_output.stdout,
1503 stderr: remote_output.stderr,
1504 })
1505 }
1506
1507 async fn handle_push(
1508 this: Entity<Self>,
1509 envelope: TypedEnvelope<proto::Push>,
1510 mut cx: AsyncApp,
1511 ) -> Result<proto::RemoteMessageResponse> {
1512 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1513 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1514
1515 let askpass_id = envelope.payload.askpass_id;
1516 let askpass = make_remote_delegate(
1517 this,
1518 envelope.payload.project_id,
1519 repository_id,
1520 askpass_id,
1521 &mut cx,
1522 );
1523
1524 let options = envelope
1525 .payload
1526 .options
1527 .as_ref()
1528 .map(|_| match envelope.payload.options() {
1529 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1530 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1531 });
1532
1533 let branch_name = envelope.payload.branch_name.into();
1534 let remote_name = envelope.payload.remote_name.into();
1535
1536 let remote_output = repository_handle
1537 .update(&mut cx, |repository_handle, cx| {
1538 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1539 })?
1540 .await??;
1541 Ok(proto::RemoteMessageResponse {
1542 stdout: remote_output.stdout,
1543 stderr: remote_output.stderr,
1544 })
1545 }
1546
1547 async fn handle_pull(
1548 this: Entity<Self>,
1549 envelope: TypedEnvelope<proto::Pull>,
1550 mut cx: AsyncApp,
1551 ) -> Result<proto::RemoteMessageResponse> {
1552 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1553 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1554 let askpass_id = envelope.payload.askpass_id;
1555 let askpass = make_remote_delegate(
1556 this,
1557 envelope.payload.project_id,
1558 repository_id,
1559 askpass_id,
1560 &mut cx,
1561 );
1562
1563 let branch_name = envelope.payload.branch_name.into();
1564 let remote_name = envelope.payload.remote_name.into();
1565
1566 let remote_message = repository_handle
1567 .update(&mut cx, |repository_handle, cx| {
1568 repository_handle.pull(branch_name, remote_name, askpass, cx)
1569 })?
1570 .await??;
1571
1572 Ok(proto::RemoteMessageResponse {
1573 stdout: remote_message.stdout,
1574 stderr: remote_message.stderr,
1575 })
1576 }
1577
1578 async fn handle_stage(
1579 this: Entity<Self>,
1580 envelope: TypedEnvelope<proto::Stage>,
1581 mut cx: AsyncApp,
1582 ) -> Result<proto::Ack> {
1583 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1584 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1585
1586 let entries = envelope
1587 .payload
1588 .paths
1589 .into_iter()
1590 .map(PathBuf::from)
1591 .map(RepoPath::new)
1592 .collect();
1593
1594 repository_handle
1595 .update(&mut cx, |repository_handle, cx| {
1596 repository_handle.stage_entries(entries, cx)
1597 })?
1598 .await?;
1599 Ok(proto::Ack {})
1600 }
1601
1602 async fn handle_unstage(
1603 this: Entity<Self>,
1604 envelope: TypedEnvelope<proto::Unstage>,
1605 mut cx: AsyncApp,
1606 ) -> Result<proto::Ack> {
1607 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1608 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1609
1610 let entries = envelope
1611 .payload
1612 .paths
1613 .into_iter()
1614 .map(PathBuf::from)
1615 .map(RepoPath::new)
1616 .collect();
1617
1618 repository_handle
1619 .update(&mut cx, |repository_handle, cx| {
1620 repository_handle.unstage_entries(entries, cx)
1621 })?
1622 .await?;
1623
1624 Ok(proto::Ack {})
1625 }
1626
1627 async fn handle_set_index_text(
1628 this: Entity<Self>,
1629 envelope: TypedEnvelope<proto::SetIndexText>,
1630 mut cx: AsyncApp,
1631 ) -> Result<proto::Ack> {
1632 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1633 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1634 let repo_path = RepoPath::from_str(&envelope.payload.path);
1635
1636 repository_handle
1637 .update(&mut cx, |repository_handle, cx| {
1638 repository_handle.spawn_set_index_text_job(
1639 repo_path,
1640 envelope.payload.text,
1641 None,
1642 cx,
1643 )
1644 })?
1645 .await??;
1646 Ok(proto::Ack {})
1647 }
1648
1649 async fn handle_commit(
1650 this: Entity<Self>,
1651 envelope: TypedEnvelope<proto::Commit>,
1652 mut cx: AsyncApp,
1653 ) -> Result<proto::Ack> {
1654 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1655 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1656
1657 let message = SharedString::from(envelope.payload.message);
1658 let name = envelope.payload.name.map(SharedString::from);
1659 let email = envelope.payload.email.map(SharedString::from);
1660 let options = envelope.payload.options.unwrap_or_default();
1661
1662 repository_handle
1663 .update(&mut cx, |repository_handle, cx| {
1664 repository_handle.commit(
1665 message,
1666 name.zip(email),
1667 CommitOptions {
1668 amend: options.amend,
1669 },
1670 cx,
1671 )
1672 })?
1673 .await??;
1674 Ok(proto::Ack {})
1675 }
1676
1677 async fn handle_get_remotes(
1678 this: Entity<Self>,
1679 envelope: TypedEnvelope<proto::GetRemotes>,
1680 mut cx: AsyncApp,
1681 ) -> Result<proto::GetRemotesResponse> {
1682 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1683 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1684
1685 let branch_name = envelope.payload.branch_name;
1686
1687 let remotes = repository_handle
1688 .update(&mut cx, |repository_handle, _| {
1689 repository_handle.get_remotes(branch_name)
1690 })?
1691 .await??;
1692
1693 Ok(proto::GetRemotesResponse {
1694 remotes: remotes
1695 .into_iter()
1696 .map(|remotes| proto::get_remotes_response::Remote {
1697 name: remotes.name.to_string(),
1698 })
1699 .collect::<Vec<_>>(),
1700 })
1701 }
1702
1703 async fn handle_get_branches(
1704 this: Entity<Self>,
1705 envelope: TypedEnvelope<proto::GitGetBranches>,
1706 mut cx: AsyncApp,
1707 ) -> Result<proto::GitBranchesResponse> {
1708 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1709 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1710
1711 let branches = repository_handle
1712 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1713 .await??;
1714
1715 Ok(proto::GitBranchesResponse {
1716 branches: branches
1717 .into_iter()
1718 .map(|branch| branch_to_proto(&branch))
1719 .collect::<Vec<_>>(),
1720 })
1721 }
1722 async fn handle_create_branch(
1723 this: Entity<Self>,
1724 envelope: TypedEnvelope<proto::GitCreateBranch>,
1725 mut cx: AsyncApp,
1726 ) -> Result<proto::Ack> {
1727 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1728 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1729 let branch_name = envelope.payload.branch_name;
1730
1731 repository_handle
1732 .update(&mut cx, |repository_handle, _| {
1733 repository_handle.create_branch(branch_name)
1734 })?
1735 .await??;
1736
1737 Ok(proto::Ack {})
1738 }
1739
1740 async fn handle_change_branch(
1741 this: Entity<Self>,
1742 envelope: TypedEnvelope<proto::GitChangeBranch>,
1743 mut cx: AsyncApp,
1744 ) -> Result<proto::Ack> {
1745 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1746 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1747 let branch_name = envelope.payload.branch_name;
1748
1749 repository_handle
1750 .update(&mut cx, |repository_handle, _| {
1751 repository_handle.change_branch(branch_name)
1752 })?
1753 .await??;
1754
1755 Ok(proto::Ack {})
1756 }
1757
1758 async fn handle_show(
1759 this: Entity<Self>,
1760 envelope: TypedEnvelope<proto::GitShow>,
1761 mut cx: AsyncApp,
1762 ) -> Result<proto::GitCommitDetails> {
1763 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1764 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1765
1766 let commit = repository_handle
1767 .update(&mut cx, |repository_handle, _| {
1768 repository_handle.show(envelope.payload.commit)
1769 })?
1770 .await??;
1771 Ok(proto::GitCommitDetails {
1772 sha: commit.sha.into(),
1773 message: commit.message.into(),
1774 commit_timestamp: commit.commit_timestamp,
1775 author_email: commit.author_email.into(),
1776 author_name: commit.author_name.into(),
1777 })
1778 }
1779
1780 async fn handle_load_commit_diff(
1781 this: Entity<Self>,
1782 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1783 mut cx: AsyncApp,
1784 ) -> Result<proto::LoadCommitDiffResponse> {
1785 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1786 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1787
1788 let commit_diff = repository_handle
1789 .update(&mut cx, |repository_handle, _| {
1790 repository_handle.load_commit_diff(envelope.payload.commit)
1791 })?
1792 .await??;
1793 Ok(proto::LoadCommitDiffResponse {
1794 files: commit_diff
1795 .files
1796 .into_iter()
1797 .map(|file| proto::CommitFile {
1798 path: file.path.to_string(),
1799 old_text: file.old_text,
1800 new_text: file.new_text,
1801 })
1802 .collect(),
1803 })
1804 }
1805
1806 async fn handle_reset(
1807 this: Entity<Self>,
1808 envelope: TypedEnvelope<proto::GitReset>,
1809 mut cx: AsyncApp,
1810 ) -> Result<proto::Ack> {
1811 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1812 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1813
1814 let mode = match envelope.payload.mode() {
1815 git_reset::ResetMode::Soft => ResetMode::Soft,
1816 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1817 };
1818
1819 repository_handle
1820 .update(&mut cx, |repository_handle, cx| {
1821 repository_handle.reset(envelope.payload.commit, mode, cx)
1822 })?
1823 .await??;
1824 Ok(proto::Ack {})
1825 }
1826
1827 async fn handle_checkout_files(
1828 this: Entity<Self>,
1829 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
1830 mut cx: AsyncApp,
1831 ) -> Result<proto::Ack> {
1832 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1833 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1834 let paths = envelope
1835 .payload
1836 .paths
1837 .iter()
1838 .map(|s| RepoPath::from_str(s))
1839 .collect();
1840
1841 repository_handle
1842 .update(&mut cx, |repository_handle, cx| {
1843 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
1844 })?
1845 .await??;
1846 Ok(proto::Ack {})
1847 }
1848
1849 async fn handle_open_commit_message_buffer(
1850 this: Entity<Self>,
1851 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
1852 mut cx: AsyncApp,
1853 ) -> Result<proto::OpenBufferResponse> {
1854 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1855 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1856 let buffer = repository
1857 .update(&mut cx, |repository, cx| {
1858 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
1859 })?
1860 .await?;
1861
1862 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
1863 this.update(&mut cx, |this, cx| {
1864 this.buffer_store.update(cx, |buffer_store, cx| {
1865 buffer_store
1866 .create_buffer_for_peer(
1867 &buffer,
1868 envelope.original_sender_id.unwrap_or(envelope.sender_id),
1869 cx,
1870 )
1871 .detach_and_log_err(cx);
1872 })
1873 })?;
1874
1875 Ok(proto::OpenBufferResponse {
1876 buffer_id: buffer_id.to_proto(),
1877 })
1878 }
1879
1880 async fn handle_askpass(
1881 this: Entity<Self>,
1882 envelope: TypedEnvelope<proto::AskPassRequest>,
1883 mut cx: AsyncApp,
1884 ) -> Result<proto::AskPassResponse> {
1885 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1886 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1887
1888 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
1889 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
1890 debug_panic!("no askpass found");
1891 return Err(anyhow::anyhow!("no askpass found"));
1892 };
1893
1894 let response = askpass.ask_password(envelope.payload.prompt).await?;
1895
1896 delegates
1897 .lock()
1898 .insert(envelope.payload.askpass_id, askpass);
1899
1900 Ok(proto::AskPassResponse { response })
1901 }
1902
1903 async fn handle_check_for_pushed_commits(
1904 this: Entity<Self>,
1905 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
1906 mut cx: AsyncApp,
1907 ) -> Result<proto::CheckForPushedCommitsResponse> {
1908 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1909 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1910
1911 let branches = repository_handle
1912 .update(&mut cx, |repository_handle, _| {
1913 repository_handle.check_for_pushed_commits()
1914 })?
1915 .await??;
1916 Ok(proto::CheckForPushedCommitsResponse {
1917 pushed_to: branches
1918 .into_iter()
                .map(|branch| branch.to_string())
1920 .collect(),
1921 })
1922 }
1923
1924 async fn handle_git_diff(
1925 this: Entity<Self>,
1926 envelope: TypedEnvelope<proto::GitDiff>,
1927 mut cx: AsyncApp,
1928 ) -> Result<proto::GitDiffResponse> {
1929 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1930 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1931 let diff_type = match envelope.payload.diff_type() {
1932 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
1933 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
1934 };
1935
1936 let mut diff = repository_handle
1937 .update(&mut cx, |repository_handle, cx| {
1938 repository_handle.diff(diff_type, cx)
1939 })?
1940 .await??;
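        // Cap the size of the diff sent back over RPC. Note that this truncates
        // by character count rather than bytes, so multi-byte text can still
        // exceed one megabyte on the wire.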
1941 const ONE_MB: usize = 1_000_000;
1942 if diff.len() > ONE_MB {
1943 diff = diff.chars().take(ONE_MB).collect()
1944 }
1945
1946 Ok(proto::GitDiffResponse { diff })
1947 }
1948
1949 async fn handle_open_unstaged_diff(
1950 this: Entity<Self>,
1951 request: TypedEnvelope<proto::OpenUnstagedDiff>,
1952 mut cx: AsyncApp,
1953 ) -> Result<proto::OpenUnstagedDiffResponse> {
1954 let buffer_id = BufferId::new(request.payload.buffer_id)?;
1955 let diff = this
1956 .update(&mut cx, |this, cx| {
1957 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
1958 Some(this.open_unstaged_diff(buffer, cx))
1959 })?
1960 .ok_or_else(|| anyhow!("no such buffer"))?
1961 .await?;
1962 this.update(&mut cx, |this, _| {
1963 let shared_diffs = this
1964 .shared_diffs
1965 .entry(request.original_sender_id.unwrap_or(request.sender_id))
1966 .or_default();
1967 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
1968 })?;
1969 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
1970 Ok(proto::OpenUnstagedDiffResponse { staged_text })
1971 }
1972
1973 async fn handle_open_uncommitted_diff(
1974 this: Entity<Self>,
1975 request: TypedEnvelope<proto::OpenUncommittedDiff>,
1976 mut cx: AsyncApp,
1977 ) -> Result<proto::OpenUncommittedDiffResponse> {
1978 let buffer_id = BufferId::new(request.payload.buffer_id)?;
1979 let diff = this
1980 .update(&mut cx, |this, cx| {
1981 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
1982 Some(this.open_uncommitted_diff(buffer, cx))
1983 })?
1984 .ok_or_else(|| anyhow!("no such buffer"))?
1985 .await?;
1986 this.update(&mut cx, |this, _| {
1987 let shared_diffs = this
1988 .shared_diffs
1989 .entry(request.original_sender_id.unwrap_or(request.sender_id))
1990 .or_default();
1991 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
1992 })?;
1993 diff.read_with(&cx, |diff, cx| {
1994 use proto::open_uncommitted_diff_response::Mode;
1995
1996 let unstaged_diff = diff.secondary_diff();
1997 let index_snapshot = unstaged_diff.and_then(|diff| {
1998 let diff = diff.read(cx);
1999 diff.base_text_exists().then(|| diff.base_text())
2000 });
2001
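            // Choose how to encode the base texts for the client: when the index
            // snapshot is the same text as the committed snapshot, only the
            // committed text is sent and the client reuses it for the index.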
2002 let mode;
2003 let staged_text;
2004 let committed_text;
2005 if diff.base_text_exists() {
2006 let committed_snapshot = diff.base_text();
2007 committed_text = Some(committed_snapshot.text());
2008 if let Some(index_text) = index_snapshot {
2009 if index_text.remote_id() == committed_snapshot.remote_id() {
2010 mode = Mode::IndexMatchesHead;
2011 staged_text = None;
2012 } else {
2013 mode = Mode::IndexAndHead;
2014 staged_text = Some(index_text.text());
2015 }
2016 } else {
2017 mode = Mode::IndexAndHead;
2018 staged_text = None;
2019 }
2020 } else {
2021 mode = Mode::IndexAndHead;
2022 committed_text = None;
2023 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2024 }
2025
2026 proto::OpenUncommittedDiffResponse {
2027 committed_text,
2028 staged_text,
2029 mode: mode.into(),
2030 }
2031 })
2032 }
2033
2034 async fn handle_update_diff_bases(
2035 this: Entity<Self>,
2036 request: TypedEnvelope<proto::UpdateDiffBases>,
2037 mut cx: AsyncApp,
2038 ) -> Result<()> {
2039 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2040 this.update(&mut cx, |this, cx| {
2041 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2042 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2043 let buffer = buffer.read(cx).text_snapshot();
2044 diff_state.update(cx, |diff_state, cx| {
2045 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2046 })
2047 }
2048 }
2049 })
2050 }
2051
2052 async fn handle_blame_buffer(
2053 this: Entity<Self>,
2054 envelope: TypedEnvelope<proto::BlameBuffer>,
2055 mut cx: AsyncApp,
2056 ) -> Result<proto::BlameBufferResponse> {
2057 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2058 let version = deserialize_version(&envelope.payload.version);
2059 let buffer = this.read_with(&cx, |this, cx| {
2060 this.buffer_store.read(cx).get_existing(buffer_id)
2061 })??;
2062 buffer
2063 .update(&mut cx, |buffer, _| {
2064 buffer.wait_for_version(version.clone())
2065 })?
2066 .await?;
2067 let blame = this
2068 .update(&mut cx, |this, cx| {
2069 this.blame_buffer(&buffer, Some(version), cx)
2070 })?
2071 .await?;
2072 Ok(serialize_blame_buffer_response(blame))
2073 }
2074
2075 async fn handle_get_permalink_to_line(
2076 this: Entity<Self>,
2077 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2078 mut cx: AsyncApp,
2079 ) -> Result<proto::GetPermalinkToLineResponse> {
2080 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2082 let selection = {
2083 let proto_selection = envelope
2084 .payload
2085 .selection
                .context("no selection provided to get permalink for")?;
2087 proto_selection.start as u32..proto_selection.end as u32
2088 };
2089 let buffer = this.read_with(&cx, |this, cx| {
2090 this.buffer_store.read(cx).get_existing(buffer_id)
2091 })??;
2092 let permalink = this
2093 .update(&mut cx, |this, cx| {
2094 this.get_permalink_to_line(&buffer, selection, cx)
2095 })?
2096 .await?;
2097 Ok(proto::GetPermalinkToLineResponse {
2098 permalink: permalink.to_string(),
2099 })
2100 }
2101
2102 fn repository_for_request(
2103 this: &Entity<Self>,
2104 id: RepositoryId,
2105 cx: &mut AsyncApp,
2106 ) -> Result<Entity<Repository>> {
2107 this.update(cx, |this, _| {
2108 this.repositories
2109 .get(&id)
2110 .context("missing repository handle")
2111 .cloned()
2112 })?
2113 }
2114
2115 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2116 self.repositories
2117 .iter()
2118 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2119 .collect()
2120 }
2121}
2122
2123impl BufferDiffState {
2124 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2125 self.language = buffer.read(cx).language().cloned();
2126 self.language_changed = true;
2127 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2128 }
2129
2130 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2131 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2132 }
2133
2134 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2135 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2136 }
2137
2138 fn handle_base_texts_updated(
2139 &mut self,
2140 buffer: text::BufferSnapshot,
2141 message: proto::UpdateDiffBases,
2142 cx: &mut Context<Self>,
2143 ) {
2144 use proto::update_diff_bases::Mode;
2145
2146 let Some(mode) = Mode::from_i32(message.mode) else {
2147 return;
2148 };
2149
2150 let diff_bases_change = match mode {
2151 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2152 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2153 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2154 Mode::IndexAndHead => DiffBasesChange::SetEach {
2155 index: message.staged_text,
2156 head: message.committed_text,
2157 },
2158 };
2159
2160 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2161 }
2162
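    /// Returns a future that resolves once the in-flight diff recalculation (if
    /// any) completes, or `None` if no recalculation is currently running.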
2163 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2164 if *self.recalculating_tx.borrow() {
2165 let mut rx = self.recalculating_tx.subscribe();
2166 return Some(async move {
2167 loop {
2168 let is_recalculating = rx.recv().await;
2169 if is_recalculating != Some(true) {
2170 break;
2171 }
2172 }
2173 });
2174 } else {
2175 None
2176 }
2177 }
2178
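    /// Applies a change to the cached index and/or HEAD base texts, normalizing
    /// line endings, and then kicks off a diff recalculation against the given
    /// buffer snapshot.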
2179 fn diff_bases_changed(
2180 &mut self,
2181 buffer: text::BufferSnapshot,
2182 diff_bases_change: Option<DiffBasesChange>,
2183 cx: &mut Context<Self>,
2184 ) {
2185 match diff_bases_change {
2186 Some(DiffBasesChange::SetIndex(index)) => {
2187 self.index_text = index.map(|mut index| {
2188 text::LineEnding::normalize(&mut index);
2189 Arc::new(index)
2190 });
2191 self.index_changed = true;
2192 }
2193 Some(DiffBasesChange::SetHead(head)) => {
2194 self.head_text = head.map(|mut head| {
2195 text::LineEnding::normalize(&mut head);
2196 Arc::new(head)
2197 });
2198 self.head_changed = true;
2199 }
2200 Some(DiffBasesChange::SetBoth(text)) => {
2201 let text = text.map(|mut text| {
2202 text::LineEnding::normalize(&mut text);
2203 Arc::new(text)
2204 });
2205 self.head_text = text.clone();
2206 self.index_text = text;
2207 self.head_changed = true;
2208 self.index_changed = true;
2209 }
2210 Some(DiffBasesChange::SetEach { index, head }) => {
2211 self.index_text = index.map(|mut index| {
2212 text::LineEnding::normalize(&mut index);
2213 Arc::new(index)
2214 });
2215 self.index_changed = true;
2216 self.head_text = head.map(|mut head| {
2217 text::LineEnding::normalize(&mut head);
2218 Arc::new(head)
2219 });
2220 self.head_changed = true;
2221 }
2222 None => {}
2223 }
2224
2225 self.recalculate_diffs(buffer, cx)
2226 }
2227
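    /// Recomputes the unstaged and uncommitted diffs for this buffer in the
    /// background. When the index and HEAD texts are the same `Arc`, the
    /// unstaged diff is reused as the uncommitted diff. The pass is abandoned if
    /// hunk stage/unstage operations are still in flight, leaving a later pass
    /// to settle the state once the index has quiesced.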
2228 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2229 *self.recalculating_tx.borrow_mut() = true;
2230
2231 let language = self.language.clone();
2232 let language_registry = self.language_registry.clone();
2233 let unstaged_diff = self.unstaged_diff();
2234 let uncommitted_diff = self.uncommitted_diff();
2235 let head = self.head_text.clone();
2236 let index = self.index_text.clone();
2237 let index_changed = self.index_changed;
2238 let head_changed = self.head_changed;
2239 let language_changed = self.language_changed;
2240 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2241 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2242 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2243 (None, None) => true,
2244 _ => false,
2245 };
2246 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2247 log::debug!(
2248 "start recalculating diffs for buffer {}",
2249 buffer.remote_id()
2250 );
2251
2252 let mut new_unstaged_diff = None;
2253 if let Some(unstaged_diff) = &unstaged_diff {
2254 new_unstaged_diff = Some(
2255 BufferDiff::update_diff(
2256 unstaged_diff.clone(),
2257 buffer.clone(),
2258 index,
2259 index_changed,
2260 language_changed,
2261 language.clone(),
2262 language_registry.clone(),
2263 cx,
2264 )
2265 .await?,
2266 );
2267 }
2268
2269 let mut new_uncommitted_diff = None;
2270 if let Some(uncommitted_diff) = &uncommitted_diff {
2271 new_uncommitted_diff = if index_matches_head {
2272 new_unstaged_diff.clone()
2273 } else {
2274 Some(
2275 BufferDiff::update_diff(
2276 uncommitted_diff.clone(),
2277 buffer.clone(),
2278 head,
2279 head_changed,
2280 language_changed,
2281 language.clone(),
2282 language_registry.clone(),
2283 cx,
2284 )
2285 .await?,
2286 )
2287 }
2288 }
2289
2290 let cancel = this.update(cx, |this, _| {
2291 // This checks whether all pending stage/unstage operations
2292 // have quiesced (i.e. both the corresponding write and the
2293 // read of that write have completed). If not, then we cancel
2294 // this recalculation attempt to avoid invalidating pending
2295 // state too quickly; another recalculation will come along
2296 // later and clear the pending state once the state of the index has settled.
2297 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2298 *this.recalculating_tx.borrow_mut() = false;
2299 true
2300 } else {
2301 false
2302 }
2303 })?;
2304 if cancel {
                log::debug!(
                    "aborting diff recalculation for buffer {} due to subsequent hunk staging operations",
                    buffer.remote_id()
                );
2312 return Ok(());
2313 }
2314
2315 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2316 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2317 {
2318 unstaged_diff.update(cx, |diff, cx| {
2319 if language_changed {
2320 diff.language_changed(cx);
2321 }
2322 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2323 })?
2324 } else {
2325 None
2326 };
2327
2328 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2329 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2330 {
2331 uncommitted_diff.update(cx, |diff, cx| {
2332 if language_changed {
2333 diff.language_changed(cx);
2334 }
2335 diff.set_snapshot_with_secondary(
2336 new_uncommitted_diff,
2337 &buffer,
2338 unstaged_changed_range,
2339 true,
2340 cx,
2341 );
2342 })?;
2343 }
2344
2345 log::debug!(
2346 "finished recalculating diffs for buffer {}",
2347 buffer.remote_id()
2348 );
2349
2350 if let Some(this) = this.upgrade() {
2351 this.update(cx, |this, _| {
2352 this.index_changed = false;
2353 this.head_changed = false;
2354 this.language_changed = false;
2355 *this.recalculating_tx.borrow_mut() = false;
2356 })?;
2357 }
2358
2359 Ok(())
2360 }));
2361 }
2362}
2363
2364impl Default for BufferDiffState {
2365 fn default() -> Self {
2366 Self {
2367 unstaged_diff: Default::default(),
2368 uncommitted_diff: Default::default(),
2369 recalculate_diff_task: Default::default(),
2370 language: Default::default(),
2371 language_registry: Default::default(),
2372 recalculating_tx: postage::watch::channel_with(false).0,
2373 hunk_staging_operation_count: 0,
2374 hunk_staging_operation_count_as_of_write: 0,
2375 head_text: Default::default(),
2376 index_text: Default::default(),
2377 head_changed: Default::default(),
2378 index_changed: Default::default(),
2379 language_changed: Default::default(),
2380 }
2381 }
2382}
2383
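/// Builds an [`AskPassDelegate`] that forwards askpass prompts from a local git
/// operation to the downstream client over RPC and relays the response back.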
2384fn make_remote_delegate(
2385 this: Entity<GitStore>,
2386 project_id: u64,
2387 repository_id: RepositoryId,
2388 askpass_id: u64,
2389 cx: &mut AsyncApp,
2390) -> AskPassDelegate {
2391 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2392 this.update(cx, |this, cx| {
2393 let Some((client, _)) = this.downstream_client() else {
2394 return;
2395 };
2396 let response = client.request(proto::AskPassRequest {
2397 project_id,
2398 repository_id: repository_id.to_proto(),
2399 askpass_id,
2400 prompt,
2401 });
2402 cx.spawn(async move |_, _| {
2403 tx.send(response.await?.response).ok();
2404 anyhow::Ok(())
2405 })
2406 .detach_and_log_err(cx);
2407 })
2408 .log_err();
2409 })
2410}
2411
2412impl RepositoryId {
2413 pub fn to_proto(self) -> u64 {
2414 self.0
2415 }
2416
2417 pub fn from_proto(id: u64) -> Self {
2418 RepositoryId(id)
2419 }
2420}
2421
2422impl RepositorySnapshot {
2423 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2424 Self {
2425 id,
2426 merge_message: None,
2427 statuses_by_path: Default::default(),
2428 work_directory_abs_path,
2429 branch: None,
2430 head_commit: None,
2431 merge_conflicts: Default::default(),
2432 merge_head_shas: Default::default(),
2433 scan_id: 0,
2434 }
2435 }
2436
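    /// Builds the full `UpdateRepository` message sent when this repository is
    /// first shared downstream: all status entries are included and no removals
    /// are reported.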
2437 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2438 proto::UpdateRepository {
2439 branch_summary: self.branch.as_ref().map(branch_to_proto),
2440 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2441 updated_statuses: self
2442 .statuses_by_path
2443 .iter()
2444 .map(|entry| entry.to_proto())
2445 .collect(),
2446 removed_statuses: Default::default(),
2447 current_merge_conflicts: self
2448 .merge_conflicts
2449 .iter()
2450 .map(|repo_path| repo_path.to_proto())
2451 .collect(),
2452 project_id,
2453 id: self.id.to_proto(),
2454 abs_path: self.work_directory_abs_path.to_proto(),
2455 entry_ids: vec![self.id.to_proto()],
2456 scan_id: self.scan_id,
2457 is_last_update: true,
2458 }
2459 }
2460
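    /// Builds an incremental `UpdateRepository` message by merge-joining the old
    /// and new status sequences (both ordered by repo path), collecting entries
    /// that were added, changed, or removed since `old`.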
2461 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2462 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2463 let mut removed_statuses: Vec<String> = Vec::new();
2464
2465 let mut new_statuses = self.statuses_by_path.iter().peekable();
2466 let mut old_statuses = old.statuses_by_path.iter().peekable();
2467
2468 let mut current_new_entry = new_statuses.next();
2469 let mut current_old_entry = old_statuses.next();
2470 loop {
2471 match (current_new_entry, current_old_entry) {
2472 (Some(new_entry), Some(old_entry)) => {
2473 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2474 Ordering::Less => {
2475 updated_statuses.push(new_entry.to_proto());
2476 current_new_entry = new_statuses.next();
2477 }
2478 Ordering::Equal => {
2479 if new_entry.status != old_entry.status {
2480 updated_statuses.push(new_entry.to_proto());
2481 }
2482 current_old_entry = old_statuses.next();
2483 current_new_entry = new_statuses.next();
2484 }
2485 Ordering::Greater => {
2486 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2487 current_old_entry = old_statuses.next();
2488 }
2489 }
2490 }
2491 (None, Some(old_entry)) => {
2492 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2493 current_old_entry = old_statuses.next();
2494 }
2495 (Some(new_entry), None) => {
2496 updated_statuses.push(new_entry.to_proto());
2497 current_new_entry = new_statuses.next();
2498 }
2499 (None, None) => break,
2500 }
2501 }
2502
2503 proto::UpdateRepository {
2504 branch_summary: self.branch.as_ref().map(branch_to_proto),
2505 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2506 updated_statuses,
2507 removed_statuses,
2508 current_merge_conflicts: self
2509 .merge_conflicts
2510 .iter()
2511 .map(|path| path.as_ref().to_proto())
2512 .collect(),
2513 project_id,
2514 id: self.id.to_proto(),
2515 abs_path: self.work_directory_abs_path.to_proto(),
2516 entry_ids: vec![],
2517 scan_id: self.scan_id,
2518 is_last_update: true,
2519 }
2520 }
2521
2522 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2523 self.statuses_by_path.iter().cloned()
2524 }
2525
2526 pub fn status_summary(&self) -> GitSummary {
2527 self.statuses_by_path.summary().item_summary
2528 }
2529
2530 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2531 self.statuses_by_path
2532 .get(&PathKey(path.0.clone()), &())
2533 .cloned()
2534 }
2535
2536 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2537 abs_path
2538 .strip_prefix(&self.work_directory_abs_path)
2539 .map(RepoPath::from)
2540 .ok()
2541 }
2542
2543 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2544 self.merge_conflicts.contains(repo_path)
2545 }
2546
2547 /// This is the name that will be displayed in the repository selector for this repository.
2548 pub fn display_name(&self) -> SharedString {
2549 self.work_directory_abs_path
2550 .file_name()
2551 .unwrap_or_default()
2552 .to_string_lossy()
2553 .to_string()
2554 .into()
2555 }
2556}
2557
2558impl Repository {
2559 fn local(
2560 id: RepositoryId,
2561 work_directory_abs_path: Arc<Path>,
2562 dot_git_abs_path: Arc<Path>,
2563 repository_dir_abs_path: Arc<Path>,
2564 common_dir_abs_path: Arc<Path>,
2565 project_environment: WeakEntity<ProjectEnvironment>,
2566 fs: Arc<dyn Fs>,
2567 git_store: WeakEntity<GitStore>,
2568 cx: &mut Context<Self>,
2569 ) -> Self {
2570 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2571 Repository {
2572 this: cx.weak_entity(),
2573 git_store,
2574 snapshot,
2575 commit_message_buffer: None,
2576 askpass_delegates: Default::default(),
2577 paths_needing_status_update: Default::default(),
2578 latest_askpass_id: 0,
2579 job_sender: Repository::spawn_local_git_worker(
2580 work_directory_abs_path,
2581 dot_git_abs_path,
2582 repository_dir_abs_path,
2583 common_dir_abs_path,
2584 project_environment,
2585 fs,
2586 cx,
2587 ),
2588 job_id: 0,
2589 active_jobs: Default::default(),
2590 }
2591 }
2592
2593 fn remote(
2594 id: RepositoryId,
2595 work_directory_abs_path: Arc<Path>,
2596 project_id: ProjectId,
2597 client: AnyProtoClient,
2598 git_store: WeakEntity<GitStore>,
2599 cx: &mut Context<Self>,
2600 ) -> Self {
2601 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2602 Self {
2603 this: cx.weak_entity(),
2604 snapshot,
2605 commit_message_buffer: None,
2606 git_store,
2607 paths_needing_status_update: Default::default(),
2608 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2609 askpass_delegates: Default::default(),
2610 latest_askpass_id: 0,
2611 active_jobs: Default::default(),
2612 job_id: 0,
2613 }
2614 }
2615
2616 pub fn git_store(&self) -> Option<Entity<GitStore>> {
2617 self.git_store.upgrade()
2618 }
2619
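    /// Reloads the index and HEAD texts for every open buffer that belongs to
    /// this repository and feeds any changes into the corresponding
    /// `BufferDiffState`, forwarding them to the downstream client when one is
    /// connected. Runs as a keyed job so redundant reloads are coalesced.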
2620 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
2621 let this = cx.weak_entity();
2622 let git_store = self.git_store.clone();
2623 let _ = self.send_keyed_job(
2624 Some(GitJobKey::ReloadBufferDiffBases),
2625 None,
2626 |state, mut cx| async move {
2627 let RepositoryState::Local { backend, .. } = state else {
2628 log::error!("tried to recompute diffs for a non-local repository");
2629 return Ok(());
2630 };
2631
2632 let Some(this) = this.upgrade() else {
2633 return Ok(());
2634 };
2635
2636 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
2637 git_store.update(cx, |git_store, cx| {
2638 git_store
2639 .diffs
2640 .iter()
2641 .filter_map(|(buffer_id, diff_state)| {
2642 let buffer_store = git_store.buffer_store.read(cx);
2643 let buffer = buffer_store.get(*buffer_id)?;
2644 let file = File::from_dyn(buffer.read(cx).file())?;
2645 let abs_path =
2646 file.worktree.read(cx).absolutize(&file.path).ok()?;
2647 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
2648 log::debug!(
2649 "start reload diff bases for repo path {}",
2650 repo_path.0.display()
2651 );
2652 diff_state.update(cx, |diff_state, _| {
2653 let has_unstaged_diff = diff_state
2654 .unstaged_diff
2655 .as_ref()
2656 .is_some_and(|diff| diff.is_upgradable());
2657 let has_uncommitted_diff = diff_state
2658 .uncommitted_diff
2659 .as_ref()
2660 .is_some_and(|set| set.is_upgradable());
2661
2662 Some((
2663 buffer,
2664 repo_path,
2665 has_unstaged_diff.then(|| diff_state.index_text.clone()),
2666 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
2667 ))
2668 })
2669 })
2670 .collect::<Vec<_>>()
2671 })
2672 })??;
2673
2674 let buffer_diff_base_changes = cx
2675 .background_spawn(async move {
2676 let mut changes = Vec::new();
2677 for (buffer, repo_path, current_index_text, current_head_text) in
2678 &repo_diff_state_updates
2679 {
2680 let index_text = if current_index_text.is_some() {
2681 backend.load_index_text(repo_path.clone()).await
2682 } else {
2683 None
2684 };
2685 let head_text = if current_head_text.is_some() {
2686 backend.load_committed_text(repo_path.clone()).await
2687 } else {
2688 None
2689 };
2690
2691 let change =
2692 match (current_index_text.as_ref(), current_head_text.as_ref()) {
2693 (Some(current_index), Some(current_head)) => {
2694 let index_changed =
2695 index_text.as_ref() != current_index.as_deref();
2696 let head_changed =
2697 head_text.as_ref() != current_head.as_deref();
2698 if index_changed && head_changed {
2699 if index_text == head_text {
2700 Some(DiffBasesChange::SetBoth(head_text))
2701 } else {
2702 Some(DiffBasesChange::SetEach {
2703 index: index_text,
2704 head: head_text,
2705 })
2706 }
2707 } else if index_changed {
2708 Some(DiffBasesChange::SetIndex(index_text))
2709 } else if head_changed {
2710 Some(DiffBasesChange::SetHead(head_text))
2711 } else {
2712 None
2713 }
2714 }
2715 (Some(current_index), None) => {
2716 let index_changed =
2717 index_text.as_ref() != current_index.as_deref();
2718 index_changed
2719 .then_some(DiffBasesChange::SetIndex(index_text))
2720 }
2721 (None, Some(current_head)) => {
2722 let head_changed =
2723 head_text.as_ref() != current_head.as_deref();
2724 head_changed.then_some(DiffBasesChange::SetHead(head_text))
2725 }
2726 (None, None) => None,
2727 };
2728
2729 changes.push((buffer.clone(), change))
2730 }
2731 changes
2732 })
2733 .await;
2734
2735 git_store.update(&mut cx, |git_store, cx| {
2736 for (buffer, diff_bases_change) in buffer_diff_base_changes {
2737 let buffer_snapshot = buffer.read(cx).text_snapshot();
2738 let buffer_id = buffer_snapshot.remote_id();
2739 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
2740 continue;
2741 };
2742
2743 let downstream_client = git_store.downstream_client();
2744 diff_state.update(cx, |diff_state, cx| {
2745 use proto::update_diff_bases::Mode;
2746
2747 if let Some((diff_bases_change, (client, project_id))) =
2748 diff_bases_change.clone().zip(downstream_client)
2749 {
2750 let (staged_text, committed_text, mode) = match diff_bases_change {
2751 DiffBasesChange::SetIndex(index) => {
2752 (index, None, Mode::IndexOnly)
2753 }
2754 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
2755 DiffBasesChange::SetEach { index, head } => {
2756 (index, head, Mode::IndexAndHead)
2757 }
2758 DiffBasesChange::SetBoth(text) => {
2759 (None, text, Mode::IndexMatchesHead)
2760 }
2761 };
2762 client
2763 .send(proto::UpdateDiffBases {
2764 project_id: project_id.to_proto(),
2765 buffer_id: buffer_id.to_proto(),
2766 staged_text,
2767 committed_text,
2768 mode: mode as i32,
2769 })
2770 .log_err();
2771 }
2772
2773 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
2774 });
2775 }
2776 })
2777 },
2778 );
2779 }
2780
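    /// Enqueues a job on this repository's background worker and returns a
    /// receiver for its result. Jobs run sequentially in submission order; when
    /// a `status` message is provided, it is shown as an active job for the
    /// duration of the work.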
2781 pub fn send_job<F, Fut, R>(
2782 &mut self,
2783 status: Option<SharedString>,
2784 job: F,
2785 ) -> oneshot::Receiver<R>
2786 where
2787 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2788 Fut: Future<Output = R> + 'static,
2789 R: Send + 'static,
2790 {
2791 self.send_keyed_job(None, status, job)
2792 }
2793
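    /// Like [`Self::send_job`], but with an optional [`GitJobKey`]: if a newer
    /// job with the same key is already queued when this one is dequeued, this
    /// one is skipped, so repeated requests (such as index writes for the same
    /// path) collapse into the most recent.
    ///
    /// Illustrative sketch (not a doctest):
    ///
    /// ```ignore
    /// let _rx = repository.send_keyed_job(
    ///     Some(GitJobKey::WriteIndex(path.clone())),
    ///     None,
    ///     move |state, _cx| async move {
    ///         // ...write the new index contents using `state`...
    ///         anyhow::Ok(())
    ///     },
    /// );
    /// ```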
2794 fn send_keyed_job<F, Fut, R>(
2795 &mut self,
2796 key: Option<GitJobKey>,
2797 status: Option<SharedString>,
2798 job: F,
2799 ) -> oneshot::Receiver<R>
2800 where
2801 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2802 Fut: Future<Output = R> + 'static,
2803 R: Send + 'static,
2804 {
2805 let (result_tx, result_rx) = futures::channel::oneshot::channel();
2806 let job_id = post_inc(&mut self.job_id);
2807 let this = self.this.clone();
2808 self.job_sender
2809 .unbounded_send(GitJob {
2810 key,
2811 job: Box::new(move |state, cx: &mut AsyncApp| {
2812 let job = job(state, cx.clone());
2813 cx.spawn(async move |cx| {
2814 if let Some(s) = status.clone() {
2815 this.update(cx, |this, cx| {
2816 this.active_jobs.insert(
2817 job_id,
2818 JobInfo {
2819 start: Instant::now(),
2820 message: s.clone(),
2821 },
2822 );
2823
2824 cx.notify();
2825 })
2826 .ok();
2827 }
2828 let result = job.await;
2829
2830 this.update(cx, |this, cx| {
2831 this.active_jobs.remove(&job_id);
2832 cx.notify();
2833 })
2834 .ok();
2835
2836 result_tx.send(result).ok();
2837 })
2838 }),
2839 })
2840 .ok();
2841 result_rx
2842 }
2843
2844 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
2845 let Some(git_store) = self.git_store.upgrade() else {
2846 return;
2847 };
2848 let entity = cx.entity();
2849 git_store.update(cx, |git_store, cx| {
2850 let Some((&id, _)) = git_store
2851 .repositories
2852 .iter()
2853 .find(|(_, handle)| *handle == &entity)
2854 else {
2855 return;
2856 };
2857 git_store.active_repo_id = Some(id);
2858 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
2859 });
2860 }
2861
2862 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
2863 self.snapshot.status()
2864 }
2865
2866 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
2867 let git_store = self.git_store.upgrade()?;
2868 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2869 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
2870 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
2871 Some(ProjectPath {
2872 worktree_id: worktree.read(cx).id(),
2873 path: relative_path.into(),
2874 })
2875 }
2876
2877 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
2878 let git_store = self.git_store.upgrade()?;
2879 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2880 let abs_path = worktree_store.absolutize(path, cx)?;
2881 self.snapshot.abs_path_to_repo_path(&abs_path)
2882 }
2883
2884 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
2885 other
2886 .read(cx)
2887 .snapshot
2888 .work_directory_abs_path
2889 .starts_with(&self.snapshot.work_directory_abs_path)
2890 }
2891
2892 pub fn open_commit_buffer(
2893 &mut self,
2894 languages: Option<Arc<LanguageRegistry>>,
2895 buffer_store: Entity<BufferStore>,
2896 cx: &mut Context<Self>,
2897 ) -> Task<Result<Entity<Buffer>>> {
2898 let id = self.id;
2899 if let Some(buffer) = self.commit_message_buffer.clone() {
2900 return Task::ready(Ok(buffer));
2901 }
2902 let this = cx.weak_entity();
2903
2904 let rx = self.send_job(None, move |state, mut cx| async move {
2905 let Some(this) = this.upgrade() else {
2906 bail!("git store was dropped");
2907 };
2908 match state {
2909 RepositoryState::Local { .. } => {
2910 this.update(&mut cx, |_, cx| {
2911 Self::open_local_commit_buffer(languages, buffer_store, cx)
2912 })?
2913 .await
2914 }
2915 RepositoryState::Remote { project_id, client } => {
2916 let request = client.request(proto::OpenCommitMessageBuffer {
2917 project_id: project_id.0,
2918 repository_id: id.to_proto(),
2919 });
2920 let response = request.await.context("requesting to open commit buffer")?;
2921 let buffer_id = BufferId::new(response.buffer_id)?;
2922 let buffer = buffer_store
2923 .update(&mut cx, |buffer_store, cx| {
2924 buffer_store.wait_for_remote_buffer(buffer_id, cx)
2925 })?
2926 .await?;
2927 if let Some(language_registry) = languages {
2928 let git_commit_language =
2929 language_registry.language_for_name("Git Commit").await?;
2930 buffer.update(&mut cx, |buffer, cx| {
2931 buffer.set_language(Some(git_commit_language), cx);
2932 })?;
2933 }
2934 this.update(&mut cx, |this, _| {
2935 this.commit_message_buffer = Some(buffer.clone());
2936 })?;
2937 Ok(buffer)
2938 }
2939 }
2940 });
2941
2942 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
2943 }
2944
2945 fn open_local_commit_buffer(
2946 language_registry: Option<Arc<LanguageRegistry>>,
2947 buffer_store: Entity<BufferStore>,
2948 cx: &mut Context<Self>,
2949 ) -> Task<Result<Entity<Buffer>>> {
2950 cx.spawn(async move |repository, cx| {
2951 let buffer = buffer_store
2952 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
2953 .await?;
2954
2955 if let Some(language_registry) = language_registry {
2956 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
2957 buffer.update(cx, |buffer, cx| {
2958 buffer.set_language(Some(git_commit_language), cx);
2959 })?;
2960 }
2961
2962 repository.update(cx, |repository, _| {
2963 repository.commit_message_buffer = Some(buffer.clone());
2964 })?;
2965 Ok(buffer)
2966 })
2967 }
2968
2969 pub fn checkout_files(
2970 &mut self,
2971 commit: &str,
2972 paths: Vec<RepoPath>,
2973 _cx: &mut App,
2974 ) -> oneshot::Receiver<Result<()>> {
2975 let commit = commit.to_string();
2976 let id = self.id;
2977
2978 self.send_job(
2979 Some(format!("git checkout {}", commit).into()),
2980 move |git_repo, _| async move {
2981 match git_repo {
2982 RepositoryState::Local {
2983 backend,
2984 environment,
2985 ..
2986 } => {
2987 backend
2988 .checkout_files(commit, paths, environment.clone())
2989 .await
2990 }
2991 RepositoryState::Remote { project_id, client } => {
2992 client
2993 .request(proto::GitCheckoutFiles {
2994 project_id: project_id.0,
2995 repository_id: id.to_proto(),
2996 commit,
2997 paths: paths
2998 .into_iter()
2999 .map(|p| p.to_string_lossy().to_string())
3000 .collect(),
3001 })
3002 .await?;
3003
3004 Ok(())
3005 }
3006 }
3007 },
3008 )
3009 }
3010
3011 pub fn reset(
3012 &mut self,
3013 commit: String,
3014 reset_mode: ResetMode,
3015 _cx: &mut App,
3016 ) -> oneshot::Receiver<Result<()>> {
3018 let id = self.id;
3019
3020 self.send_job(None, move |git_repo, _| async move {
3021 match git_repo {
3022 RepositoryState::Local {
3023 backend,
3024 environment,
3025 ..
3026 } => backend.reset(commit, reset_mode, environment).await,
3027 RepositoryState::Remote { project_id, client } => {
3028 client
3029 .request(proto::GitReset {
3030 project_id: project_id.0,
3031 repository_id: id.to_proto(),
3032 commit,
3033 mode: match reset_mode {
3034 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3035 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3036 },
3037 })
3038 .await?;
3039
3040 Ok(())
3041 }
3042 }
3043 })
3044 }
3045
3046 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3047 let id = self.id;
3048 self.send_job(None, move |git_repo, _cx| async move {
3049 match git_repo {
3050 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3051 RepositoryState::Remote { project_id, client } => {
3052 let resp = client
3053 .request(proto::GitShow {
3054 project_id: project_id.0,
3055 repository_id: id.to_proto(),
3056 commit,
3057 })
3058 .await?;
3059
3060 Ok(CommitDetails {
3061 sha: resp.sha.into(),
3062 message: resp.message.into(),
3063 commit_timestamp: resp.commit_timestamp,
3064 author_email: resp.author_email.into(),
3065 author_name: resp.author_name.into(),
3066 })
3067 }
3068 }
3069 })
3070 }
3071
3072 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3073 let id = self.id;
3074 self.send_job(None, move |git_repo, cx| async move {
3075 match git_repo {
3076 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3077 RepositoryState::Remote {
3078 client, project_id, ..
3079 } => {
3080 let response = client
3081 .request(proto::LoadCommitDiff {
3082 project_id: project_id.0,
3083 repository_id: id.to_proto(),
3084 commit,
3085 })
3086 .await?;
3087 Ok(CommitDiff {
3088 files: response
3089 .files
3090 .into_iter()
3091 .map(|file| CommitFile {
3092 path: Path::new(&file.path).into(),
3093 old_text: file.old_text,
3094 new_text: file.new_text,
3095 })
3096 .collect(),
3097 })
3098 }
3099 }
3100 })
3101 }
3102
3103 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3104 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3105 }
3106
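    /// Stages the given paths. Any open buffers for those paths that still exist
    /// on disk are saved first, so the index picks up their latest contents.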
3107 pub fn stage_entries(
3108 &self,
3109 entries: Vec<RepoPath>,
3110 cx: &mut Context<Self>,
3111 ) -> Task<anyhow::Result<()>> {
3112 if entries.is_empty() {
3113 return Task::ready(Ok(()));
3114 }
3115 let id = self.id;
3116
3117 let mut save_futures = Vec::new();
3118 if let Some(buffer_store) = self.buffer_store(cx) {
3119 buffer_store.update(cx, |buffer_store, cx| {
3120 for path in &entries {
3121 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3122 continue;
3123 };
3124 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3125 if buffer
3126 .read(cx)
3127 .file()
3128 .map_or(false, |file| file.disk_state().exists())
3129 {
3130 save_futures.push(buffer_store.save_buffer(buffer, cx));
3131 }
3132 }
3133 }
3134 })
3135 }
3136
3137 cx.spawn(async move |this, cx| {
3138 for save_future in save_futures {
3139 save_future.await?;
3140 }
3141
3142 this.update(cx, |this, _| {
3143 this.send_job(None, move |git_repo, _cx| async move {
3144 match git_repo {
3145 RepositoryState::Local {
3146 backend,
3147 environment,
3148 ..
3149 } => backend.stage_paths(entries, environment.clone()).await,
3150 RepositoryState::Remote { project_id, client } => {
3151 client
3152 .request(proto::Stage {
3153 project_id: project_id.0,
3154 repository_id: id.to_proto(),
3155 paths: entries
3156 .into_iter()
3157 .map(|repo_path| repo_path.as_ref().to_proto())
3158 .collect(),
3159 })
3160 .await
3161 .context("sending stage request")?;
3162
3163 Ok(())
3164 }
3165 }
3166 })
3167 })?
3168 .await??;
3169
3170 Ok(())
3171 })
3172 }
3173
3174 pub fn unstage_entries(
3175 &self,
3176 entries: Vec<RepoPath>,
3177 cx: &mut Context<Self>,
3178 ) -> Task<anyhow::Result<()>> {
3179 if entries.is_empty() {
3180 return Task::ready(Ok(()));
3181 }
3182 let id = self.id;
3183
3184 let mut save_futures = Vec::new();
3185 if let Some(buffer_store) = self.buffer_store(cx) {
3186 buffer_store.update(cx, |buffer_store, cx| {
3187 for path in &entries {
3188 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3189 continue;
3190 };
3191 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3192 if buffer
3193 .read(cx)
3194 .file()
3195 .map_or(false, |file| file.disk_state().exists())
3196 {
3197 save_futures.push(buffer_store.save_buffer(buffer, cx));
3198 }
3199 }
3200 }
3201 })
3202 }
3203
3204 cx.spawn(async move |this, cx| {
3205 for save_future in save_futures {
3206 save_future.await?;
3207 }
3208
3209 this.update(cx, |this, _| {
3210 this.send_job(None, move |git_repo, _cx| async move {
3211 match git_repo {
3212 RepositoryState::Local {
3213 backend,
3214 environment,
3215 ..
3216 } => backend.unstage_paths(entries, environment).await,
3217 RepositoryState::Remote { project_id, client } => {
3218 client
3219 .request(proto::Unstage {
3220 project_id: project_id.0,
3221 repository_id: id.to_proto(),
3222 paths: entries
3223 .into_iter()
3224 .map(|repo_path| repo_path.as_ref().to_proto())
3225 .collect(),
3226 })
3227 .await
3228 .context("sending unstage request")?;
3229
3230 Ok(())
3231 }
3232 }
3233 })
3234 })?
3235 .await??;
3236
3237 Ok(())
3238 })
3239 }
3240
3241 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3242 let to_stage = self
3243 .cached_status()
3244 .filter(|entry| !entry.status.staging().is_fully_staged())
3245 .map(|entry| entry.repo_path.clone())
3246 .collect();
3247 self.stage_entries(to_stage, cx)
3248 }
3249
3250 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3251 let to_unstage = self
3252 .cached_status()
3253 .filter(|entry| entry.status.staging().has_staged())
3254 .map(|entry| entry.repo_path.clone())
3255 .collect();
3256 self.unstage_entries(to_unstage, cx)
3257 }
3258
3259 pub fn commit(
3260 &mut self,
3261 message: SharedString,
3262 name_and_email: Option<(SharedString, SharedString)>,
3263 options: CommitOptions,
3264 _cx: &mut App,
3265 ) -> oneshot::Receiver<Result<()>> {
3266 let id = self.id;
3267
3268 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3269 match git_repo {
3270 RepositoryState::Local {
3271 backend,
3272 environment,
3273 ..
3274 } => {
3275 backend
3276 .commit(message, name_and_email, options, environment)
3277 .await
3278 }
3279 RepositoryState::Remote { project_id, client } => {
3280 let (name, email) = name_and_email.unzip();
3281 client
3282 .request(proto::Commit {
3283 project_id: project_id.0,
3284 repository_id: id.to_proto(),
3285 message: String::from(message),
3286 name: name.map(String::from),
3287 email: email.map(String::from),
3288 options: Some(proto::commit::CommitOptions {
3289 amend: options.amend,
3290 }),
3291 })
3292 .await
3293 .context("sending commit request")?;
3294
3295 Ok(())
3296 }
3297 }
3298 })
3299 }
3300
3301 pub fn fetch(
3302 &mut self,
3303 askpass: AskPassDelegate,
3304 _cx: &mut App,
3305 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3306 let askpass_delegates = self.askpass_delegates.clone();
3307 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3308 let id = self.id;
3309
3310 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3311 match git_repo {
3312 RepositoryState::Local {
3313 backend,
3314 environment,
3315 ..
3316 } => backend.fetch(askpass, environment, cx).await,
3317 RepositoryState::Remote { project_id, client } => {
3318 askpass_delegates.lock().insert(askpass_id, askpass);
3319 let _defer = util::defer(|| {
3320 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3321 debug_assert!(askpass_delegate.is_some());
3322 });
3323
3324 let response = client
3325 .request(proto::Fetch {
3326 project_id: project_id.0,
3327 repository_id: id.to_proto(),
3328 askpass_id,
3329 })
3330 .await
3331 .context("sending fetch request")?;
3332
3333 Ok(RemoteCommandOutput {
3334 stdout: response.stdout,
3335 stderr: response.stderr,
3336 })
3337 }
3338 }
3339 })
3340 }
3341
3342 pub fn push(
3343 &mut self,
3344 branch: SharedString,
3345 remote: SharedString,
3346 options: Option<PushOptions>,
3347 askpass: AskPassDelegate,
3348 cx: &mut Context<Self>,
3349 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3350 let askpass_delegates = self.askpass_delegates.clone();
3351 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3352 let id = self.id;
3353
3354 let args = options
3355 .map(|option| match option {
3356 PushOptions::SetUpstream => " --set-upstream",
3357 PushOptions::Force => " --force",
3358 })
3359 .unwrap_or("");
3360
3361 let updates_tx = self
3362 .git_store()
3363 .and_then(|git_store| match &git_store.read(cx).state {
3364 GitStoreState::Local { downstream, .. } => downstream
3365 .as_ref()
3366 .map(|downstream| downstream.updates_tx.clone()),
3367 _ => None,
3368 });
3369
3370 let this = cx.weak_entity();
3371 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
3373 move |git_repo, mut cx| async move {
3374 match git_repo {
3375 RepositoryState::Local {
3376 backend,
3377 environment,
3378 ..
3379 } => {
3380 let result = backend
3381 .push(
3382 branch.to_string(),
3383 remote.to_string(),
3384 options,
3385 askpass,
3386 environment.clone(),
3387 cx.clone(),
3388 )
3389 .await;
3390 if result.is_ok() {
3391 let branches = backend.branches().await?;
3392 let branch = branches.into_iter().find(|branch| branch.is_head);
                            log::info!("head branch after push is {branch:?}");
3394 let snapshot = this.update(&mut cx, |this, cx| {
3395 this.snapshot.branch = branch;
3396 let snapshot = this.snapshot.clone();
3397 cx.emit(RepositoryEvent::Updated { full_scan: false });
3398 snapshot
3399 })?;
3400 if let Some(updates_tx) = updates_tx {
3401 updates_tx
3402 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3403 .ok();
3404 }
3405 }
3406 result
3407 }
3408 RepositoryState::Remote { project_id, client } => {
3409 askpass_delegates.lock().insert(askpass_id, askpass);
3410 let _defer = util::defer(|| {
3411 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3412 debug_assert!(askpass_delegate.is_some());
3413 });
3414 let response = client
3415 .request(proto::Push {
3416 project_id: project_id.0,
3417 repository_id: id.to_proto(),
3418 askpass_id,
3419 branch_name: branch.to_string(),
3420 remote_name: remote.to_string(),
3421 options: options.map(|options| match options {
3422 PushOptions::Force => proto::push::PushOptions::Force,
3423 PushOptions::SetUpstream => {
3424 proto::push::PushOptions::SetUpstream
3425 }
3426 }
3427 as i32),
3428 })
3429 .await
3430 .context("sending push request")?;
3431
3432 Ok(RemoteCommandOutput {
3433 stdout: response.stdout,
3434 stderr: response.stderr,
3435 })
3436 }
3437 }
3438 },
3439 )
3440 }
3441
3442 pub fn pull(
3443 &mut self,
3444 branch: SharedString,
3445 remote: SharedString,
3446 askpass: AskPassDelegate,
3447 _cx: &mut App,
3448 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3449 let askpass_delegates = self.askpass_delegates.clone();
3450 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3451 let id = self.id;
3452
3453 self.send_job(
3454 Some(format!("git pull {} {}", remote, branch).into()),
3455 move |git_repo, cx| async move {
3456 match git_repo {
3457 RepositoryState::Local {
3458 backend,
3459 environment,
3460 ..
3461 } => {
3462 backend
3463 .pull(
3464 branch.to_string(),
3465 remote.to_string(),
3466 askpass,
3467 environment.clone(),
3468 cx,
3469 )
3470 .await
3471 }
3472 RepositoryState::Remote { project_id, client } => {
3473 askpass_delegates.lock().insert(askpass_id, askpass);
3474 let _defer = util::defer(|| {
3475 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3476 debug_assert!(askpass_delegate.is_some());
3477 });
3478 let response = client
3479 .request(proto::Pull {
3480 project_id: project_id.0,
3481 repository_id: id.to_proto(),
3482 askpass_id,
3483 branch_name: branch.to_string(),
3484 remote_name: remote.to_string(),
3485 })
3486 .await
3487 .context("sending pull request")?;
3488
3489 Ok(RemoteCommandOutput {
3490 stdout: response.stdout,
3491 stderr: response.stderr,
3492 })
3493 }
3494 }
3495 },
3496 )
3497 }
3498
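    /// Writes new index contents for `path` as a keyed job, so rapid successive
    /// writes to the same path are coalesced. When the write originated from a
    /// hunk staging operation, the corresponding `BufferDiffState`'s
    /// operation count as of write is bumped afterwards so that diff
    /// recalculation knows the index has caught up.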
3499 fn spawn_set_index_text_job(
3500 &mut self,
3501 path: RepoPath,
3502 content: Option<String>,
3503 hunk_staging_operation_count: Option<usize>,
3504 cx: &mut Context<Self>,
3505 ) -> oneshot::Receiver<anyhow::Result<()>> {
3506 let id = self.id;
3507 let this = cx.weak_entity();
3508 let git_store = self.git_store.clone();
3509 self.send_keyed_job(
3510 Some(GitJobKey::WriteIndex(path.clone())),
3511 None,
3512 move |git_repo, mut cx| async move {
                log::debug!("start updating index text for path {}", path.display());
3514 match git_repo {
3515 RepositoryState::Local {
3516 backend,
3517 environment,
3518 ..
3519 } => {
3520 backend
3521 .set_index_text(path.clone(), content, environment.clone())
3522 .await?;
3523 }
3524 RepositoryState::Remote { project_id, client } => {
3525 client
3526 .request(proto::SetIndexText {
3527 project_id: project_id.0,
3528 repository_id: id.to_proto(),
3529 path: path.as_ref().to_proto(),
3530 text: content,
3531 })
3532 .await?;
3533 }
3534 }
                log::debug!("finish updating index text for path {}", path.display());
3536
3537 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
3538 let project_path = this
3539 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
3540 .ok()
3541 .flatten();
3542 git_store.update(&mut cx, |git_store, cx| {
3543 let buffer_id = git_store
3544 .buffer_store
3545 .read(cx)
3546 .get_by_path(&project_path?, cx)?
3547 .read(cx)
3548 .remote_id();
3549 let diff_state = git_store.diffs.get(&buffer_id)?;
3550 diff_state.update(cx, |diff_state, _| {
3551 diff_state.hunk_staging_operation_count_as_of_write =
3552 hunk_staging_operation_count;
3553 });
3554 Some(())
3555 })?;
3556 }
3557 Ok(())
3558 },
3559 )
3560 }
3561
3562 pub fn get_remotes(
3563 &mut self,
3564 branch_name: Option<String>,
3565 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
3566 let id = self.id;
3567 self.send_job(None, move |repo, _cx| async move {
3568 match repo {
3569 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
3570 RepositoryState::Remote { project_id, client } => {
3571 let response = client
3572 .request(proto::GetRemotes {
3573 project_id: project_id.0,
3574 repository_id: id.to_proto(),
3575 branch_name,
3576 })
3577 .await?;
3578
3579 let remotes = response
3580 .remotes
3581 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
3585 .collect();
3586
3587 Ok(remotes)
3588 }
3589 }
3590 })
3591 }
3592
3593 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
3594 let id = self.id;
3595 self.send_job(None, move |repo, cx| async move {
3596 match repo {
3597 RepositoryState::Local { backend, .. } => {
3598 let backend = backend.clone();
3599 cx.background_spawn(async move { backend.branches().await })
3600 .await
3601 }
3602 RepositoryState::Remote { project_id, client } => {
3603 let response = client
3604 .request(proto::GitGetBranches {
3605 project_id: project_id.0,
3606 repository_id: id.to_proto(),
3607 })
3608 .await?;
3609
3610 let branches = response
3611 .branches
3612 .into_iter()
3613 .map(|branch| proto_to_branch(&branch))
3614 .collect();
3615
3616 Ok(branches)
3617 }
3618 }
3619 })
3620 }
3621
3622 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
3623 let id = self.id;
3624 self.send_job(None, move |repo, _cx| async move {
3625 match repo {
3626 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
3627 RepositoryState::Remote { project_id, client } => {
3628 let response = client
3629 .request(proto::GitDiff {
3630 project_id: project_id.0,
3631 repository_id: id.to_proto(),
3632 diff_type: match diff_type {
3633 DiffType::HeadToIndex => {
3634 proto::git_diff::DiffType::HeadToIndex.into()
3635 }
3636 DiffType::HeadToWorktree => {
3637 proto::git_diff::DiffType::HeadToWorktree.into()
3638 }
3639 },
3640 })
3641 .await?;
3642
3643 Ok(response.diff)
3644 }
3645 }
3646 })
3647 }
3648
3649 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3650 let id = self.id;
3651 self.send_job(
3652 Some(format!("git switch -c {branch_name}").into()),
3653 move |repo, _cx| async move {
3654 match repo {
3655 RepositoryState::Local { backend, .. } => {
3656 backend.create_branch(branch_name).await
3657 }
3658 RepositoryState::Remote { project_id, client } => {
3659 client
3660 .request(proto::GitCreateBranch {
3661 project_id: project_id.0,
3662 repository_id: id.to_proto(),
3663 branch_name,
3664 })
3665 .await?;
3666
3667 Ok(())
3668 }
3669 }
3670 },
3671 )
3672 }
3673
3674 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3675 let id = self.id;
3676 self.send_job(
3677 Some(format!("git switch {branch_name}").into()),
3678 move |repo, _cx| async move {
3679 match repo {
3680 RepositoryState::Local { backend, .. } => {
3681 backend.change_branch(branch_name).await
3682 }
3683 RepositoryState::Remote { project_id, client } => {
3684 client
3685 .request(proto::GitChangeBranch {
3686 project_id: project_id.0,
3687 repository_id: id.to_proto(),
3688 branch_name,
3689 })
3690 .await?;
3691
3692 Ok(())
3693 }
3694 }
3695 },
3696 )
3697 }
3698
3699 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
3700 let id = self.id;
3701 self.send_job(None, move |repo, _cx| async move {
3702 match repo {
3703 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
3704 RepositoryState::Remote { project_id, client } => {
3705 let response = client
3706 .request(proto::CheckForPushedCommits {
3707 project_id: project_id.0,
3708 repository_id: id.to_proto(),
3709 })
3710 .await?;
3711
3712 let branches = response.pushed_to.into_iter().map(Into::into).collect();
3713
3714 Ok(branches)
3715 }
3716 }
3717 })
3718 }
3719
3720 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
3721 self.send_job(None, |repo, _cx| async move {
3722 match repo {
3723 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
3724 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3725 }
3726 })
3727 }
3728
3729 pub fn restore_checkpoint(
3730 &mut self,
3731 checkpoint: GitRepositoryCheckpoint,
3732 ) -> oneshot::Receiver<Result<()>> {
3733 self.send_job(None, move |repo, _cx| async move {
3734 match repo {
3735 RepositoryState::Local { backend, .. } => {
3736 backend.restore_checkpoint(checkpoint).await
3737 }
3738 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3739 }
3740 })
3741 }
3742
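    /// Applies an `UpdateRepository` message received over RPC to this
    /// repository's snapshot: branch, head commit, merge conflicts, and per-path
    /// statuses (inserting updated entries and removing deleted ones).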
3743 pub(crate) fn apply_remote_update(
3744 &mut self,
3745 update: proto::UpdateRepository,
3746 cx: &mut Context<Self>,
3747 ) -> Result<()> {
3748 let conflicted_paths = TreeSet::from_ordered_entries(
3749 update
3750 .current_merge_conflicts
3751 .into_iter()
3752 .map(|path| RepoPath(Path::new(&path).into())),
3753 );
3754 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
3755 self.snapshot.head_commit = update
3756 .head_commit_details
3757 .as_ref()
3758 .map(proto_to_commit_details);
3759
3760 self.snapshot.merge_conflicts = conflicted_paths;
3761
3762 let edits = update
3763 .removed_statuses
3764 .into_iter()
3765 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
3766 .chain(
3767 update
3768 .updated_statuses
3769 .into_iter()
3770 .filter_map(|updated_status| {
3771 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
3772 }),
3773 )
3774 .collect::<Vec<_>>();
3775 self.snapshot.statuses_by_path.edit(edits, &());
3776 if update.is_last_update {
3777 self.snapshot.scan_id = update.scan_id;
3778 }
3779 cx.emit(RepositoryEvent::Updated { full_scan: true });
3780 Ok(())
3781 }
3782
3783 pub fn compare_checkpoints(
3784 &mut self,
3785 left: GitRepositoryCheckpoint,
3786 right: GitRepositoryCheckpoint,
3787 ) -> oneshot::Receiver<Result<bool>> {
3788 self.send_job(None, move |repo, _cx| async move {
3789 match repo {
3790 RepositoryState::Local { backend, .. } => {
3791 backend.compare_checkpoints(left, right).await
3792 }
3793 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3794 }
3795 })
3796 }
3797
3798 pub fn delete_checkpoint(
3799 &mut self,
3800 checkpoint: GitRepositoryCheckpoint,
3801 ) -> oneshot::Receiver<Result<()>> {
3802 self.send_job(None, move |repo, _cx| async move {
3803 match repo {
3804 RepositoryState::Local { backend, .. } => {
3805 backend.delete_checkpoint(checkpoint).await
3806 }
3807 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3808 }
3809 })
3810 }
3811
3812 pub fn diff_checkpoints(
3813 &mut self,
3814 base_checkpoint: GitRepositoryCheckpoint,
3815 target_checkpoint: GitRepositoryCheckpoint,
3816 ) -> oneshot::Receiver<Result<String>> {
3817 self.send_job(None, move |repo, _cx| async move {
3818 match repo {
3819 RepositoryState::Local { backend, .. } => {
3820 backend
3821 .diff_checkpoints(base_checkpoint, target_checkpoint)
3822 .await
3823 }
3824 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3825 }
3826 })
3827 }
3828
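    /// Schedules a full rescan of this local repository's git state as a keyed
    /// job (so overlapping scan requests collapse into one), stores the new
    /// snapshot, emits the resulting events, and forwards the snapshot
    /// downstream when an updates channel is provided.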
3829 fn schedule_scan(
3830 &mut self,
3831 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
3832 cx: &mut Context<Self>,
3833 ) {
3834 let this = cx.weak_entity();
3835 let _ = self.send_keyed_job(
3836 Some(GitJobKey::ReloadGitState),
3837 None,
3838 |state, mut cx| async move {
3839 let Some(this) = this.upgrade() else {
3840 return Ok(());
3841 };
3842 let RepositoryState::Local { backend, .. } = state else {
3843 bail!("not a local repository")
3844 };
3845 let (snapshot, events) = this
3846 .update(&mut cx, |this, _| {
3847 compute_snapshot(
3848 this.id,
3849 this.work_directory_abs_path.clone(),
3850 this.snapshot.clone(),
3851 backend.clone(),
3852 )
3853 })?
3854 .await?;
3855 this.update(&mut cx, |this, cx| {
3856 this.snapshot = snapshot.clone();
3857 for event in events {
3858 cx.emit(event);
3859 }
3860 })?;
3861 if let Some(updates_tx) = updates_tx {
3862 updates_tx
3863 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3864 .ok();
3865 }
3866 Ok(())
3867 },
3868 );
3869 }
3870
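    /// Spawns the background worker that services this local repository's job
    /// queue: it resolves the working-directory environment, opens the git
    /// backend, and then drains jobs sequentially, skipping any queued job whose
    /// key is superseded by a newer job with the same key.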
    fn spawn_local_git_worker(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        _repository_dir_abs_path: Arc<Path>,
        _common_dir_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let environment = project_environment
                .upgrade()
                .ok_or_else(|| anyhow!("missing project environment"))?
                .update(cx, |project_environment, cx| {
                    project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
                })?
                .await
                .unwrap_or_else(|| {
                    log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                    HashMap::default()
                });
            let backend = cx
                .background_spawn(async move {
                    fs.open_repo(&dot_git_abs_path)
                        .ok_or_else(|| anyhow!("failed to build repository"))
                })
                .await?;

            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    backend.clone(),
                );
            }

            let state = RepositoryState::Local {
                backend,
                environment: Arc::new(environment),
            };
            let mut jobs = VecDeque::new();
            loop {
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    if let Some(current_key) = &job.key {
                        if jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                        {
                            continue;
                        }
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }

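    /// Spawns the background worker that services git jobs for a remote
    /// repository by forwarding them to the given proto client. Jobs are
    /// processed in order, with the same key-based deduplication as the local
    /// worker.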
    fn spawn_remote_git_worker(
        project_id: ProjectId,
        client: AnyProtoClient,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote { project_id, client };
            let mut jobs = VecDeque::new();
            loop {
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    if let Some(current_key) = &job.key {
                        if jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                        {
                            continue;
                        }
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }

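    /// Loads the index (staged) text for the given buffer's path. Local
    /// repositories read it directly from the git backend; remote repositories
    /// request it via `OpenUnstagedDiff`.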
    fn load_staged_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<Option<String>>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local { backend, .. } => {
                    anyhow::Ok(backend.load_index_text(repo_path).await)
                }
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::OpenUnstagedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    Ok(response.staged_text)
                }
            }
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

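    /// Loads the diff bases (HEAD and index texts) for the given buffer's path,
    /// collapsing them into `DiffBasesChange::SetBoth` when the index matches
    /// HEAD. Local repositories read from the git backend; remote repositories
    /// request the texts via `OpenUncommittedDiff`.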
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local { backend, .. } => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote { project_id, client } => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    let mode =
                        Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

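    /// Records that the given paths may have changed and schedules a keyed
    /// `RefreshStatuses` job. The job re-queries the backend's status for just
    /// those paths, edits the snapshot's status tree, bumps the scan id, and
    /// pushes the updated snapshot downstream when anything actually changed.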
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        self.paths_needing_status_update.extend(paths);

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local { backend, .. } = state else {
                    bail!("not a local repository")
                };

                let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
                let statuses = backend.status(&paths).await?;

                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());

                        for (repo_path, status) in &*statuses.entries {
                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &())
                                && cursor.item().is_some_and(|entry| entry.status == *status)
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                            }));
                        }
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());
                        for path in changed_paths.iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) {
                                changed_path_statuses.push(Edit::Remove(PathKey(path.0.clone())));
                            }
                        }
                        changed_path_statuses
                    })
                    .await;

                this.update(&mut cx, |this, cx| {
                    if !changed_path_statuses.is_empty() {
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, &());
                        this.snapshot.scan_id += 1;
                        if let Some(updates_tx) = updates_tx {
                            updates_tx
                                .unbounded_send(DownstreamUpdate::UpdateRepository(
                                    this.snapshot.clone(),
                                ))
                                .ok();
                        }
                    }
                    cx.emit(RepositoryEvent::Updated { full_scan: false });
                })
            },
        );
    }

    /// Returns the currently running git command, if any, along with when it started.
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }

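    /// Enqueues a no-op job and returns a receiver that resolves once all
    /// previously enqueued jobs have been processed.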
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
}

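/// Builds a permalink to a file that lives inside a crate downloaded from the
/// Cargo registry. Walks up from `path` to find the `.cargo_vcs_info.json`
/// that cargo embeds in published crates, then combines its recorded commit
/// SHA with the `package.repository` field of the adjacent `Cargo.toml` to
/// construct a hosting-provider URL for the selected line range.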
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams {
            sha: &cargo_vcs_info.git.sha1,
            path: &path.to_string_lossy(),
            selection: Some(selection),
        },
    );
    Ok(permalink)
}

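/// Converts an optional blame result into the protobuf form used in
/// `BlameBufferResponse`, serializing each entry and commit message.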
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author.clone(),
            author_mail: entry.author_mail.clone(),
            author_time: entry.author_time,
            author_tz: entry.author_tz.clone(),
            committer: entry.committer_name.clone(),
            committer_mail: entry.committer_email.clone(),
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz.clone(),
            summary: entry.summary.clone(),
            previous: entry.previous.clone(),
            filename: entry.filename.clone(),
        })
        .collect::<Vec<_>>();

    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse {
            entries,
            messages,
            remote_url: blame.remote_url,
        }),
    }
}

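/// Reconstructs a `git::blame::Blame` from its protobuf form, silently
/// dropping entries and commit messages whose object ids fail to parse.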
fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame {
        entries,
        messages,
        remote_url: response.remote_url,
    })
}

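/// Converts a `git::repository::Branch` into its protobuf representation.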
fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        name: branch.name.to_string(),
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
            }),
    }
}

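/// Converts a protobuf branch back into a `git::repository::Branch`. The
/// commit's `has_parent` flag is not transmitted over the wire, so it is
/// assumed to be `true` here.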
fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        name: proto.name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                has_parent: true,
            }
        }),
    }
}

fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
    proto::GitCommitDetails {
        sha: commit.sha.to_string(),
        message: commit.message.to_string(),
        commit_timestamp: commit.commit_timestamp,
        author_email: commit.author_email.to_string(),
        author_name: commit.author_name.to_string(),
    }
}

fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
    CommitDetails {
        sha: proto.sha.clone().into(),
        message: proto.message.clone().into(),
        commit_timestamp: proto.commit_timestamp,
        author_email: proto.author_email.clone().into(),
        author_name: proto.author_name.clone().into(),
    }
}

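/// Queries the git backend for the current branch, head commit, merge state,
/// and per-path statuses, and builds a fresh `RepositorySnapshot` from them.
/// Also returns the `RepositoryEvent`s describing how the new snapshot differs
/// from `prev_snapshot`.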
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    let statuses = backend.status(&[WORK_DIRECTORY_REPO_PATH.clone()]).await?;
    let merge_message = backend
        .merge_message()
        .await
        .and_then(|msg| Some(msg.lines().next()?.to_owned().into()));
    let merge_head_shas = backend
        .merge_head_shas()
        .into_iter()
        .map(SharedString::from)
        .collect();

    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        &(),
    );

    let merge_head_shas_changed = merge_head_shas != prev_snapshot.merge_head_shas;

    if merge_head_shas_changed
        || branch != prev_snapshot.branch
        || statuses_by_path != prev_snapshot.statuses_by_path
    {
        events.push(RepositoryEvent::Updated { full_scan: true });
    }

    let mut current_merge_conflicts = TreeSet::default();
    for (repo_path, status) in statuses.entries.iter() {
        if status.is_conflicted() {
            current_merge_conflicts.insert(repo_path.clone());
        }
    }

    // Cache merge conflict paths so they don't change from staging/unstaging,
    // until the merge heads change (at commit time, etc.).
    let mut merge_conflicts = prev_snapshot.merge_conflicts.clone();
    if merge_head_shas_changed {
        merge_conflicts = current_merge_conflicts;
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    // Resolve the head commit directly from the backend; this is useful when
    // `branch` is `None`, e.g. in a detached HEAD state.
    let head_commit = match backend.head_sha() {
        Some(head_sha) => backend.show(head_sha).await.ok(),
        None => None,
    };

    let snapshot = RepositorySnapshot {
        id,
        merge_message,
        statuses_by_path,
        work_directory_abs_path,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge_conflicts,
        merge_head_shas,
    };

    Ok((snapshot, events))
}

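/// Decodes a `FileStatus` from its protobuf representation, falling back to
/// the flat `simple_status` code when no structured variant is present.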
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

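/// Encodes a `FileStatus` as the structured variant of `proto::GitFileStatus`.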
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}