1pub mod git_traversal;
2
3use crate::{
4 ProjectEnvironment, ProjectItem, ProjectPath,
5 buffer_store::{BufferStore, BufferStoreEvent},
6 worktree_store::{WorktreeStore, WorktreeStoreEvent},
7};
8use anyhow::{Context as _, Result, anyhow, bail};
9use askpass::AskPassDelegate;
10use buffer_diff::{BufferDiff, BufferDiffEvent};
11use client::ProjectId;
12use collections::HashMap;
13use fs::Fs;
14use futures::{
15 FutureExt as _, StreamExt as _,
16 channel::{mpsc, oneshot},
17 future::{self, Shared},
18};
19use git::{
20 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
21 blame::Blame,
22 parse_git_remote_url,
23 repository::{
24 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, GitRepository,
25 GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
26 UpstreamTrackingStatus,
27 },
28 status::{
29 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
30 },
31};
32use gpui::{
33 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
34 WeakEntity,
35};
36use language::{
37 Buffer, BufferEvent, Language, LanguageRegistry,
38 proto::{deserialize_version, serialize_version},
39};
40use parking_lot::Mutex;
41use postage::stream::Stream as _;
42use rpc::{
43 AnyProtoClient, TypedEnvelope,
44 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
45};
46use serde::Deserialize;
47use std::{
48 cmp::Ordering,
49 collections::{BTreeSet, VecDeque},
50 future::Future,
51 mem,
52 ops::Range,
53 path::{Path, PathBuf},
54 sync::{
55 Arc,
56 atomic::{self, AtomicU64},
57 },
58 time::Instant,
59};
60use sum_tree::{Edit, SumTree, TreeSet};
61use text::{Bias, BufferId};
62use util::{ResultExt, debug_panic, post_inc};
63use worktree::{
64 File, PathKey, PathProgress, PathSummary, PathTarget, UpdatedGitRepositoriesSet,
65 UpdatedGitRepository, Worktree,
66};
67
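/// Project-wide git state: the set of discovered repositories, the unstaged and
/// uncommitted diffs for open buffers, and, when the project is shared, the
/// plumbing that forwards repository updates to downstream clients.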
68pub struct GitStore {
69 state: GitStoreState,
70 buffer_store: Entity<BufferStore>,
71 worktree_store: Entity<WorktreeStore>,
72 repositories: HashMap<RepositoryId, Entity<Repository>>,
73 active_repo_id: Option<RepositoryId>,
74 #[allow(clippy::type_complexity)]
75 loading_diffs:
76 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
77 diffs: HashMap<BufferId, Entity<BufferDiffState>>,
78 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
79 _subscriptions: Vec<Subscription>,
80}
81
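/// Strong handles to the diffs that have been shared with a remote peer for a
/// single buffer.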
82#[derive(Default)]
83struct SharedDiffs {
84 unstaged: Option<Entity<BufferDiff>>,
85 uncommitted: Option<Entity<BufferDiff>>,
86}
87
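/// Per-buffer diff state: weak handles to the buffer's unstaged and uncommitted
/// diffs, plus the base texts and bookkeeping needed to recalculate them.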
88struct BufferDiffState {
89 unstaged_diff: Option<WeakEntity<BufferDiff>>,
90 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
91 recalculate_diff_task: Option<Task<Result<()>>>,
92 language: Option<Arc<Language>>,
93 language_registry: Option<Arc<LanguageRegistry>>,
94 recalculating_tx: postage::watch::Sender<bool>,
95
96 /// These operation counts are used to ensure that head and index text
97 /// values read from the git repository are up-to-date with any hunk staging
98 /// operations that have been performed on the BufferDiff.
99 ///
100 /// The operation count is incremented immediately when the user initiates a
101 /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
103 /// the operation count that prompted the write.
104 hunk_staging_operation_count: usize,
105 hunk_staging_operation_count_as_of_write: usize,
106
107 head_text: Option<Arc<String>>,
108 index_text: Option<Arc<String>>,
109 head_changed: bool,
110 index_changed: bool,
111 language_changed: bool,
112}
113
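/// A change to the base texts a buffer is diffed against: the index text, the
/// HEAD text, or both (`SetBoth` sets both bases to the same text).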
114#[derive(Clone, Debug)]
115enum DiffBasesChange {
116 SetIndex(Option<String>),
117 SetHead(Option<String>),
118 SetEach {
119 index: Option<String>,
120 head: Option<String>,
121 },
122 SetBoth(Option<String>),
123}
124
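/// Which diff is being loaded for a buffer: changes relative to the index
/// (unstaged) or relative to HEAD (uncommitted).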
125#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
126enum DiffKind {
127 Unstaged,
128 Uncommitted,
129}
130
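/// Where the authoritative git data lives. `Local` reads repositories from
/// disk, while `Ssh` and `Remote` forward git requests to an upstream client.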
131enum GitStoreState {
132 Local {
133 next_repository_id: Arc<AtomicU64>,
134 downstream: Option<LocalDownstreamState>,
135 project_environment: Entity<ProjectEnvironment>,
136 fs: Arc<dyn Fs>,
137 },
138 Ssh {
139 upstream_client: AnyProtoClient,
140 upstream_project_id: ProjectId,
141 downstream: Option<(AnyProtoClient, ProjectId)>,
142 },
143 Remote {
144 upstream_client: AnyProtoClient,
145 upstream_project_id: ProjectId,
146 },
147}
148
149enum DownstreamUpdate {
150 UpdateRepository(RepositorySnapshot),
151 RemoveRepository(RepositoryId),
152}
153
154struct LocalDownstreamState {
155 client: AnyProtoClient,
156 project_id: ProjectId,
157 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
158 _task: Task<Result<()>>,
159}
160
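/// A checkpoint of every repository in the store, keyed by each repository's
/// working-directory path. See [`GitStore::checkpoint`] and
/// [`GitStore::restore_checkpoint`].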
161#[derive(Clone)]
162pub struct GitStoreCheckpoint {
163 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
164}
165
166#[derive(Clone, Debug, PartialEq, Eq)]
167pub struct StatusEntry {
168 pub repo_path: RepoPath,
169 pub status: FileStatus,
170}
171
172impl StatusEntry {
173 fn to_proto(&self) -> proto::StatusEntry {
174 let simple_status = match self.status {
175 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
176 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
177 FileStatus::Tracked(TrackedStatus {
178 index_status,
179 worktree_status,
180 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
181 worktree_status
182 } else {
183 index_status
184 }),
185 };
186
187 proto::StatusEntry {
188 repo_path: self.repo_path.as_ref().to_proto(),
189 simple_status,
190 status: Some(status_to_proto(self.status)),
191 }
192 }
193}
194
195impl TryFrom<proto::StatusEntry> for StatusEntry {
196 type Error = anyhow::Error;
197
198 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
199 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
200 let status = status_from_proto(value.simple_status, value.status)?;
201 Ok(Self { repo_path, status })
202 }
203}
204
205impl sum_tree::Item for StatusEntry {
206 type Summary = PathSummary<GitSummary>;
207
208 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
209 PathSummary {
210 max_path: self.repo_path.0.clone(),
211 item_summary: self.status.summary(),
212 }
213 }
214}
215
216impl sum_tree::KeyedItem for StatusEntry {
217 type Key = PathKey;
218
219 fn key(&self) -> Self::Key {
220 PathKey(self.repo_path.0.clone())
221 }
222}
223
224#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
225pub struct RepositoryId(pub u64);
226
227#[derive(Clone, Debug, PartialEq, Eq)]
228pub struct RepositorySnapshot {
229 pub id: RepositoryId,
230 pub merge_message: Option<SharedString>,
231 pub statuses_by_path: SumTree<StatusEntry>,
232 pub work_directory_abs_path: Arc<Path>,
233 pub branch: Option<Branch>,
234 pub head_commit: Option<CommitDetails>,
235 pub merge_conflicts: TreeSet<RepoPath>,
236 pub merge_head_shas: Vec<SharedString>,
237 pub scan_id: u64,
238}
239
240type JobId = u64;
241
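/// Metadata about an in-flight git job: when it started and a human-readable
/// description of what it is doing.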
242#[derive(Clone, Debug, PartialEq, Eq)]
243pub struct JobInfo {
244 pub start: Instant,
245 pub message: SharedString,
246}
247
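/// A single git repository tracked by the [`GitStore`]. Wraps a
/// [`RepositorySnapshot`] and a queue of [`GitJob`]s that are executed against
/// the repository's backing state.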
248pub struct Repository {
249 this: WeakEntity<Self>,
250 snapshot: RepositorySnapshot,
251 commit_message_buffer: Option<Entity<Buffer>>,
252 git_store: WeakEntity<GitStore>,
253 // For a local repository, holds paths that have had worktree events since the last status scan completed,
254 // and that should be examined during the next status scan.
255 paths_needing_status_update: BTreeSet<RepoPath>,
256 job_sender: mpsc::UnboundedSender<GitJob>,
257 active_jobs: HashMap<JobId, JobInfo>,
258 job_id: JobId,
259 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
260 latest_askpass_id: u64,
261}
262
263impl std::ops::Deref for Repository {
264 type Target = RepositorySnapshot;
265
266 fn deref(&self) -> &Self::Target {
267 &self.snapshot
268 }
269}
270
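/// How git commands for a repository are executed: directly against a local
/// [`GitRepository`] backend, or by sending requests to the owning project
/// over RPC.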
271#[derive(Clone)]
272pub enum RepositoryState {
273 Local {
274 backend: Arc<dyn GitRepository>,
275 environment: Arc<HashMap<String, String>>,
276 },
277 Remote {
278 project_id: ProjectId,
279 client: AnyProtoClient,
280 },
281}
282
283#[derive(Clone, Debug)]
284pub enum RepositoryEvent {
285 Updated { full_scan: bool },
286 MergeHeadsChanged,
287}
288
289#[derive(Clone, Debug)]
290pub struct JobsUpdated;
291
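/// Events emitted by the [`GitStore`]. The `bool` carried by
/// `RepositoryUpdated` indicates whether the updated repository is currently
/// the active one.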
292#[derive(Debug)]
293pub enum GitStoreEvent {
294 ActiveRepositoryChanged(Option<RepositoryId>),
295 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
296 RepositoryAdded(RepositoryId),
297 RepositoryRemoved(RepositoryId),
298 IndexWriteError(anyhow::Error),
299 JobsUpdated,
300}
301
302impl EventEmitter<RepositoryEvent> for Repository {}
303impl EventEmitter<JobsUpdated> for Repository {}
304impl EventEmitter<GitStoreEvent> for GitStore {}
305
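/// A queued unit of git work, run with access to the repository's
/// [`RepositoryState`]. The optional key identifies jobs of the same kind,
/// such as index writes for a particular path.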
306pub struct GitJob {
307 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
308 key: Option<GitJobKey>,
309}
310
311#[derive(PartialEq, Eq)]
312enum GitJobKey {
313 WriteIndex(RepoPath),
314 ReloadBufferDiffBases,
315 RefreshStatuses,
316 ReloadGitState,
317}
318
319impl GitStore {
320 pub fn local(
321 worktree_store: &Entity<WorktreeStore>,
322 buffer_store: Entity<BufferStore>,
323 environment: Entity<ProjectEnvironment>,
324 fs: Arc<dyn Fs>,
325 cx: &mut Context<Self>,
326 ) -> Self {
327 Self::new(
328 worktree_store.clone(),
329 buffer_store,
330 GitStoreState::Local {
331 next_repository_id: Arc::new(AtomicU64::new(1)),
332 downstream: None,
333 project_environment: environment,
334 fs,
335 },
336 cx,
337 )
338 }
339
340 pub fn remote(
341 worktree_store: &Entity<WorktreeStore>,
342 buffer_store: Entity<BufferStore>,
343 upstream_client: AnyProtoClient,
344 project_id: ProjectId,
345 cx: &mut Context<Self>,
346 ) -> Self {
347 Self::new(
348 worktree_store.clone(),
349 buffer_store,
350 GitStoreState::Remote {
351 upstream_client,
352 upstream_project_id: project_id,
353 },
354 cx,
355 )
356 }
357
358 pub fn ssh(
359 worktree_store: &Entity<WorktreeStore>,
360 buffer_store: Entity<BufferStore>,
361 upstream_client: AnyProtoClient,
362 cx: &mut Context<Self>,
363 ) -> Self {
364 Self::new(
365 worktree_store.clone(),
366 buffer_store,
367 GitStoreState::Ssh {
368 upstream_client,
369 upstream_project_id: ProjectId(SSH_PROJECT_ID),
370 downstream: None,
371 },
372 cx,
373 )
374 }
375
376 fn new(
377 worktree_store: Entity<WorktreeStore>,
378 buffer_store: Entity<BufferStore>,
379 state: GitStoreState,
380 cx: &mut Context<Self>,
381 ) -> Self {
382 let _subscriptions = vec![
383 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
384 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
385 ];
386
387 GitStore {
388 state,
389 buffer_store,
390 worktree_store,
391 repositories: HashMap::default(),
392 active_repo_id: None,
393 _subscriptions,
394 loading_diffs: HashMap::default(),
395 shared_diffs: HashMap::default(),
396 diffs: HashMap::default(),
397 }
398 }
399
400 pub fn init(client: &AnyProtoClient) {
401 client.add_entity_request_handler(Self::handle_get_remotes);
402 client.add_entity_request_handler(Self::handle_get_branches);
403 client.add_entity_request_handler(Self::handle_change_branch);
404 client.add_entity_request_handler(Self::handle_create_branch);
405 client.add_entity_request_handler(Self::handle_git_init);
406 client.add_entity_request_handler(Self::handle_push);
407 client.add_entity_request_handler(Self::handle_pull);
408 client.add_entity_request_handler(Self::handle_fetch);
409 client.add_entity_request_handler(Self::handle_stage);
410 client.add_entity_request_handler(Self::handle_unstage);
411 client.add_entity_request_handler(Self::handle_commit);
412 client.add_entity_request_handler(Self::handle_reset);
413 client.add_entity_request_handler(Self::handle_show);
414 client.add_entity_request_handler(Self::handle_load_commit_diff);
415 client.add_entity_request_handler(Self::handle_checkout_files);
416 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
417 client.add_entity_request_handler(Self::handle_set_index_text);
418 client.add_entity_request_handler(Self::handle_askpass);
419 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
420 client.add_entity_request_handler(Self::handle_git_diff);
421 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
422 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
423 client.add_entity_message_handler(Self::handle_update_diff_bases);
424 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
425 client.add_entity_request_handler(Self::handle_blame_buffer);
426 client.add_entity_message_handler(Self::handle_update_repository);
427 client.add_entity_message_handler(Self::handle_remove_repository);
428 }
429
430 pub fn is_local(&self) -> bool {
431 matches!(self.state, GitStoreState::Local { .. })
432 }
433
434 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
435 match &mut self.state {
436 GitStoreState::Ssh {
437 downstream: downstream_client,
438 ..
439 } => {
440 for repo in self.repositories.values() {
441 let update = repo.read(cx).snapshot.initial_update(project_id);
442 for update in split_repository_update(update) {
443 client.send(update).log_err();
444 }
445 }
446 *downstream_client = Some((client, ProjectId(project_id)));
447 }
448 GitStoreState::Local {
449 downstream: downstream_client,
450 ..
451 } => {
452 let mut snapshots = HashMap::default();
453 let (updates_tx, mut updates_rx) = mpsc::unbounded();
454 for repo in self.repositories.values() {
455 updates_tx
456 .unbounded_send(DownstreamUpdate::UpdateRepository(
457 repo.read(cx).snapshot.clone(),
458 ))
459 .ok();
460 }
461 *downstream_client = Some(LocalDownstreamState {
462 client: client.clone(),
463 project_id: ProjectId(project_id),
464 updates_tx,
465 _task: cx.spawn(async move |this, cx| {
466 cx.background_spawn(async move {
467 while let Some(update) = updates_rx.next().await {
468 match update {
469 DownstreamUpdate::UpdateRepository(snapshot) => {
470 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
471 {
472 let update =
473 snapshot.build_update(old_snapshot, project_id);
474 *old_snapshot = snapshot;
475 for update in split_repository_update(update) {
476 client.send(update)?;
477 }
478 } else {
479 let update = snapshot.initial_update(project_id);
480 for update in split_repository_update(update) {
481 client.send(update)?;
482 }
483 snapshots.insert(snapshot.id, snapshot);
484 }
485 }
486 DownstreamUpdate::RemoveRepository(id) => {
487 client.send(proto::RemoveRepository {
488 project_id,
489 id: id.to_proto(),
490 })?;
491 }
492 }
493 }
494 anyhow::Ok(())
495 })
496 .await
497 .ok();
498 this.update(cx, |this, _| {
499 if let GitStoreState::Local {
500 downstream: downstream_client,
501 ..
502 } = &mut this.state
503 {
504 downstream_client.take();
505 } else {
506 unreachable!("unshared called on remote store");
507 }
508 })
509 }),
510 });
511 }
512 GitStoreState::Remote { .. } => {
513 debug_panic!("shared called on remote store");
514 }
515 }
516 }
517
518 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
519 match &mut self.state {
520 GitStoreState::Local {
521 downstream: downstream_client,
522 ..
523 } => {
524 downstream_client.take();
525 }
526 GitStoreState::Ssh {
527 downstream: downstream_client,
528 ..
529 } => {
530 downstream_client.take();
531 }
532 GitStoreState::Remote { .. } => {
533 debug_panic!("unshared called on remote store");
534 }
535 }
536 self.shared_diffs.clear();
537 }
538
539 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
540 self.shared_diffs.remove(peer_id);
541 }
542
543 pub fn active_repository(&self) -> Option<Entity<Repository>> {
544 self.active_repo_id
545 .as_ref()
            .map(|id| self.repositories[id].clone())
547 }
548
549 pub fn open_unstaged_diff(
550 &mut self,
551 buffer: Entity<Buffer>,
552 cx: &mut Context<Self>,
553 ) -> Task<Result<Entity<BufferDiff>>> {
554 let buffer_id = buffer.read(cx).remote_id();
555 if let Some(diff_state) = self.diffs.get(&buffer_id) {
556 if let Some(unstaged_diff) = diff_state
557 .read(cx)
558 .unstaged_diff
559 .as_ref()
560 .and_then(|weak| weak.upgrade())
561 {
562 if let Some(task) =
563 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
564 {
565 return cx.background_executor().spawn(async move {
566 task.await;
567 Ok(unstaged_diff)
568 });
569 }
570 return Task::ready(Ok(unstaged_diff));
571 }
572 }
573
574 let Some((repo, repo_path)) =
575 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
576 else {
577 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
578 };
579
580 let task = self
581 .loading_diffs
582 .entry((buffer_id, DiffKind::Unstaged))
583 .or_insert_with(|| {
584 let staged_text = repo.update(cx, |repo, cx| {
585 repo.load_staged_text(buffer_id, repo_path, cx)
586 });
587 cx.spawn(async move |this, cx| {
588 Self::open_diff_internal(
589 this,
590 DiffKind::Unstaged,
591 staged_text.await.map(DiffBasesChange::SetIndex),
592 buffer,
593 cx,
594 )
595 .await
596 .map_err(Arc::new)
597 })
598 .shared()
599 })
600 .clone();
601
602 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
603 }
604
605 pub fn open_uncommitted_diff(
606 &mut self,
607 buffer: Entity<Buffer>,
608 cx: &mut Context<Self>,
609 ) -> Task<Result<Entity<BufferDiff>>> {
610 let buffer_id = buffer.read(cx).remote_id();
611
612 if let Some(diff_state) = self.diffs.get(&buffer_id) {
613 if let Some(uncommitted_diff) = diff_state
614 .read(cx)
615 .uncommitted_diff
616 .as_ref()
617 .and_then(|weak| weak.upgrade())
618 {
619 if let Some(task) =
620 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
621 {
622 return cx.background_executor().spawn(async move {
623 task.await;
624 Ok(uncommitted_diff)
625 });
626 }
627 return Task::ready(Ok(uncommitted_diff));
628 }
629 }
630
631 let Some((repo, repo_path)) =
632 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
633 else {
634 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
635 };
636
637 let task = self
638 .loading_diffs
639 .entry((buffer_id, DiffKind::Uncommitted))
640 .or_insert_with(|| {
641 let changes = repo.update(cx, |repo, cx| {
642 repo.load_committed_text(buffer_id, repo_path, cx)
643 });
644
645 cx.spawn(async move |this, cx| {
646 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
647 .await
648 .map_err(Arc::new)
649 })
650 .shared()
651 })
652 .clone();
653
654 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
655 }
656
657 async fn open_diff_internal(
658 this: WeakEntity<Self>,
659 kind: DiffKind,
660 texts: Result<DiffBasesChange>,
661 buffer_entity: Entity<Buffer>,
662 cx: &mut AsyncApp,
663 ) -> Result<Entity<BufferDiff>> {
664 let diff_bases_change = match texts {
665 Err(e) => {
666 this.update(cx, |this, cx| {
667 let buffer = buffer_entity.read(cx);
668 let buffer_id = buffer.remote_id();
669 this.loading_diffs.remove(&(buffer_id, kind));
670 })?;
671 return Err(e);
672 }
673 Ok(change) => change,
674 };
675
676 this.update(cx, |this, cx| {
677 let buffer = buffer_entity.read(cx);
678 let buffer_id = buffer.remote_id();
679 let language = buffer.language().cloned();
680 let language_registry = buffer.language_registry();
681 let text_snapshot = buffer.text_snapshot();
682 this.loading_diffs.remove(&(buffer_id, kind));
683
684 let diff_state = this
685 .diffs
686 .entry(buffer_id)
687 .or_insert_with(|| cx.new(|_| BufferDiffState::default()));
688
689 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
690
691 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
692 diff_state.update(cx, |diff_state, cx| {
693 diff_state.language = language;
694 diff_state.language_registry = language_registry;
695
696 match kind {
697 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
698 DiffKind::Uncommitted => {
699 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
700 diff
701 } else {
702 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
703 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
704 unstaged_diff
705 };
706
707 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
708 diff_state.uncommitted_diff = Some(diff.downgrade())
709 }
710 }
711
712 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
713 let rx = diff_state.wait_for_recalculation();
714
715 anyhow::Ok(async move {
716 if let Some(rx) = rx {
717 rx.await;
718 }
719 Ok(diff)
720 })
721 })
722 })??
723 .await
724 }
725
726 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
727 let diff_state = self.diffs.get(&buffer_id)?;
728 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
729 }
730
731 pub fn get_uncommitted_diff(
732 &self,
733 buffer_id: BufferId,
734 cx: &App,
735 ) -> Option<Entity<BufferDiff>> {
736 let diff_state = self.diffs.get(&buffer_id)?;
737 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
738 }
739
740 pub fn project_path_git_status(
741 &self,
742 project_path: &ProjectPath,
743 cx: &App,
744 ) -> Option<FileStatus> {
745 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
746 Some(repo.read(cx).status_for_path(&repo_path)?.status)
747 }
748
749 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
750 let mut work_directory_abs_paths = Vec::new();
751 let mut checkpoints = Vec::new();
752 for repository in self.repositories.values() {
753 repository.update(cx, |repository, _| {
754 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
755 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
756 });
757 }
758
759 cx.background_executor().spawn(async move {
760 let checkpoints = future::try_join_all(checkpoints).await?;
761 Ok(GitStoreCheckpoint {
762 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
763 .into_iter()
764 .zip(checkpoints)
765 .collect(),
766 })
767 })
768 }
769
770 pub fn restore_checkpoint(
771 &self,
772 checkpoint: GitStoreCheckpoint,
773 cx: &mut App,
774 ) -> Task<Result<()>> {
775 let repositories_by_work_dir_abs_path = self
776 .repositories
777 .values()
778 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
779 .collect::<HashMap<_, _>>();
780
781 let mut tasks = Vec::new();
782 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
783 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
784 let restore = repository.update(cx, |repository, _| {
785 repository.restore_checkpoint(checkpoint)
786 });
787 tasks.push(async move { restore.await? });
788 }
789 }
790 cx.background_spawn(async move {
791 future::try_join_all(tasks).await?;
792 Ok(())
793 })
794 }
795
796 /// Compares two checkpoints, returning true if they are equal.
797 pub fn compare_checkpoints(
798 &self,
799 left: GitStoreCheckpoint,
800 mut right: GitStoreCheckpoint,
801 cx: &mut App,
802 ) -> Task<Result<bool>> {
803 let repositories_by_work_dir_abs_path = self
804 .repositories
805 .values()
806 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
807 .collect::<HashMap<_, _>>();
808
809 let mut tasks = Vec::new();
810 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
811 if let Some(right_checkpoint) = right
812 .checkpoints_by_work_dir_abs_path
813 .remove(&work_dir_abs_path)
814 {
815 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
816 {
817 let compare = repository.update(cx, |repository, _| {
818 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
819 });
820
821 tasks.push(async move { compare.await? });
822 }
823 } else {
824 return Task::ready(Ok(false));
825 }
826 }
827 cx.background_spawn(async move {
828 Ok(future::try_join_all(tasks)
829 .await?
830 .into_iter()
831 .all(|result| result))
832 })
833 }
834
835 /// Blames a buffer.
836 pub fn blame_buffer(
837 &self,
838 buffer: &Entity<Buffer>,
839 version: Option<clock::Global>,
840 cx: &mut App,
841 ) -> Task<Result<Option<Blame>>> {
842 let buffer = buffer.read(cx);
843 let Some((repo, repo_path)) =
844 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
845 else {
846 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
847 };
848 let content = match &version {
849 Some(version) => buffer.rope_for_version(version).clone(),
850 None => buffer.as_rope().clone(),
851 };
852 let version = version.unwrap_or(buffer.version());
853 let buffer_id = buffer.remote_id();
854
855 let rx = repo.update(cx, |repo, _| {
856 repo.send_job(None, move |state, _| async move {
857 match state {
858 RepositoryState::Local { backend, .. } => backend
859 .blame(repo_path.clone(), content)
860 .await
861 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
862 .map(Some),
863 RepositoryState::Remote { project_id, client } => {
864 let response = client
865 .request(proto::BlameBuffer {
866 project_id: project_id.to_proto(),
867 buffer_id: buffer_id.into(),
868 version: serialize_version(&version),
869 })
870 .await?;
871 Ok(deserialize_blame_buffer_response(response))
872 }
873 }
874 })
875 });
876
877 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
878 }
879
880 pub fn get_permalink_to_line(
881 &self,
882 buffer: &Entity<Buffer>,
883 selection: Range<u32>,
884 cx: &mut App,
885 ) -> Task<Result<url::Url>> {
886 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
887 return Task::ready(Err(anyhow!("buffer has no file")));
888 };
889
890 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
891 &(file.worktree.read(cx).id(), file.path.clone()).into(),
892 cx,
893 ) else {
894 // If we're not in a Git repo, check whether this is a Rust source
895 // file in the Cargo registry (presumably opened with go-to-definition
896 // from a normal Rust file). If so, we can put together a permalink
897 // using crate metadata.
898 if buffer
899 .read(cx)
900 .language()
901 .is_none_or(|lang| lang.name() != "Rust".into())
902 {
903 return Task::ready(Err(anyhow!("no permalink available")));
904 }
905 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
906 return Task::ready(Err(anyhow!("no permalink available")));
907 };
908 return cx.spawn(async move |cx| {
909 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
910 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
911 .map_err(|_| anyhow!("no permalink available"))
912 });
913
914 // TODO remote case
915 };
916
917 let buffer_id = buffer.read(cx).remote_id();
918 let branch = repo.read(cx).branch.clone();
919 let remote = branch
920 .as_ref()
921 .and_then(|b| b.upstream.as_ref())
922 .and_then(|b| b.remote_name())
923 .unwrap_or("origin")
924 .to_string();
925
926 let rx = repo.update(cx, |repo, _| {
927 repo.send_job(None, move |state, cx| async move {
928 match state {
929 RepositoryState::Local { backend, .. } => {
930 let origin_url = backend
931 .remote_url(&remote)
932 .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;
933
934 let sha = backend
935 .head_sha()
936 .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
937
938 let provider_registry =
939 cx.update(GitHostingProviderRegistry::default_global)?;
940
941 let (provider, remote) =
942 parse_git_remote_url(provider_registry, &origin_url)
943 .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;
944
945 let path = repo_path
946 .to_str()
947 .ok_or_else(|| anyhow!("failed to convert path to string"))?;
948
949 Ok(provider.build_permalink(
950 remote,
951 BuildPermalinkParams {
952 sha: &sha,
953 path,
954 selection: Some(selection),
955 },
956 ))
957 }
958 RepositoryState::Remote { project_id, client } => {
959 let response = client
960 .request(proto::GetPermalinkToLine {
961 project_id: project_id.to_proto(),
962 buffer_id: buffer_id.into(),
963 selection: Some(proto::Range {
964 start: selection.start as u64,
965 end: selection.end as u64,
966 }),
967 })
968 .await?;
969
970 url::Url::parse(&response.permalink).context("failed to parse permalink")
971 }
972 }
973 })
974 });
975 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
976 }
977
978 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
979 match &self.state {
980 GitStoreState::Local {
981 downstream: downstream_client,
982 ..
983 } => downstream_client
984 .as_ref()
985 .map(|state| (state.client.clone(), state.project_id)),
986 GitStoreState::Ssh {
987 downstream: downstream_client,
988 ..
989 } => downstream_client.clone(),
990 GitStoreState::Remote { .. } => None,
991 }
992 }
993
994 fn upstream_client(&self) -> Option<AnyProtoClient> {
995 match &self.state {
996 GitStoreState::Local { .. } => None,
997 GitStoreState::Ssh {
998 upstream_client, ..
999 }
1000 | GitStoreState::Remote {
1001 upstream_client, ..
1002 } => Some(upstream_client.clone()),
1003 }
1004 }
1005
1006 fn on_worktree_store_event(
1007 &mut self,
1008 worktree_store: Entity<WorktreeStore>,
1009 event: &WorktreeStoreEvent,
1010 cx: &mut Context<Self>,
1011 ) {
1012 let GitStoreState::Local {
1013 project_environment,
1014 downstream,
1015 next_repository_id,
1016 fs,
1017 } = &self.state
1018 else {
1019 return;
1020 };
1021
1022 match event {
1023 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1024 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
1025 for (relative_path, _, _) in updated_entries.iter() {
1026 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1027 &(*worktree_id, relative_path.clone()).into(),
1028 cx,
1029 ) else {
1030 continue;
1031 };
1032 paths_by_git_repo.entry(repo).or_default().push(repo_path)
1033 }
1034
1035 for (repo, paths) in paths_by_git_repo {
1036 repo.update(cx, |repo, cx| {
1037 repo.paths_changed(
1038 paths,
1039 downstream
1040 .as_ref()
1041 .map(|downstream| downstream.updates_tx.clone()),
1042 cx,
1043 );
1044 });
1045 }
1046 }
1047 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1048 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1049 else {
1050 return;
1051 };
1052 if !worktree.read(cx).is_visible() {
1053 log::debug!(
1054 "not adding repositories for local worktree {:?} because it's not visible",
1055 worktree.read(cx).abs_path()
1056 );
1057 return;
1058 }
1059 self.update_repositories_from_worktree(
1060 project_environment.clone(),
1061 next_repository_id.clone(),
1062 downstream
1063 .as_ref()
1064 .map(|downstream| downstream.updates_tx.clone()),
1065 changed_repos.clone(),
1066 fs.clone(),
1067 cx,
1068 );
1069 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1070 }
1071 _ => {}
1072 }
1073 }
1074
1075 fn on_repository_event(
1076 &mut self,
1077 repo: Entity<Repository>,
1078 event: &RepositoryEvent,
1079 cx: &mut Context<Self>,
1080 ) {
1081 let id = repo.read(cx).id;
1082 cx.emit(GitStoreEvent::RepositoryUpdated(
1083 id,
1084 event.clone(),
1085 self.active_repo_id == Some(id),
1086 ))
1087 }
1088
1089 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1090 cx.emit(GitStoreEvent::JobsUpdated)
1091 }
1092
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1094 fn update_repositories_from_worktree(
1095 &mut self,
1096 project_environment: Entity<ProjectEnvironment>,
1097 next_repository_id: Arc<AtomicU64>,
1098 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1099 updated_git_repositories: UpdatedGitRepositoriesSet,
1100 fs: Arc<dyn Fs>,
1101 cx: &mut Context<Self>,
1102 ) {
1103 let mut removed_ids = Vec::new();
1104 for update in updated_git_repositories.iter() {
1105 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1106 let existing_work_directory_abs_path =
1107 repo.read(cx).work_directory_abs_path.clone();
1108 Some(&existing_work_directory_abs_path)
1109 == update.old_work_directory_abs_path.as_ref()
1110 || Some(&existing_work_directory_abs_path)
1111 == update.new_work_directory_abs_path.as_ref()
1112 }) {
1113 if let Some(new_work_directory_abs_path) =
1114 update.new_work_directory_abs_path.clone()
1115 {
1116 existing.update(cx, |existing, cx| {
1117 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1118 existing.schedule_scan(updates_tx.clone(), cx);
1119 });
1120 } else {
1121 removed_ids.push(*id);
1122 }
1123 } else if let UpdatedGitRepository {
1124 new_work_directory_abs_path: Some(work_directory_abs_path),
1125 dot_git_abs_path: Some(dot_git_abs_path),
1126 repository_dir_abs_path: Some(repository_dir_abs_path),
1127 common_dir_abs_path: Some(common_dir_abs_path),
1128 ..
1129 } = update
1130 {
1131 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1132 let git_store = cx.weak_entity();
1133 let repo = cx.new(|cx| {
1134 let mut repo = Repository::local(
1135 id,
1136 work_directory_abs_path.clone(),
1137 dot_git_abs_path.clone(),
1138 repository_dir_abs_path.clone(),
1139 common_dir_abs_path.clone(),
1140 project_environment.downgrade(),
1141 fs.clone(),
1142 git_store,
1143 cx,
1144 );
1145 repo.schedule_scan(updates_tx.clone(), cx);
1146 repo
1147 });
1148 self._subscriptions
1149 .push(cx.subscribe(&repo, Self::on_repository_event));
1150 self._subscriptions
1151 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1152 self.repositories.insert(id, repo);
1153 cx.emit(GitStoreEvent::RepositoryAdded(id));
1154 self.active_repo_id.get_or_insert_with(|| {
1155 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1156 id
1157 });
1158 }
1159 }
1160
1161 for id in removed_ids {
1162 if self.active_repo_id == Some(id) {
1163 self.active_repo_id = None;
1164 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1165 }
1166 self.repositories.remove(&id);
1167 if let Some(updates_tx) = updates_tx.as_ref() {
1168 updates_tx
1169 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1170 .ok();
1171 }
1172 }
1173 }
1174
1175 fn on_buffer_store_event(
1176 &mut self,
1177 _: Entity<BufferStore>,
1178 event: &BufferStoreEvent,
1179 cx: &mut Context<Self>,
1180 ) {
1181 match event {
1182 BufferStoreEvent::BufferAdded(buffer) => {
1183 cx.subscribe(&buffer, |this, buffer, event, cx| {
1184 if let BufferEvent::LanguageChanged = event {
1185 let buffer_id = buffer.read(cx).remote_id();
1186 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1187 diff_state.update(cx, |diff_state, cx| {
1188 diff_state.buffer_language_changed(buffer, cx);
1189 });
1190 }
1191 }
1192 })
1193 .detach();
1194 }
1195 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1196 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1197 diffs.remove(buffer_id);
1198 }
1199 }
1200 BufferStoreEvent::BufferDropped(buffer_id) => {
                self.diffs.remove(buffer_id);
1202 for diffs in self.shared_diffs.values_mut() {
1203 diffs.remove(buffer_id);
1204 }
1205 }
1206
1207 _ => {}
1208 }
1209 }
1210
1211 pub fn recalculate_buffer_diffs(
1212 &mut self,
1213 buffers: Vec<Entity<Buffer>>,
1214 cx: &mut Context<Self>,
1215 ) -> impl Future<Output = ()> + use<> {
1216 let mut futures = Vec::new();
1217 for buffer in buffers {
1218 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1219 let buffer = buffer.read(cx).text_snapshot();
1220 diff_state.update(cx, |diff_state, cx| {
1221 diff_state.recalculate_diffs(buffer, cx);
1222 futures.extend(diff_state.wait_for_recalculation());
1223 });
1224 }
1225 }
1226 async move {
1227 futures::future::join_all(futures).await;
1228 }
1229 }
1230
1231 fn on_buffer_diff_event(
1232 &mut self,
1233 diff: Entity<buffer_diff::BufferDiff>,
1234 event: &BufferDiffEvent,
1235 cx: &mut Context<Self>,
1236 ) {
1237 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1238 let buffer_id = diff.read(cx).buffer_id;
1239 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1240 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1241 diff_state.hunk_staging_operation_count += 1;
1242 diff_state.hunk_staging_operation_count
1243 });
1244 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1245 let recv = repo.update(cx, |repo, cx| {
1246 log::debug!("hunks changed for {}", path.display());
1247 repo.spawn_set_index_text_job(
1248 path,
1249 new_index_text.as_ref().map(|rope| rope.to_string()),
1250 Some(hunk_staging_operation_count),
1251 cx,
1252 )
1253 });
1254 let diff = diff.downgrade();
1255 cx.spawn(async move |this, cx| {
1256 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1257 diff.update(cx, |diff, cx| {
1258 diff.clear_pending_hunks(cx);
1259 })
1260 .ok();
1261 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1262 .ok();
1263 }
1264 })
1265 .detach();
1266 }
1267 }
1268 }
1269 }
1270
1271 fn local_worktree_git_repos_changed(
1272 &mut self,
1273 worktree: Entity<Worktree>,
1274 changed_repos: &UpdatedGitRepositoriesSet,
1275 cx: &mut Context<Self>,
1276 ) {
1277 log::debug!("local worktree repos changed");
1278 debug_assert!(worktree.read(cx).is_local());
1279
1280 for repository in self.repositories.values() {
1281 repository.update(cx, |repository, cx| {
1282 let repo_abs_path = &repository.work_directory_abs_path;
1283 if changed_repos.iter().any(|update| {
                    update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
                        || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1286 }) {
1287 repository.reload_buffer_diff_bases(cx);
1288 }
1289 });
1290 }
1291 }
1292
1293 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1294 &self.repositories
1295 }
1296
1297 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1298 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1299 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1300 Some(status.status)
1301 }
1302
1303 pub fn repository_and_path_for_buffer_id(
1304 &self,
1305 buffer_id: BufferId,
1306 cx: &App,
1307 ) -> Option<(Entity<Repository>, RepoPath)> {
1308 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1309 let project_path = buffer.read(cx).project_path(cx)?;
1310 self.repository_and_path_for_project_path(&project_path, cx)
1311 }
1312
1313 pub fn repository_and_path_for_project_path(
1314 &self,
1315 path: &ProjectPath,
1316 cx: &App,
1317 ) -> Option<(Entity<Repository>, RepoPath)> {
1318 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1319 self.repositories
1320 .values()
1321 .filter_map(|repo| {
1322 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1323 Some((repo.clone(), repo_path))
1324 })
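            // If several repositories contain the path (nested repositories), prefer the
            // innermost one: its work directory path extends the outer one's and therefore
            // compares greater.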
1325 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1326 }
1327
1328 pub fn git_init(
1329 &self,
1330 path: Arc<Path>,
1331 fallback_branch_name: String,
1332 cx: &App,
1333 ) -> Task<Result<()>> {
1334 match &self.state {
1335 GitStoreState::Local { fs, .. } => {
1336 let fs = fs.clone();
1337 cx.background_executor()
1338 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1339 }
1340 GitStoreState::Ssh {
1341 upstream_client,
1342 upstream_project_id: project_id,
1343 ..
1344 }
1345 | GitStoreState::Remote {
1346 upstream_client,
1347 upstream_project_id: project_id,
1348 ..
1349 } => {
1350 let client = upstream_client.clone();
1351 let project_id = *project_id;
1352 cx.background_executor().spawn(async move {
1353 client
1354 .request(proto::GitInit {
1355 project_id: project_id.0,
1356 abs_path: path.to_string_lossy().to_string(),
1357 fallback_branch_name,
1358 })
1359 .await?;
1360 Ok(())
1361 })
1362 }
1363 }
1364 }
1365
1366 async fn handle_update_repository(
1367 this: Entity<Self>,
1368 envelope: TypedEnvelope<proto::UpdateRepository>,
1369 mut cx: AsyncApp,
1370 ) -> Result<()> {
1371 this.update(&mut cx, |this, cx| {
1372 let mut update = envelope.payload;
1373
1374 let id = RepositoryId::from_proto(update.id);
1375 let client = this
1376 .upstream_client()
1377 .context("no upstream client")?
1378 .clone();
1379
1380 let mut is_new = false;
1381 let repo = this.repositories.entry(id).or_insert_with(|| {
1382 is_new = true;
1383 let git_store = cx.weak_entity();
1384 cx.new(|cx| {
1385 Repository::remote(
1386 id,
1387 Path::new(&update.abs_path).into(),
1388 ProjectId(update.project_id),
1389 client,
1390 git_store,
1391 cx,
1392 )
1393 })
1394 });
1395 if is_new {
1396 this._subscriptions
1397 .push(cx.subscribe(&repo, Self::on_repository_event))
1398 }
1399
1400 repo.update(cx, {
1401 let update = update.clone();
1402 |repo, cx| repo.apply_remote_update(update, cx)
1403 })?;
1404
1405 this.active_repo_id.get_or_insert_with(|| {
1406 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1407 id
1408 });
1409
1410 if let Some((client, project_id)) = this.downstream_client() {
1411 update.project_id = project_id.to_proto();
1412 client.send(update).log_err();
1413 }
1414 Ok(())
1415 })?
1416 }
1417
1418 async fn handle_remove_repository(
1419 this: Entity<Self>,
1420 envelope: TypedEnvelope<proto::RemoveRepository>,
1421 mut cx: AsyncApp,
1422 ) -> Result<()> {
1423 this.update(&mut cx, |this, cx| {
1424 let mut update = envelope.payload;
1425 let id = RepositoryId::from_proto(update.id);
1426 this.repositories.remove(&id);
1427 if let Some((client, project_id)) = this.downstream_client() {
1428 update.project_id = project_id.to_proto();
1429 client.send(update).log_err();
1430 }
1431 if this.active_repo_id == Some(id) {
1432 this.active_repo_id = None;
1433 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1434 }
1435 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1436 })
1437 }
1438
1439 async fn handle_git_init(
1440 this: Entity<Self>,
1441 envelope: TypedEnvelope<proto::GitInit>,
1442 cx: AsyncApp,
1443 ) -> Result<proto::Ack> {
1444 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1445 let name = envelope.payload.fallback_branch_name;
1446 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1447 .await?;
1448
1449 Ok(proto::Ack {})
1450 }
1451
1452 async fn handle_fetch(
1453 this: Entity<Self>,
1454 envelope: TypedEnvelope<proto::Fetch>,
1455 mut cx: AsyncApp,
1456 ) -> Result<proto::RemoteMessageResponse> {
1457 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1458 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1459 let askpass_id = envelope.payload.askpass_id;
1460
1461 let askpass = make_remote_delegate(
1462 this,
1463 envelope.payload.project_id,
1464 repository_id,
1465 askpass_id,
1466 &mut cx,
1467 );
1468
1469 let remote_output = repository_handle
1470 .update(&mut cx, |repository_handle, cx| {
1471 repository_handle.fetch(askpass, cx)
1472 })?
1473 .await??;
1474
1475 Ok(proto::RemoteMessageResponse {
1476 stdout: remote_output.stdout,
1477 stderr: remote_output.stderr,
1478 })
1479 }
1480
1481 async fn handle_push(
1482 this: Entity<Self>,
1483 envelope: TypedEnvelope<proto::Push>,
1484 mut cx: AsyncApp,
1485 ) -> Result<proto::RemoteMessageResponse> {
1486 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1487 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1488
1489 let askpass_id = envelope.payload.askpass_id;
1490 let askpass = make_remote_delegate(
1491 this,
1492 envelope.payload.project_id,
1493 repository_id,
1494 askpass_id,
1495 &mut cx,
1496 );
1497
1498 let options = envelope
1499 .payload
1500 .options
1501 .as_ref()
1502 .map(|_| match envelope.payload.options() {
1503 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1504 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1505 });
1506
1507 let branch_name = envelope.payload.branch_name.into();
1508 let remote_name = envelope.payload.remote_name.into();
1509
1510 let remote_output = repository_handle
1511 .update(&mut cx, |repository_handle, cx| {
1512 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1513 })?
1514 .await??;
1515 Ok(proto::RemoteMessageResponse {
1516 stdout: remote_output.stdout,
1517 stderr: remote_output.stderr,
1518 })
1519 }
1520
1521 async fn handle_pull(
1522 this: Entity<Self>,
1523 envelope: TypedEnvelope<proto::Pull>,
1524 mut cx: AsyncApp,
1525 ) -> Result<proto::RemoteMessageResponse> {
1526 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1527 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1528 let askpass_id = envelope.payload.askpass_id;
1529 let askpass = make_remote_delegate(
1530 this,
1531 envelope.payload.project_id,
1532 repository_id,
1533 askpass_id,
1534 &mut cx,
1535 );
1536
1537 let branch_name = envelope.payload.branch_name.into();
1538 let remote_name = envelope.payload.remote_name.into();
1539
1540 let remote_message = repository_handle
1541 .update(&mut cx, |repository_handle, cx| {
1542 repository_handle.pull(branch_name, remote_name, askpass, cx)
1543 })?
1544 .await??;
1545
1546 Ok(proto::RemoteMessageResponse {
1547 stdout: remote_message.stdout,
1548 stderr: remote_message.stderr,
1549 })
1550 }
1551
1552 async fn handle_stage(
1553 this: Entity<Self>,
1554 envelope: TypedEnvelope<proto::Stage>,
1555 mut cx: AsyncApp,
1556 ) -> Result<proto::Ack> {
1557 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1558 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1559
1560 let entries = envelope
1561 .payload
1562 .paths
1563 .into_iter()
1564 .map(PathBuf::from)
1565 .map(RepoPath::new)
1566 .collect();
1567
1568 repository_handle
1569 .update(&mut cx, |repository_handle, cx| {
1570 repository_handle.stage_entries(entries, cx)
1571 })?
1572 .await?;
1573 Ok(proto::Ack {})
1574 }
1575
1576 async fn handle_unstage(
1577 this: Entity<Self>,
1578 envelope: TypedEnvelope<proto::Unstage>,
1579 mut cx: AsyncApp,
1580 ) -> Result<proto::Ack> {
1581 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1582 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1583
1584 let entries = envelope
1585 .payload
1586 .paths
1587 .into_iter()
1588 .map(PathBuf::from)
1589 .map(RepoPath::new)
1590 .collect();
1591
1592 repository_handle
1593 .update(&mut cx, |repository_handle, cx| {
1594 repository_handle.unstage_entries(entries, cx)
1595 })?
1596 .await?;
1597
1598 Ok(proto::Ack {})
1599 }
1600
1601 async fn handle_set_index_text(
1602 this: Entity<Self>,
1603 envelope: TypedEnvelope<proto::SetIndexText>,
1604 mut cx: AsyncApp,
1605 ) -> Result<proto::Ack> {
1606 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1607 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1608 let repo_path = RepoPath::from_str(&envelope.payload.path);
1609
1610 repository_handle
1611 .update(&mut cx, |repository_handle, cx| {
1612 repository_handle.spawn_set_index_text_job(
1613 repo_path,
1614 envelope.payload.text,
1615 None,
1616 cx,
1617 )
1618 })?
1619 .await??;
1620 Ok(proto::Ack {})
1621 }
1622
1623 async fn handle_commit(
1624 this: Entity<Self>,
1625 envelope: TypedEnvelope<proto::Commit>,
1626 mut cx: AsyncApp,
1627 ) -> Result<proto::Ack> {
1628 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1629 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1630
1631 let message = SharedString::from(envelope.payload.message);
1632 let name = envelope.payload.name.map(SharedString::from);
1633 let email = envelope.payload.email.map(SharedString::from);
1634 let options = envelope.payload.options.unwrap_or_default();
1635
1636 repository_handle
1637 .update(&mut cx, |repository_handle, cx| {
1638 repository_handle.commit(
1639 message,
1640 name.zip(email),
1641 CommitOptions {
1642 amend: options.amend,
1643 },
1644 cx,
1645 )
1646 })?
1647 .await??;
1648 Ok(proto::Ack {})
1649 }
1650
1651 async fn handle_get_remotes(
1652 this: Entity<Self>,
1653 envelope: TypedEnvelope<proto::GetRemotes>,
1654 mut cx: AsyncApp,
1655 ) -> Result<proto::GetRemotesResponse> {
1656 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1657 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1658
1659 let branch_name = envelope.payload.branch_name;
1660
1661 let remotes = repository_handle
1662 .update(&mut cx, |repository_handle, _| {
1663 repository_handle.get_remotes(branch_name)
1664 })?
1665 .await??;
1666
1667 Ok(proto::GetRemotesResponse {
1668 remotes: remotes
1669 .into_iter()
1670 .map(|remotes| proto::get_remotes_response::Remote {
1671 name: remotes.name.to_string(),
1672 })
1673 .collect::<Vec<_>>(),
1674 })
1675 }
1676
1677 async fn handle_get_branches(
1678 this: Entity<Self>,
1679 envelope: TypedEnvelope<proto::GitGetBranches>,
1680 mut cx: AsyncApp,
1681 ) -> Result<proto::GitBranchesResponse> {
1682 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1683 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1684
1685 let branches = repository_handle
1686 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1687 .await??;
1688
1689 Ok(proto::GitBranchesResponse {
1690 branches: branches
1691 .into_iter()
1692 .map(|branch| branch_to_proto(&branch))
1693 .collect::<Vec<_>>(),
1694 })
1695 }
1696 async fn handle_create_branch(
1697 this: Entity<Self>,
1698 envelope: TypedEnvelope<proto::GitCreateBranch>,
1699 mut cx: AsyncApp,
1700 ) -> Result<proto::Ack> {
1701 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1702 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1703 let branch_name = envelope.payload.branch_name;
1704
1705 repository_handle
1706 .update(&mut cx, |repository_handle, _| {
1707 repository_handle.create_branch(branch_name)
1708 })?
1709 .await??;
1710
1711 Ok(proto::Ack {})
1712 }
1713
1714 async fn handle_change_branch(
1715 this: Entity<Self>,
1716 envelope: TypedEnvelope<proto::GitChangeBranch>,
1717 mut cx: AsyncApp,
1718 ) -> Result<proto::Ack> {
1719 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1720 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1721 let branch_name = envelope.payload.branch_name;
1722
1723 repository_handle
1724 .update(&mut cx, |repository_handle, _| {
1725 repository_handle.change_branch(branch_name)
1726 })?
1727 .await??;
1728
1729 Ok(proto::Ack {})
1730 }
1731
1732 async fn handle_show(
1733 this: Entity<Self>,
1734 envelope: TypedEnvelope<proto::GitShow>,
1735 mut cx: AsyncApp,
1736 ) -> Result<proto::GitCommitDetails> {
1737 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1738 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1739
1740 let commit = repository_handle
1741 .update(&mut cx, |repository_handle, _| {
1742 repository_handle.show(envelope.payload.commit)
1743 })?
1744 .await??;
1745 Ok(proto::GitCommitDetails {
1746 sha: commit.sha.into(),
1747 message: commit.message.into(),
1748 commit_timestamp: commit.commit_timestamp,
1749 author_email: commit.author_email.into(),
1750 author_name: commit.author_name.into(),
1751 })
1752 }
1753
1754 async fn handle_load_commit_diff(
1755 this: Entity<Self>,
1756 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1757 mut cx: AsyncApp,
1758 ) -> Result<proto::LoadCommitDiffResponse> {
1759 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1760 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1761
1762 let commit_diff = repository_handle
1763 .update(&mut cx, |repository_handle, _| {
1764 repository_handle.load_commit_diff(envelope.payload.commit)
1765 })?
1766 .await??;
1767 Ok(proto::LoadCommitDiffResponse {
1768 files: commit_diff
1769 .files
1770 .into_iter()
1771 .map(|file| proto::CommitFile {
1772 path: file.path.to_string(),
1773 old_text: file.old_text,
1774 new_text: file.new_text,
1775 })
1776 .collect(),
1777 })
1778 }
1779
1780 async fn handle_reset(
1781 this: Entity<Self>,
1782 envelope: TypedEnvelope<proto::GitReset>,
1783 mut cx: AsyncApp,
1784 ) -> Result<proto::Ack> {
1785 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1786 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1787
1788 let mode = match envelope.payload.mode() {
1789 git_reset::ResetMode::Soft => ResetMode::Soft,
1790 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1791 };
1792
1793 repository_handle
1794 .update(&mut cx, |repository_handle, cx| {
1795 repository_handle.reset(envelope.payload.commit, mode, cx)
1796 })?
1797 .await??;
1798 Ok(proto::Ack {})
1799 }
1800
1801 async fn handle_checkout_files(
1802 this: Entity<Self>,
1803 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
1804 mut cx: AsyncApp,
1805 ) -> Result<proto::Ack> {
1806 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1807 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1808 let paths = envelope
1809 .payload
1810 .paths
1811 .iter()
1812 .map(|s| RepoPath::from_str(s))
1813 .collect();
1814
1815 repository_handle
1816 .update(&mut cx, |repository_handle, cx| {
1817 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
1818 })?
1819 .await??;
1820 Ok(proto::Ack {})
1821 }
1822
1823 async fn handle_open_commit_message_buffer(
1824 this: Entity<Self>,
1825 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
1826 mut cx: AsyncApp,
1827 ) -> Result<proto::OpenBufferResponse> {
1828 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1829 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1830 let buffer = repository
1831 .update(&mut cx, |repository, cx| {
1832 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
1833 })?
1834 .await?;
1835
1836 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
1837 this.update(&mut cx, |this, cx| {
1838 this.buffer_store.update(cx, |buffer_store, cx| {
1839 buffer_store
1840 .create_buffer_for_peer(
1841 &buffer,
1842 envelope.original_sender_id.unwrap_or(envelope.sender_id),
1843 cx,
1844 )
1845 .detach_and_log_err(cx);
1846 })
1847 })?;
1848
1849 Ok(proto::OpenBufferResponse {
1850 buffer_id: buffer_id.to_proto(),
1851 })
1852 }
1853
1854 async fn handle_askpass(
1855 this: Entity<Self>,
1856 envelope: TypedEnvelope<proto::AskPassRequest>,
1857 mut cx: AsyncApp,
1858 ) -> Result<proto::AskPassResponse> {
1859 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1860 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1861
1862 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
1863 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
1864 debug_panic!("no askpass found");
1865 return Err(anyhow::anyhow!("no askpass found"));
1866 };
1867
1868 let response = askpass.ask_password(envelope.payload.prompt).await?;
1869
1870 delegates
1871 .lock()
1872 .insert(envelope.payload.askpass_id, askpass);
1873
1874 Ok(proto::AskPassResponse { response })
1875 }
1876
1877 async fn handle_check_for_pushed_commits(
1878 this: Entity<Self>,
1879 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
1880 mut cx: AsyncApp,
1881 ) -> Result<proto::CheckForPushedCommitsResponse> {
1882 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1883 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1884
1885 let branches = repository_handle
1886 .update(&mut cx, |repository_handle, _| {
1887 repository_handle.check_for_pushed_commits()
1888 })?
1889 .await??;
1890 Ok(proto::CheckForPushedCommitsResponse {
1891 pushed_to: branches
1892 .into_iter()
1893 .map(|commit| commit.to_string())
1894 .collect(),
1895 })
1896 }
1897
1898 async fn handle_git_diff(
1899 this: Entity<Self>,
1900 envelope: TypedEnvelope<proto::GitDiff>,
1901 mut cx: AsyncApp,
1902 ) -> Result<proto::GitDiffResponse> {
1903 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1904 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1905 let diff_type = match envelope.payload.diff_type() {
1906 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
1907 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
1908 };
1909
1910 let mut diff = repository_handle
1911 .update(&mut cx, |repository_handle, cx| {
1912 repository_handle.diff(diff_type, cx)
1913 })?
1914 .await??;
1915 const ONE_MB: usize = 1_000_000;
1916 if diff.len() > ONE_MB {
1917 diff = diff.chars().take(ONE_MB).collect()
1918 }
1919
1920 Ok(proto::GitDiffResponse { diff })
1921 }
1922
1923 async fn handle_open_unstaged_diff(
1924 this: Entity<Self>,
1925 request: TypedEnvelope<proto::OpenUnstagedDiff>,
1926 mut cx: AsyncApp,
1927 ) -> Result<proto::OpenUnstagedDiffResponse> {
1928 let buffer_id = BufferId::new(request.payload.buffer_id)?;
1929 let diff = this
1930 .update(&mut cx, |this, cx| {
1931 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
1932 Some(this.open_unstaged_diff(buffer, cx))
1933 })?
1934 .ok_or_else(|| anyhow!("no such buffer"))?
1935 .await?;
1936 this.update(&mut cx, |this, _| {
1937 let shared_diffs = this
1938 .shared_diffs
1939 .entry(request.original_sender_id.unwrap_or(request.sender_id))
1940 .or_default();
1941 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
1942 })?;
1943 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
1944 Ok(proto::OpenUnstagedDiffResponse { staged_text })
1945 }
1946
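    /// Handles an `OpenUncommittedDiff` request: opens (or reuses) the
    /// uncommitted diff for the buffer, remembers it in `shared_diffs` for the
    /// requesting peer, and replies with the committed and staged base texts.
    /// When the index snapshot is the same as the HEAD snapshot, only the
    /// committed text is sent, with `Mode::IndexMatchesHead`.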
1947 async fn handle_open_uncommitted_diff(
1948 this: Entity<Self>,
1949 request: TypedEnvelope<proto::OpenUncommittedDiff>,
1950 mut cx: AsyncApp,
1951 ) -> Result<proto::OpenUncommittedDiffResponse> {
1952 let buffer_id = BufferId::new(request.payload.buffer_id)?;
1953 let diff = this
1954 .update(&mut cx, |this, cx| {
1955 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
1956 Some(this.open_uncommitted_diff(buffer, cx))
1957 })?
1958 .ok_or_else(|| anyhow!("no such buffer"))?
1959 .await?;
1960 this.update(&mut cx, |this, _| {
1961 let shared_diffs = this
1962 .shared_diffs
1963 .entry(request.original_sender_id.unwrap_or(request.sender_id))
1964 .or_default();
1965 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
1966 })?;
1967 diff.read_with(&cx, |diff, cx| {
1968 use proto::open_uncommitted_diff_response::Mode;
1969
1970 let unstaged_diff = diff.secondary_diff();
1971 let index_snapshot = unstaged_diff.and_then(|diff| {
1972 let diff = diff.read(cx);
1973 diff.base_text_exists().then(|| diff.base_text())
1974 });
1975
1976 let mode;
1977 let staged_text;
1978 let committed_text;
1979 if diff.base_text_exists() {
1980 let committed_snapshot = diff.base_text();
1981 committed_text = Some(committed_snapshot.text());
1982 if let Some(index_text) = index_snapshot {
1983 if index_text.remote_id() == committed_snapshot.remote_id() {
1984 mode = Mode::IndexMatchesHead;
1985 staged_text = None;
1986 } else {
1987 mode = Mode::IndexAndHead;
1988 staged_text = Some(index_text.text());
1989 }
1990 } else {
1991 mode = Mode::IndexAndHead;
1992 staged_text = None;
1993 }
1994 } else {
1995 mode = Mode::IndexAndHead;
1996 committed_text = None;
1997 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
1998 }
1999
2000 proto::OpenUncommittedDiffResponse {
2001 committed_text,
2002 staged_text,
2003 mode: mode.into(),
2004 }
2005 })
2006 }
2007
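    /// Handles an `UpdateDiffBases` message by applying the new index and/or
    /// HEAD base texts to the local `BufferDiffState` for the buffer, if the
    /// buffer and its diff state still exist.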
2008 async fn handle_update_diff_bases(
2009 this: Entity<Self>,
2010 request: TypedEnvelope<proto::UpdateDiffBases>,
2011 mut cx: AsyncApp,
2012 ) -> Result<()> {
2013 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2014 this.update(&mut cx, |this, cx| {
2015 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2016 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2017 let buffer = buffer.read(cx).text_snapshot();
2018 diff_state.update(cx, |diff_state, cx| {
2019 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2020 })
2021 }
2022 }
2023 })
2024 }
2025
2026 async fn handle_blame_buffer(
2027 this: Entity<Self>,
2028 envelope: TypedEnvelope<proto::BlameBuffer>,
2029 mut cx: AsyncApp,
2030 ) -> Result<proto::BlameBufferResponse> {
2031 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2032 let version = deserialize_version(&envelope.payload.version);
2033 let buffer = this.read_with(&cx, |this, cx| {
2034 this.buffer_store.read(cx).get_existing(buffer_id)
2035 })??;
2036 buffer
2037 .update(&mut cx, |buffer, _| {
2038 buffer.wait_for_version(version.clone())
2039 })?
2040 .await?;
2041 let blame = this
2042 .update(&mut cx, |this, cx| {
2043 this.blame_buffer(&buffer, Some(version), cx)
2044 })?
2045 .await?;
2046 Ok(serialize_blame_buffer_response(blame))
2047 }
2048
2049 async fn handle_get_permalink_to_line(
2050 this: Entity<Self>,
2051 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2052 mut cx: AsyncApp,
2053 ) -> Result<proto::GetPermalinkToLineResponse> {
2054 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2056 let selection = {
2057 let proto_selection = envelope
2058 .payload
2059 .selection
2060                .context("no selection defined to get permalink for")?;
2061 proto_selection.start as u32..proto_selection.end as u32
2062 };
2063 let buffer = this.read_with(&cx, |this, cx| {
2064 this.buffer_store.read(cx).get_existing(buffer_id)
2065 })??;
2066 let permalink = this
2067 .update(&mut cx, |this, cx| {
2068 this.get_permalink_to_line(&buffer, selection, cx)
2069 })?
2070 .await?;
2071 Ok(proto::GetPermalinkToLineResponse {
2072 permalink: permalink.to_string(),
2073 })
2074 }
2075
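    /// Looks up the repository handle addressed by an incoming RPC request,
    /// returning an error if the repository is no longer present in this store.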
2076 fn repository_for_request(
2077 this: &Entity<Self>,
2078 id: RepositoryId,
2079 cx: &mut AsyncApp,
2080 ) -> Result<Entity<Repository>> {
2081 this.update(cx, |this, _| {
2082 this.repositories
2083 .get(&id)
2084 .context("missing repository handle")
2085 .cloned()
2086 })?
2087 }
2088
2089 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2090 self.repositories
2091 .iter()
2092 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2093 .collect()
2094 }
2095}
2096
2097impl BufferDiffState {
2098 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2099 self.language = buffer.read(cx).language().cloned();
2100 self.language_changed = true;
2101 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2102 }
2103
2104 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2105 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2106 }
2107
2108 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2109 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2110 }
2111
2112 fn handle_base_texts_updated(
2113 &mut self,
2114 buffer: text::BufferSnapshot,
2115 message: proto::UpdateDiffBases,
2116 cx: &mut Context<Self>,
2117 ) {
2118 use proto::update_diff_bases::Mode;
2119
2120 let Some(mode) = Mode::from_i32(message.mode) else {
2121 return;
2122 };
2123
2124 let diff_bases_change = match mode {
2125 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2126 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2127 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2128 Mode::IndexAndHead => DiffBasesChange::SetEach {
2129 index: message.staged_text,
2130 head: message.committed_text,
2131 },
2132 };
2133
2134 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2135 }
2136
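    /// If a diff recalculation is currently in flight, returns a future that
    /// resolves once it finishes; otherwise returns `None`.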
2137 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2138 if *self.recalculating_tx.borrow() {
2139 let mut rx = self.recalculating_tx.subscribe();
2140 return Some(async move {
2141 loop {
2142 let is_recalculating = rx.recv().await;
2143 if is_recalculating != Some(true) {
2144 break;
2145 }
2146 }
2147 });
2148 } else {
2149 None
2150 }
2151 }
2152
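    /// Applies a change to the stored index and/or HEAD base texts (normalizing
    /// line endings), marks the corresponding texts as changed, and kicks off a
    /// diff recalculation against the given buffer snapshot.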
2153 fn diff_bases_changed(
2154 &mut self,
2155 buffer: text::BufferSnapshot,
2156 diff_bases_change: Option<DiffBasesChange>,
2157 cx: &mut Context<Self>,
2158 ) {
2159 match diff_bases_change {
2160 Some(DiffBasesChange::SetIndex(index)) => {
2161 self.index_text = index.map(|mut index| {
2162 text::LineEnding::normalize(&mut index);
2163 Arc::new(index)
2164 });
2165 self.index_changed = true;
2166 }
2167 Some(DiffBasesChange::SetHead(head)) => {
2168 self.head_text = head.map(|mut head| {
2169 text::LineEnding::normalize(&mut head);
2170 Arc::new(head)
2171 });
2172 self.head_changed = true;
2173 }
2174 Some(DiffBasesChange::SetBoth(text)) => {
2175 let text = text.map(|mut text| {
2176 text::LineEnding::normalize(&mut text);
2177 Arc::new(text)
2178 });
2179 self.head_text = text.clone();
2180 self.index_text = text;
2181 self.head_changed = true;
2182 self.index_changed = true;
2183 }
2184 Some(DiffBasesChange::SetEach { index, head }) => {
2185 self.index_text = index.map(|mut index| {
2186 text::LineEnding::normalize(&mut index);
2187 Arc::new(index)
2188 });
2189 self.index_changed = true;
2190 self.head_text = head.map(|mut head| {
2191 text::LineEnding::normalize(&mut head);
2192 Arc::new(head)
2193 });
2194 self.head_changed = true;
2195 }
2196 None => {}
2197 }
2198
2199 self.recalculate_diffs(buffer, cx)
2200 }
2201
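    /// Spawns a task that recomputes the unstaged and uncommitted diffs from
    /// the current base texts. When the index and HEAD texts are the same
    /// `Arc`, the unstaged diff is reused as the uncommitted diff. The task
    /// bails out early if hunk staging operations are still pending (their
    /// index writes have not yet been observed), leaving a later
    /// recalculation to settle the state.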
2202 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2203 *self.recalculating_tx.borrow_mut() = true;
2204
2205 let language = self.language.clone();
2206 let language_registry = self.language_registry.clone();
2207 let unstaged_diff = self.unstaged_diff();
2208 let uncommitted_diff = self.uncommitted_diff();
2209 let head = self.head_text.clone();
2210 let index = self.index_text.clone();
2211 let index_changed = self.index_changed;
2212 let head_changed = self.head_changed;
2213 let language_changed = self.language_changed;
2214 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2215 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2216 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2217 (None, None) => true,
2218 _ => false,
2219 };
2220 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2221 log::debug!(
2222 "start recalculating diffs for buffer {}",
2223 buffer.remote_id()
2224 );
2225
2226 let mut new_unstaged_diff = None;
2227 if let Some(unstaged_diff) = &unstaged_diff {
2228 new_unstaged_diff = Some(
2229 BufferDiff::update_diff(
2230 unstaged_diff.clone(),
2231 buffer.clone(),
2232 index,
2233 index_changed,
2234 language_changed,
2235 language.clone(),
2236 language_registry.clone(),
2237 cx,
2238 )
2239 .await?,
2240 );
2241 }
2242
2243 let mut new_uncommitted_diff = None;
2244 if let Some(uncommitted_diff) = &uncommitted_diff {
2245 new_uncommitted_diff = if index_matches_head {
2246 new_unstaged_diff.clone()
2247 } else {
2248 Some(
2249 BufferDiff::update_diff(
2250 uncommitted_diff.clone(),
2251 buffer.clone(),
2252 head,
2253 head_changed,
2254 language_changed,
2255 language.clone(),
2256 language_registry.clone(),
2257 cx,
2258 )
2259 .await?,
2260 )
2261 }
2262 }
2263
2264 let cancel = this.update(cx, |this, _| {
2265 // This checks whether all pending stage/unstage operations
2266 // have quiesced (i.e. both the corresponding write and the
2267 // read of that write have completed). If not, then we cancel
2268 // this recalculation attempt to avoid invalidating pending
2269 // state too quickly; another recalculation will come along
2270 // later and clear the pending state once the state of the index has settled.
2271 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2272 *this.recalculating_tx.borrow_mut() = false;
2273 true
2274 } else {
2275 false
2276 }
2277 })?;
2278 if cancel {
2279 log::debug!(
2280 concat!(
2281                        "aborting recalculating diffs for buffer {} ",
2282 "due to subsequent hunk operations",
2283 ),
2284 buffer.remote_id()
2285 );
2286 return Ok(());
2287 }
2288
2289 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2290 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2291 {
2292 unstaged_diff.update(cx, |diff, cx| {
2293 if language_changed {
2294 diff.language_changed(cx);
2295 }
2296 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2297 })?
2298 } else {
2299 None
2300 };
2301
2302 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2303 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2304 {
2305 uncommitted_diff.update(cx, |diff, cx| {
2306 if language_changed {
2307 diff.language_changed(cx);
2308 }
2309 diff.set_snapshot_with_secondary(
2310 new_uncommitted_diff,
2311 &buffer,
2312 unstaged_changed_range,
2313 true,
2314 cx,
2315 );
2316 })?;
2317 }
2318
2319 log::debug!(
2320 "finished recalculating diffs for buffer {}",
2321 buffer.remote_id()
2322 );
2323
2324 if let Some(this) = this.upgrade() {
2325 this.update(cx, |this, _| {
2326 this.index_changed = false;
2327 this.head_changed = false;
2328 this.language_changed = false;
2329 *this.recalculating_tx.borrow_mut() = false;
2330 })?;
2331 }
2332
2333 Ok(())
2334 }));
2335 }
2336}
2337
2338impl Default for BufferDiffState {
2339 fn default() -> Self {
2340 Self {
2341 unstaged_diff: Default::default(),
2342 uncommitted_diff: Default::default(),
2343 recalculate_diff_task: Default::default(),
2344 language: Default::default(),
2345 language_registry: Default::default(),
2346 recalculating_tx: postage::watch::channel_with(false).0,
2347 hunk_staging_operation_count: 0,
2348 hunk_staging_operation_count_as_of_write: 0,
2349 head_text: Default::default(),
2350 index_text: Default::default(),
2351 head_changed: Default::default(),
2352 index_changed: Default::default(),
2353 language_changed: Default::default(),
2354 }
2355 }
2356}
2357
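/// Builds an `AskPassDelegate` that forwards password prompts from a local git
/// operation to the downstream client over RPC and relays the response back.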
2358fn make_remote_delegate(
2359 this: Entity<GitStore>,
2360 project_id: u64,
2361 repository_id: RepositoryId,
2362 askpass_id: u64,
2363 cx: &mut AsyncApp,
2364) -> AskPassDelegate {
2365 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2366 this.update(cx, |this, cx| {
2367 let Some((client, _)) = this.downstream_client() else {
2368 return;
2369 };
2370 let response = client.request(proto::AskPassRequest {
2371 project_id,
2372 repository_id: repository_id.to_proto(),
2373 askpass_id,
2374 prompt,
2375 });
2376 cx.spawn(async move |_, _| {
2377 tx.send(response.await?.response).ok();
2378 anyhow::Ok(())
2379 })
2380 .detach_and_log_err(cx);
2381 })
2382 .log_err();
2383 })
2384}
2385
2386impl RepositoryId {
2387 pub fn to_proto(self) -> u64 {
2388 self.0
2389 }
2390
2391 pub fn from_proto(id: u64) -> Self {
2392 RepositoryId(id)
2393 }
2394}
2395
2396impl RepositorySnapshot {
2397 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2398 Self {
2399 id,
2400 merge_message: None,
2401 statuses_by_path: Default::default(),
2402 work_directory_abs_path,
2403 branch: None,
2404 head_commit: None,
2405 merge_conflicts: Default::default(),
2406 merge_head_shas: Default::default(),
2407 scan_id: 0,
2408 }
2409 }
2410
2411 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2412 proto::UpdateRepository {
2413 branch_summary: self.branch.as_ref().map(branch_to_proto),
2414 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2415 updated_statuses: self
2416 .statuses_by_path
2417 .iter()
2418 .map(|entry| entry.to_proto())
2419 .collect(),
2420 removed_statuses: Default::default(),
2421 current_merge_conflicts: self
2422 .merge_conflicts
2423 .iter()
2424 .map(|repo_path| repo_path.to_proto())
2425 .collect(),
2426 project_id,
2427 id: self.id.to_proto(),
2428 abs_path: self.work_directory_abs_path.to_proto(),
2429 entry_ids: vec![self.id.to_proto()],
2430 scan_id: self.scan_id,
2431 is_last_update: true,
2432 }
2433 }
2434
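    /// Computes an incremental `UpdateRepository` message by diffing this
    /// snapshot's statuses against an older snapshot: entries are walked in
    /// path order, emitting updated statuses for new or changed paths and
    /// removed statuses for paths that no longer have an entry.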
2435 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2436 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2437 let mut removed_statuses: Vec<String> = Vec::new();
2438
2439 let mut new_statuses = self.statuses_by_path.iter().peekable();
2440 let mut old_statuses = old.statuses_by_path.iter().peekable();
2441
2442 let mut current_new_entry = new_statuses.next();
2443 let mut current_old_entry = old_statuses.next();
2444 loop {
2445 match (current_new_entry, current_old_entry) {
2446 (Some(new_entry), Some(old_entry)) => {
2447 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2448 Ordering::Less => {
2449 updated_statuses.push(new_entry.to_proto());
2450 current_new_entry = new_statuses.next();
2451 }
2452 Ordering::Equal => {
2453 if new_entry.status != old_entry.status {
2454 updated_statuses.push(new_entry.to_proto());
2455 }
2456 current_old_entry = old_statuses.next();
2457 current_new_entry = new_statuses.next();
2458 }
2459 Ordering::Greater => {
2460 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2461 current_old_entry = old_statuses.next();
2462 }
2463 }
2464 }
2465 (None, Some(old_entry)) => {
2466 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2467 current_old_entry = old_statuses.next();
2468 }
2469 (Some(new_entry), None) => {
2470 updated_statuses.push(new_entry.to_proto());
2471 current_new_entry = new_statuses.next();
2472 }
2473 (None, None) => break,
2474 }
2475 }
2476
2477 proto::UpdateRepository {
2478 branch_summary: self.branch.as_ref().map(branch_to_proto),
2479 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2480 updated_statuses,
2481 removed_statuses,
2482 current_merge_conflicts: self
2483 .merge_conflicts
2484 .iter()
2485 .map(|path| path.as_ref().to_proto())
2486 .collect(),
2487 project_id,
2488 id: self.id.to_proto(),
2489 abs_path: self.work_directory_abs_path.to_proto(),
2490 entry_ids: vec![],
2491 scan_id: self.scan_id,
2492 is_last_update: true,
2493 }
2494 }
2495
2496 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2497 self.statuses_by_path.iter().cloned()
2498 }
2499
2500 pub fn status_summary(&self) -> GitSummary {
2501 self.statuses_by_path.summary().item_summary
2502 }
2503
2504 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2505 self.statuses_by_path
2506 .get(&PathKey(path.0.clone()), &())
2507 .cloned()
2508 }
2509
2510 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2511 abs_path
2512 .strip_prefix(&self.work_directory_abs_path)
2513 .map(RepoPath::from)
2514 .ok()
2515 }
2516
2517 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2518 self.merge_conflicts.contains(repo_path)
2519 }
2520
2521 /// This is the name that will be displayed in the repository selector for this repository.
2522 pub fn display_name(&self) -> SharedString {
2523 self.work_directory_abs_path
2524 .file_name()
2525 .unwrap_or_default()
2526 .to_string_lossy()
2527 .to_string()
2528 .into()
2529 }
2530}
2531
2532impl Repository {
2533 fn local(
2534 id: RepositoryId,
2535 work_directory_abs_path: Arc<Path>,
2536 dot_git_abs_path: Arc<Path>,
2537 repository_dir_abs_path: Arc<Path>,
2538 common_dir_abs_path: Arc<Path>,
2539 project_environment: WeakEntity<ProjectEnvironment>,
2540 fs: Arc<dyn Fs>,
2541 git_store: WeakEntity<GitStore>,
2542 cx: &mut Context<Self>,
2543 ) -> Self {
2544 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2545 Repository {
2546 this: cx.weak_entity(),
2547 git_store,
2548 snapshot,
2549 commit_message_buffer: None,
2550 askpass_delegates: Default::default(),
2551 paths_needing_status_update: Default::default(),
2552 latest_askpass_id: 0,
2553 job_sender: Repository::spawn_local_git_worker(
2554 work_directory_abs_path,
2555 dot_git_abs_path,
2556 repository_dir_abs_path,
2557 common_dir_abs_path,
2558 project_environment,
2559 fs,
2560 cx,
2561 ),
2562 job_id: 0,
2563 active_jobs: Default::default(),
2564 }
2565 }
2566
2567 fn remote(
2568 id: RepositoryId,
2569 work_directory_abs_path: Arc<Path>,
2570 project_id: ProjectId,
2571 client: AnyProtoClient,
2572 git_store: WeakEntity<GitStore>,
2573 cx: &mut Context<Self>,
2574 ) -> Self {
2575 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2576 Self {
2577 this: cx.weak_entity(),
2578 snapshot,
2579 commit_message_buffer: None,
2580 git_store,
2581 paths_needing_status_update: Default::default(),
2582 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2583 askpass_delegates: Default::default(),
2584 latest_askpass_id: 0,
2585 active_jobs: Default::default(),
2586 job_id: 0,
2587 }
2588 }
2589
2590 pub fn git_store(&self) -> Option<Entity<GitStore>> {
2591 self.git_store.upgrade()
2592 }
2593
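    /// Reloads the index and HEAD base texts for every open buffer belonging to
    /// this repository, computes which bases actually changed on a background
    /// task, forwards those changes to any downstream client, and triggers diff
    /// recalculation for the affected buffers. Only meaningful for local
    /// repositories.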
2594 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
2595 let this = cx.weak_entity();
2596 let git_store = self.git_store.clone();
2597 let _ = self.send_keyed_job(
2598 Some(GitJobKey::ReloadBufferDiffBases),
2599 None,
2600 |state, mut cx| async move {
2601 let RepositoryState::Local { backend, .. } = state else {
2602 log::error!("tried to recompute diffs for a non-local repository");
2603 return Ok(());
2604 };
2605
2606 let Some(this) = this.upgrade() else {
2607 return Ok(());
2608 };
2609
2610 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
2611 git_store.update(cx, |git_store, cx| {
2612 git_store
2613 .diffs
2614 .iter()
2615 .filter_map(|(buffer_id, diff_state)| {
2616 let buffer_store = git_store.buffer_store.read(cx);
2617 let buffer = buffer_store.get(*buffer_id)?;
2618 let file = File::from_dyn(buffer.read(cx).file())?;
2619 let abs_path =
2620 file.worktree.read(cx).absolutize(&file.path).ok()?;
2621 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
2622 log::debug!(
2623 "start reload diff bases for repo path {}",
2624 repo_path.0.display()
2625 );
2626 diff_state.update(cx, |diff_state, _| {
2627 let has_unstaged_diff = diff_state
2628 .unstaged_diff
2629 .as_ref()
2630 .is_some_and(|diff| diff.is_upgradable());
2631 let has_uncommitted_diff = diff_state
2632 .uncommitted_diff
2633 .as_ref()
2634 .is_some_and(|set| set.is_upgradable());
2635
2636 Some((
2637 buffer,
2638 repo_path,
2639 has_unstaged_diff.then(|| diff_state.index_text.clone()),
2640 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
2641 ))
2642 })
2643 })
2644 .collect::<Vec<_>>()
2645 })
2646 })??;
2647
2648 let buffer_diff_base_changes = cx
2649 .background_spawn(async move {
2650 let mut changes = Vec::new();
2651 for (buffer, repo_path, current_index_text, current_head_text) in
2652 &repo_diff_state_updates
2653 {
2654 let index_text = if current_index_text.is_some() {
2655 backend.load_index_text(repo_path.clone()).await
2656 } else {
2657 None
2658 };
2659 let head_text = if current_head_text.is_some() {
2660 backend.load_committed_text(repo_path.clone()).await
2661 } else {
2662 None
2663 };
2664
2665 let change =
2666 match (current_index_text.as_ref(), current_head_text.as_ref()) {
2667 (Some(current_index), Some(current_head)) => {
2668 let index_changed =
2669 index_text.as_ref() != current_index.as_deref();
2670 let head_changed =
2671 head_text.as_ref() != current_head.as_deref();
2672 if index_changed && head_changed {
2673 if index_text == head_text {
2674 Some(DiffBasesChange::SetBoth(head_text))
2675 } else {
2676 Some(DiffBasesChange::SetEach {
2677 index: index_text,
2678 head: head_text,
2679 })
2680 }
2681 } else if index_changed {
2682 Some(DiffBasesChange::SetIndex(index_text))
2683 } else if head_changed {
2684 Some(DiffBasesChange::SetHead(head_text))
2685 } else {
2686 None
2687 }
2688 }
2689 (Some(current_index), None) => {
2690 let index_changed =
2691 index_text.as_ref() != current_index.as_deref();
2692 index_changed
2693 .then_some(DiffBasesChange::SetIndex(index_text))
2694 }
2695 (None, Some(current_head)) => {
2696 let head_changed =
2697 head_text.as_ref() != current_head.as_deref();
2698 head_changed.then_some(DiffBasesChange::SetHead(head_text))
2699 }
2700 (None, None) => None,
2701 };
2702
2703 changes.push((buffer.clone(), change))
2704 }
2705 changes
2706 })
2707 .await;
2708
2709 git_store.update(&mut cx, |git_store, cx| {
2710 for (buffer, diff_bases_change) in buffer_diff_base_changes {
2711 let buffer_snapshot = buffer.read(cx).text_snapshot();
2712 let buffer_id = buffer_snapshot.remote_id();
2713 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
2714 continue;
2715 };
2716
2717 let downstream_client = git_store.downstream_client();
2718 diff_state.update(cx, |diff_state, cx| {
2719 use proto::update_diff_bases::Mode;
2720
2721 if let Some((diff_bases_change, (client, project_id))) =
2722 diff_bases_change.clone().zip(downstream_client)
2723 {
2724 let (staged_text, committed_text, mode) = match diff_bases_change {
2725 DiffBasesChange::SetIndex(index) => {
2726 (index, None, Mode::IndexOnly)
2727 }
2728 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
2729 DiffBasesChange::SetEach { index, head } => {
2730 (index, head, Mode::IndexAndHead)
2731 }
2732 DiffBasesChange::SetBoth(text) => {
2733 (None, text, Mode::IndexMatchesHead)
2734 }
2735 };
2736 client
2737 .send(proto::UpdateDiffBases {
2738 project_id: project_id.to_proto(),
2739 buffer_id: buffer_id.to_proto(),
2740 staged_text,
2741 committed_text,
2742 mode: mode as i32,
2743 })
2744 .log_err();
2745 }
2746
2747 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
2748 });
2749 }
2750 })
2751 },
2752 );
2753 }
2754
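    /// Enqueues a job on this repository's background git worker and returns a
    /// receiver for its result. If `status` is provided, it is shown as an
    /// active job while the work runs.
    ///
    /// A minimal usage sketch (hypothetical caller; the closure receives the
    /// repository state and an `AsyncApp`):
    ///
    /// ```ignore
    /// // List branches on the background worker, surfacing "git branch" as the job status.
    /// let rx = repository.send_job(Some("git branch".into()), |state, _cx| async move {
    ///     match state {
    ///         RepositoryState::Local { backend, .. } => backend.branches().await,
    ///         RepositoryState::Remote { .. } => Err(anyhow!("not supported remotely")),
    ///     }
    /// });
    /// ```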
2755 pub fn send_job<F, Fut, R>(
2756 &mut self,
2757 status: Option<SharedString>,
2758 job: F,
2759 ) -> oneshot::Receiver<R>
2760 where
2761 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2762 Fut: Future<Output = R> + 'static,
2763 R: Send + 'static,
2764 {
2765 self.send_keyed_job(None, status, job)
2766 }
2767
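    /// Like `send_job`, but tags the job with a key. The worker loop skips a
    /// keyed job when another job with the same key is already queued behind
    /// it, so only the most recently enqueued job for a given key runs.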
2768 fn send_keyed_job<F, Fut, R>(
2769 &mut self,
2770 key: Option<GitJobKey>,
2771 status: Option<SharedString>,
2772 job: F,
2773 ) -> oneshot::Receiver<R>
2774 where
2775 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2776 Fut: Future<Output = R> + 'static,
2777 R: Send + 'static,
2778 {
2779 let (result_tx, result_rx) = futures::channel::oneshot::channel();
2780 let job_id = post_inc(&mut self.job_id);
2781 let this = self.this.clone();
2782 self.job_sender
2783 .unbounded_send(GitJob {
2784 key,
2785 job: Box::new(move |state, cx: &mut AsyncApp| {
2786 let job = job(state, cx.clone());
2787 cx.spawn(async move |cx| {
2788 if let Some(s) = status.clone() {
2789 this.update(cx, |this, cx| {
2790 this.active_jobs.insert(
2791 job_id,
2792 JobInfo {
2793 start: Instant::now(),
2794 message: s.clone(),
2795 },
2796 );
2797
2798 cx.notify();
2799 })
2800 .ok();
2801 }
2802 let result = job.await;
2803
2804 this.update(cx, |this, cx| {
2805 this.active_jobs.remove(&job_id);
2806 cx.notify();
2807 })
2808 .ok();
2809
2810 result_tx.send(result).ok();
2811 })
2812 }),
2813 })
2814 .ok();
2815 result_rx
2816 }
2817
2818 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
2819 let Some(git_store) = self.git_store.upgrade() else {
2820 return;
2821 };
2822 let entity = cx.entity();
2823 git_store.update(cx, |git_store, cx| {
2824 let Some((&id, _)) = git_store
2825 .repositories
2826 .iter()
2827 .find(|(_, handle)| *handle == &entity)
2828 else {
2829 return;
2830 };
2831 git_store.active_repo_id = Some(id);
2832 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
2833 });
2834 }
2835
2836 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
2837 self.snapshot.status()
2838 }
2839
2840 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
2841 let git_store = self.git_store.upgrade()?;
2842 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2843 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
2844 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
2845 Some(ProjectPath {
2846 worktree_id: worktree.read(cx).id(),
2847 path: relative_path.into(),
2848 })
2849 }
2850
2851 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
2852 let git_store = self.git_store.upgrade()?;
2853 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2854 let abs_path = worktree_store.absolutize(path, cx)?;
2855 self.snapshot.abs_path_to_repo_path(&abs_path)
2856 }
2857
2858 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
2859 other
2860 .read(cx)
2861 .snapshot
2862 .work_directory_abs_path
2863 .starts_with(&self.snapshot.work_directory_abs_path)
2864 }
2865
2866 pub fn open_commit_buffer(
2867 &mut self,
2868 languages: Option<Arc<LanguageRegistry>>,
2869 buffer_store: Entity<BufferStore>,
2870 cx: &mut Context<Self>,
2871 ) -> Task<Result<Entity<Buffer>>> {
2872 let id = self.id;
2873 if let Some(buffer) = self.commit_message_buffer.clone() {
2874 return Task::ready(Ok(buffer));
2875 }
2876 let this = cx.weak_entity();
2877
2878 let rx = self.send_job(None, move |state, mut cx| async move {
2879 let Some(this) = this.upgrade() else {
2880 bail!("git store was dropped");
2881 };
2882 match state {
2883 RepositoryState::Local { .. } => {
2884 this.update(&mut cx, |_, cx| {
2885 Self::open_local_commit_buffer(languages, buffer_store, cx)
2886 })?
2887 .await
2888 }
2889 RepositoryState::Remote { project_id, client } => {
2890 let request = client.request(proto::OpenCommitMessageBuffer {
2891 project_id: project_id.0,
2892 repository_id: id.to_proto(),
2893 });
2894 let response = request.await.context("requesting to open commit buffer")?;
2895 let buffer_id = BufferId::new(response.buffer_id)?;
2896 let buffer = buffer_store
2897 .update(&mut cx, |buffer_store, cx| {
2898 buffer_store.wait_for_remote_buffer(buffer_id, cx)
2899 })?
2900 .await?;
2901 if let Some(language_registry) = languages {
2902 let git_commit_language =
2903 language_registry.language_for_name("Git Commit").await?;
2904 buffer.update(&mut cx, |buffer, cx| {
2905 buffer.set_language(Some(git_commit_language), cx);
2906 })?;
2907 }
2908 this.update(&mut cx, |this, _| {
2909 this.commit_message_buffer = Some(buffer.clone());
2910 })?;
2911 Ok(buffer)
2912 }
2913 }
2914 });
2915
2916 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
2917 }
2918
2919 fn open_local_commit_buffer(
2920 language_registry: Option<Arc<LanguageRegistry>>,
2921 buffer_store: Entity<BufferStore>,
2922 cx: &mut Context<Self>,
2923 ) -> Task<Result<Entity<Buffer>>> {
2924 cx.spawn(async move |repository, cx| {
2925 let buffer = buffer_store
2926 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
2927 .await?;
2928
2929 if let Some(language_registry) = language_registry {
2930 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
2931 buffer.update(cx, |buffer, cx| {
2932 buffer.set_language(Some(git_commit_language), cx);
2933 })?;
2934 }
2935
2936 repository.update(cx, |repository, _| {
2937 repository.commit_message_buffer = Some(buffer.clone());
2938 })?;
2939 Ok(buffer)
2940 })
2941 }
2942
2943 pub fn checkout_files(
2944 &mut self,
2945 commit: &str,
2946 paths: Vec<RepoPath>,
2947 _cx: &mut App,
2948 ) -> oneshot::Receiver<Result<()>> {
2949 let commit = commit.to_string();
2950 let id = self.id;
2951
2952 self.send_job(
2953 Some(format!("git checkout {}", commit).into()),
2954 move |git_repo, _| async move {
2955 match git_repo {
2956 RepositoryState::Local {
2957 backend,
2958 environment,
2959 ..
2960 } => {
2961 backend
2962 .checkout_files(commit, paths, environment.clone())
2963 .await
2964 }
2965 RepositoryState::Remote { project_id, client } => {
2966 client
2967 .request(proto::GitCheckoutFiles {
2968 project_id: project_id.0,
2969 repository_id: id.to_proto(),
2970 commit,
2971 paths: paths
2972 .into_iter()
2973 .map(|p| p.to_string_lossy().to_string())
2974 .collect(),
2975 })
2976 .await?;
2977
2978 Ok(())
2979 }
2980 }
2981 },
2982 )
2983 }
2984
2985 pub fn reset(
2986 &mut self,
2987 commit: String,
2988 reset_mode: ResetMode,
2989 _cx: &mut App,
2990 ) -> oneshot::Receiver<Result<()>> {
2992 let id = self.id;
2993
2994 self.send_job(None, move |git_repo, _| async move {
2995 match git_repo {
2996 RepositoryState::Local {
2997 backend,
2998 environment,
2999 ..
3000 } => backend.reset(commit, reset_mode, environment).await,
3001 RepositoryState::Remote { project_id, client } => {
3002 client
3003 .request(proto::GitReset {
3004 project_id: project_id.0,
3005 repository_id: id.to_proto(),
3006 commit,
3007 mode: match reset_mode {
3008 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3009 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3010 },
3011 })
3012 .await?;
3013
3014 Ok(())
3015 }
3016 }
3017 })
3018 }
3019
3020 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3021 let id = self.id;
3022 self.send_job(None, move |git_repo, _cx| async move {
3023 match git_repo {
3024 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3025 RepositoryState::Remote { project_id, client } => {
3026 let resp = client
3027 .request(proto::GitShow {
3028 project_id: project_id.0,
3029 repository_id: id.to_proto(),
3030 commit,
3031 })
3032 .await?;
3033
3034 Ok(CommitDetails {
3035 sha: resp.sha.into(),
3036 message: resp.message.into(),
3037 commit_timestamp: resp.commit_timestamp,
3038 author_email: resp.author_email.into(),
3039 author_name: resp.author_name.into(),
3040 })
3041 }
3042 }
3043 })
3044 }
3045
3046 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3047 let id = self.id;
3048 self.send_job(None, move |git_repo, cx| async move {
3049 match git_repo {
3050 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3051 RepositoryState::Remote {
3052 client, project_id, ..
3053 } => {
3054 let response = client
3055 .request(proto::LoadCommitDiff {
3056 project_id: project_id.0,
3057 repository_id: id.to_proto(),
3058 commit,
3059 })
3060 .await?;
3061 Ok(CommitDiff {
3062 files: response
3063 .files
3064 .into_iter()
3065 .map(|file| CommitFile {
3066 path: Path::new(&file.path).into(),
3067 old_text: file.old_text,
3068 new_text: file.new_text,
3069 })
3070 .collect(),
3071 })
3072 }
3073 }
3074 })
3075 }
3076
3077 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3078 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3079 }
3080
3081 pub fn stage_entries(
3082 &self,
3083 entries: Vec<RepoPath>,
3084 cx: &mut Context<Self>,
3085 ) -> Task<anyhow::Result<()>> {
3086 if entries.is_empty() {
3087 return Task::ready(Ok(()));
3088 }
3089 let id = self.id;
3090
3091 let mut save_futures = Vec::new();
3092 if let Some(buffer_store) = self.buffer_store(cx) {
3093 buffer_store.update(cx, |buffer_store, cx| {
3094 for path in &entries {
3095 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3096 continue;
3097 };
3098 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3099 if buffer
3100 .read(cx)
3101 .file()
3102 .map_or(false, |file| file.disk_state().exists())
3103 {
3104 save_futures.push(buffer_store.save_buffer(buffer, cx));
3105 }
3106 }
3107 }
3108 })
3109 }
3110
3111 cx.spawn(async move |this, cx| {
3112 for save_future in save_futures {
3113 save_future.await?;
3114 }
3115
3116 this.update(cx, |this, _| {
3117 this.send_job(None, move |git_repo, _cx| async move {
3118 match git_repo {
3119 RepositoryState::Local {
3120 backend,
3121 environment,
3122 ..
3123 } => backend.stage_paths(entries, environment.clone()).await,
3124 RepositoryState::Remote { project_id, client } => {
3125 client
3126 .request(proto::Stage {
3127 project_id: project_id.0,
3128 repository_id: id.to_proto(),
3129 paths: entries
3130 .into_iter()
3131 .map(|repo_path| repo_path.as_ref().to_proto())
3132 .collect(),
3133 })
3134 .await
3135 .context("sending stage request")?;
3136
3137 Ok(())
3138 }
3139 }
3140 })
3141 })?
3142 .await??;
3143
3144 Ok(())
3145 })
3146 }
3147
3148 pub fn unstage_entries(
3149 &self,
3150 entries: Vec<RepoPath>,
3151 cx: &mut Context<Self>,
3152 ) -> Task<anyhow::Result<()>> {
3153 if entries.is_empty() {
3154 return Task::ready(Ok(()));
3155 }
3156 let id = self.id;
3157
3158 let mut save_futures = Vec::new();
3159 if let Some(buffer_store) = self.buffer_store(cx) {
3160 buffer_store.update(cx, |buffer_store, cx| {
3161 for path in &entries {
3162 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3163 continue;
3164 };
3165 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3166 if buffer
3167 .read(cx)
3168 .file()
3169 .map_or(false, |file| file.disk_state().exists())
3170 {
3171 save_futures.push(buffer_store.save_buffer(buffer, cx));
3172 }
3173 }
3174 }
3175 })
3176 }
3177
3178 cx.spawn(async move |this, cx| {
3179 for save_future in save_futures {
3180 save_future.await?;
3181 }
3182
3183 this.update(cx, |this, _| {
3184 this.send_job(None, move |git_repo, _cx| async move {
3185 match git_repo {
3186 RepositoryState::Local {
3187 backend,
3188 environment,
3189 ..
3190 } => backend.unstage_paths(entries, environment).await,
3191 RepositoryState::Remote { project_id, client } => {
3192 client
3193 .request(proto::Unstage {
3194 project_id: project_id.0,
3195 repository_id: id.to_proto(),
3196 paths: entries
3197 .into_iter()
3198 .map(|repo_path| repo_path.as_ref().to_proto())
3199 .collect(),
3200 })
3201 .await
3202 .context("sending unstage request")?;
3203
3204 Ok(())
3205 }
3206 }
3207 })
3208 })?
3209 .await??;
3210
3211 Ok(())
3212 })
3213 }
3214
3215 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3216 let to_stage = self
3217 .cached_status()
3218 .filter(|entry| !entry.status.staging().is_fully_staged())
3219 .map(|entry| entry.repo_path.clone())
3220 .collect();
3221 self.stage_entries(to_stage, cx)
3222 }
3223
3224 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3225 let to_unstage = self
3226 .cached_status()
3227 .filter(|entry| entry.status.staging().has_staged())
3228 .map(|entry| entry.repo_path.clone())
3229 .collect();
3230 self.unstage_entries(to_unstage, cx)
3231 }
3232
3233 pub fn commit(
3234 &mut self,
3235 message: SharedString,
3236 name_and_email: Option<(SharedString, SharedString)>,
3237 options: CommitOptions,
3238 _cx: &mut App,
3239 ) -> oneshot::Receiver<Result<()>> {
3240 let id = self.id;
3241
3242 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3243 match git_repo {
3244 RepositoryState::Local {
3245 backend,
3246 environment,
3247 ..
3248 } => {
3249 backend
3250 .commit(message, name_and_email, options, environment)
3251 .await
3252 }
3253 RepositoryState::Remote { project_id, client } => {
3254 let (name, email) = name_and_email.unzip();
3255 client
3256 .request(proto::Commit {
3257 project_id: project_id.0,
3258 repository_id: id.to_proto(),
3259 message: String::from(message),
3260 name: name.map(String::from),
3261 email: email.map(String::from),
3262 options: Some(proto::commit::CommitOptions {
3263 amend: options.amend,
3264 }),
3265 })
3266 .await
3267 .context("sending commit request")?;
3268
3269 Ok(())
3270 }
3271 }
3272 })
3273 }
3274
3275 pub fn fetch(
3276 &mut self,
3277 askpass: AskPassDelegate,
3278 _cx: &mut App,
3279 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3280 let askpass_delegates = self.askpass_delegates.clone();
3281 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3282 let id = self.id;
3283
3284 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3285 match git_repo {
3286 RepositoryState::Local {
3287 backend,
3288 environment,
3289 ..
3290 } => backend.fetch(askpass, environment, cx).await,
3291 RepositoryState::Remote { project_id, client } => {
3292 askpass_delegates.lock().insert(askpass_id, askpass);
3293 let _defer = util::defer(|| {
3294 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3295 debug_assert!(askpass_delegate.is_some());
3296 });
3297
3298 let response = client
3299 .request(proto::Fetch {
3300 project_id: project_id.0,
3301 repository_id: id.to_proto(),
3302 askpass_id,
3303 })
3304 .await
3305 .context("sending fetch request")?;
3306
3307 Ok(RemoteCommandOutput {
3308 stdout: response.stdout,
3309 stderr: response.stderr,
3310 })
3311 }
3312 }
3313 })
3314 }
3315
3316 pub fn push(
3317 &mut self,
3318 branch: SharedString,
3319 remote: SharedString,
3320 options: Option<PushOptions>,
3321 askpass: AskPassDelegate,
3322 cx: &mut Context<Self>,
3323 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3324 let askpass_delegates = self.askpass_delegates.clone();
3325 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3326 let id = self.id;
3327
3328 let args = options
3329 .map(|option| match option {
3330 PushOptions::SetUpstream => " --set-upstream",
3331 PushOptions::Force => " --force",
3332 })
3333 .unwrap_or("");
3334
3335 let updates_tx = self
3336 .git_store()
3337 .and_then(|git_store| match &git_store.read(cx).state {
3338 GitStoreState::Local { downstream, .. } => downstream
3339 .as_ref()
3340 .map(|downstream| downstream.updates_tx.clone()),
3341 _ => None,
3342 });
3343
3344 let this = cx.weak_entity();
3345 self.send_job(
3346            Some(format!("git push{} {} {}", args, branch, remote).into()),
3347 move |git_repo, mut cx| async move {
3348 match git_repo {
3349 RepositoryState::Local {
3350 backend,
3351 environment,
3352 ..
3353 } => {
3354 let result = backend
3355 .push(
3356 branch.to_string(),
3357 remote.to_string(),
3358 options,
3359 askpass,
3360 environment.clone(),
3361 cx.clone(),
3362 )
3363 .await;
3364 if result.is_ok() {
3365 let branches = backend.branches().await?;
3366 let branch = branches.into_iter().find(|branch| branch.is_head);
3367 log::info!("head branch after scan is {branch:?}");
3368 let snapshot = this.update(&mut cx, |this, cx| {
3369 this.snapshot.branch = branch;
3370 let snapshot = this.snapshot.clone();
3371 cx.emit(RepositoryEvent::Updated { full_scan: false });
3372 snapshot
3373 })?;
3374 if let Some(updates_tx) = updates_tx {
3375 updates_tx
3376 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3377 .ok();
3378 }
3379 }
3380 result
3381 }
3382 RepositoryState::Remote { project_id, client } => {
3383 askpass_delegates.lock().insert(askpass_id, askpass);
3384 let _defer = util::defer(|| {
3385 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3386 debug_assert!(askpass_delegate.is_some());
3387 });
3388 let response = client
3389 .request(proto::Push {
3390 project_id: project_id.0,
3391 repository_id: id.to_proto(),
3392 askpass_id,
3393 branch_name: branch.to_string(),
3394 remote_name: remote.to_string(),
3395 options: options.map(|options| match options {
3396 PushOptions::Force => proto::push::PushOptions::Force,
3397 PushOptions::SetUpstream => {
3398 proto::push::PushOptions::SetUpstream
3399 }
3400 }
3401 as i32),
3402 })
3403 .await
3404 .context("sending push request")?;
3405
3406 Ok(RemoteCommandOutput {
3407 stdout: response.stdout,
3408 stderr: response.stderr,
3409 })
3410 }
3411 }
3412 },
3413 )
3414 }
3415
3416 pub fn pull(
3417 &mut self,
3418 branch: SharedString,
3419 remote: SharedString,
3420 askpass: AskPassDelegate,
3421 _cx: &mut App,
3422 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3423 let askpass_delegates = self.askpass_delegates.clone();
3424 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3425 let id = self.id;
3426
3427 self.send_job(
3428 Some(format!("git pull {} {}", remote, branch).into()),
3429 move |git_repo, cx| async move {
3430 match git_repo {
3431 RepositoryState::Local {
3432 backend,
3433 environment,
3434 ..
3435 } => {
3436 backend
3437 .pull(
3438 branch.to_string(),
3439 remote.to_string(),
3440 askpass,
3441 environment.clone(),
3442 cx,
3443 )
3444 .await
3445 }
3446 RepositoryState::Remote { project_id, client } => {
3447 askpass_delegates.lock().insert(askpass_id, askpass);
3448 let _defer = util::defer(|| {
3449 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3450 debug_assert!(askpass_delegate.is_some());
3451 });
3452 let response = client
3453 .request(proto::Pull {
3454 project_id: project_id.0,
3455 repository_id: id.to_proto(),
3456 askpass_id,
3457 branch_name: branch.to_string(),
3458 remote_name: remote.to_string(),
3459 })
3460 .await
3461 .context("sending pull request")?;
3462
3463 Ok(RemoteCommandOutput {
3464 stdout: response.stdout,
3465 stderr: response.stderr,
3466 })
3467 }
3468 }
3469 },
3470 )
3471 }
3472
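    /// Enqueues a keyed job that writes new content for `path` into the git
    /// index (or clears the entry when `content` is `None`). Jobs are keyed by
    /// path, so superseded writes for the same path are skipped. When a
    /// hunk-staging operation count is provided, the buffer's
    /// `hunk_staging_operation_count_as_of_write` is updated after a
    /// successful write so pending hunk-staging state can be reconciled.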
3473 fn spawn_set_index_text_job(
3474 &mut self,
3475 path: RepoPath,
3476 content: Option<String>,
3477 hunk_staging_operation_count: Option<usize>,
3478 cx: &mut Context<Self>,
3479 ) -> oneshot::Receiver<anyhow::Result<()>> {
3480 let id = self.id;
3481 let this = cx.weak_entity();
3482 let git_store = self.git_store.clone();
3483 self.send_keyed_job(
3484 Some(GitJobKey::WriteIndex(path.clone())),
3485 None,
3486 move |git_repo, mut cx| async move {
3487 log::debug!("start updating index text for buffer {}", path.display());
3488 match git_repo {
3489 RepositoryState::Local {
3490 backend,
3491 environment,
3492 ..
3493 } => {
3494 backend
3495 .set_index_text(path.clone(), content, environment.clone())
3496 .await?;
3497 }
3498 RepositoryState::Remote { project_id, client } => {
3499 client
3500 .request(proto::SetIndexText {
3501 project_id: project_id.0,
3502 repository_id: id.to_proto(),
3503 path: path.as_ref().to_proto(),
3504 text: content,
3505 })
3506 .await?;
3507 }
3508 }
3509 log::debug!("finish updating index text for buffer {}", path.display());
3510
3511 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
3512 let project_path = this
3513 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
3514 .ok()
3515 .flatten();
3516 git_store.update(&mut cx, |git_store, cx| {
3517 let buffer_id = git_store
3518 .buffer_store
3519 .read(cx)
3520 .get_by_path(&project_path?, cx)?
3521 .read(cx)
3522 .remote_id();
3523 let diff_state = git_store.diffs.get(&buffer_id)?;
3524 diff_state.update(cx, |diff_state, _| {
3525 diff_state.hunk_staging_operation_count_as_of_write =
3526 hunk_staging_operation_count;
3527 });
3528 Some(())
3529 })?;
3530 }
3531 Ok(())
3532 },
3533 )
3534 }
3535
3536 pub fn get_remotes(
3537 &mut self,
3538 branch_name: Option<String>,
3539 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
3540 let id = self.id;
3541 self.send_job(None, move |repo, _cx| async move {
3542 match repo {
3543 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
3544 RepositoryState::Remote { project_id, client } => {
3545 let response = client
3546 .request(proto::GetRemotes {
3547 project_id: project_id.0,
3548 repository_id: id.to_proto(),
3549 branch_name,
3550 })
3551 .await?;
3552
3553 let remotes = response
3554 .remotes
3555 .into_iter()
3556                        .map(|remote| git::repository::Remote {
3557                            name: remote.name.into(),
3558 })
3559 .collect();
3560
3561 Ok(remotes)
3562 }
3563 }
3564 })
3565 }
3566
3567 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
3568 let id = self.id;
3569 self.send_job(None, move |repo, cx| async move {
3570 match repo {
3571 RepositoryState::Local { backend, .. } => {
3572 let backend = backend.clone();
3573 cx.background_spawn(async move { backend.branches().await })
3574 .await
3575 }
3576 RepositoryState::Remote { project_id, client } => {
3577 let response = client
3578 .request(proto::GitGetBranches {
3579 project_id: project_id.0,
3580 repository_id: id.to_proto(),
3581 })
3582 .await?;
3583
3584 let branches = response
3585 .branches
3586 .into_iter()
3587 .map(|branch| proto_to_branch(&branch))
3588 .collect();
3589
3590 Ok(branches)
3591 }
3592 }
3593 })
3594 }
3595
3596 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
3597 let id = self.id;
3598 self.send_job(None, move |repo, _cx| async move {
3599 match repo {
3600 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
3601 RepositoryState::Remote { project_id, client } => {
3602 let response = client
3603 .request(proto::GitDiff {
3604 project_id: project_id.0,
3605 repository_id: id.to_proto(),
3606 diff_type: match diff_type {
3607 DiffType::HeadToIndex => {
3608 proto::git_diff::DiffType::HeadToIndex.into()
3609 }
3610 DiffType::HeadToWorktree => {
3611 proto::git_diff::DiffType::HeadToWorktree.into()
3612 }
3613 },
3614 })
3615 .await?;
3616
3617 Ok(response.diff)
3618 }
3619 }
3620 })
3621 }
3622
3623 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3624 let id = self.id;
3625 self.send_job(
3626 Some(format!("git switch -c {branch_name}").into()),
3627 move |repo, _cx| async move {
3628 match repo {
3629 RepositoryState::Local { backend, .. } => {
3630 backend.create_branch(branch_name).await
3631 }
3632 RepositoryState::Remote { project_id, client } => {
3633 client
3634 .request(proto::GitCreateBranch {
3635 project_id: project_id.0,
3636 repository_id: id.to_proto(),
3637 branch_name,
3638 })
3639 .await?;
3640
3641 Ok(())
3642 }
3643 }
3644 },
3645 )
3646 }
3647
3648 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3649 let id = self.id;
3650 self.send_job(
3651 Some(format!("git switch {branch_name}").into()),
3652 move |repo, _cx| async move {
3653 match repo {
3654 RepositoryState::Local { backend, .. } => {
3655 backend.change_branch(branch_name).await
3656 }
3657 RepositoryState::Remote { project_id, client } => {
3658 client
3659 .request(proto::GitChangeBranch {
3660 project_id: project_id.0,
3661 repository_id: id.to_proto(),
3662 branch_name,
3663 })
3664 .await?;
3665
3666 Ok(())
3667 }
3668 }
3669 },
3670 )
3671 }
3672
3673 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
3674 let id = self.id;
3675 self.send_job(None, move |repo, _cx| async move {
3676 match repo {
3677 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
3678 RepositoryState::Remote { project_id, client } => {
3679 let response = client
3680 .request(proto::CheckForPushedCommits {
3681 project_id: project_id.0,
3682 repository_id: id.to_proto(),
3683 })
3684 .await?;
3685
3686 let branches = response.pushed_to.into_iter().map(Into::into).collect();
3687
3688 Ok(branches)
3689 }
3690 }
3691 })
3692 }
3693
3694 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
3695 self.send_job(None, |repo, _cx| async move {
3696 match repo {
3697 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
3698 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3699 }
3700 })
3701 }
3702
3703 pub fn restore_checkpoint(
3704 &mut self,
3705 checkpoint: GitRepositoryCheckpoint,
3706 ) -> oneshot::Receiver<Result<()>> {
3707 self.send_job(None, move |repo, _cx| async move {
3708 match repo {
3709 RepositoryState::Local { backend, .. } => {
3710 backend.restore_checkpoint(checkpoint).await
3711 }
3712 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3713 }
3714 })
3715 }
3716
3717 pub(crate) fn apply_remote_update(
3718 &mut self,
3719 update: proto::UpdateRepository,
3720 cx: &mut Context<Self>,
3721 ) -> Result<()> {
3722 let conflicted_paths = TreeSet::from_ordered_entries(
3723 update
3724 .current_merge_conflicts
3725 .into_iter()
3726 .map(|path| RepoPath(Path::new(&path).into())),
3727 );
3728 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
3729 self.snapshot.head_commit = update
3730 .head_commit_details
3731 .as_ref()
3732 .map(proto_to_commit_details);
3733
3734 self.snapshot.merge_conflicts = conflicted_paths;
3735
3736 let edits = update
3737 .removed_statuses
3738 .into_iter()
3739 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
3740 .chain(
3741 update
3742 .updated_statuses
3743 .into_iter()
3744 .filter_map(|updated_status| {
3745 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
3746 }),
3747 )
3748 .collect::<Vec<_>>();
3749 self.snapshot.statuses_by_path.edit(edits, &());
3750 if update.is_last_update {
3751 self.snapshot.scan_id = update.scan_id;
3752 }
3753 cx.emit(RepositoryEvent::Updated { full_scan: true });
3754 Ok(())
3755 }
3756
3757 pub fn compare_checkpoints(
3758 &mut self,
3759 left: GitRepositoryCheckpoint,
3760 right: GitRepositoryCheckpoint,
3761 ) -> oneshot::Receiver<Result<bool>> {
3762 self.send_job(None, move |repo, _cx| async move {
3763 match repo {
3764 RepositoryState::Local { backend, .. } => {
3765 backend.compare_checkpoints(left, right).await
3766 }
3767 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3768 }
3769 })
3770 }
3771
3772 pub fn diff_checkpoints(
3773 &mut self,
3774 base_checkpoint: GitRepositoryCheckpoint,
3775 target_checkpoint: GitRepositoryCheckpoint,
3776 ) -> oneshot::Receiver<Result<String>> {
3777 self.send_job(None, move |repo, _cx| async move {
3778 match repo {
3779 RepositoryState::Local { backend, .. } => {
3780 backend
3781 .diff_checkpoints(base_checkpoint, target_checkpoint)
3782 .await
3783 }
3784 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3785 }
3786 })
3787 }
3788
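    /// Enqueues a keyed job that recomputes this repository's snapshot
    /// (statuses, branch, merge state) from the local backend, emits the
    /// resulting events, and forwards the new snapshot downstream if an
    /// updates channel is provided.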
3789 fn schedule_scan(
3790 &mut self,
3791 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
3792 cx: &mut Context<Self>,
3793 ) {
3794 let this = cx.weak_entity();
3795 let _ = self.send_keyed_job(
3796 Some(GitJobKey::ReloadGitState),
3797 None,
3798 |state, mut cx| async move {
3799 let Some(this) = this.upgrade() else {
3800 return Ok(());
3801 };
3802 let RepositoryState::Local { backend, .. } = state else {
3803 bail!("not a local repository")
3804 };
3805 let (snapshot, events) = this
3806 .update(&mut cx, |this, _| {
3807 compute_snapshot(
3808 this.id,
3809 this.work_directory_abs_path.clone(),
3810 this.snapshot.clone(),
3811 backend.clone(),
3812 )
3813 })?
3814 .await?;
3815 this.update(&mut cx, |this, cx| {
3816 this.snapshot = snapshot.clone();
3817 for event in events {
3818 cx.emit(event);
3819 }
3820 })?;
3821 if let Some(updates_tx) = updates_tx {
3822 updates_tx
3823 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3824 .ok();
3825 }
3826 Ok(())
3827 },
3828 );
3829 }
3830
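    /// Spawns the background worker loop for a local repository: it resolves
    /// the directory environment, opens the repository via the filesystem
    /// backend, registers additional git hosting providers, and then drains
    /// queued `GitJob`s one at a time, skipping keyed jobs that have been
    /// superseded by a later job with the same key.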
3831 fn spawn_local_git_worker(
3832 work_directory_abs_path: Arc<Path>,
3833 dot_git_abs_path: Arc<Path>,
3834 _repository_dir_abs_path: Arc<Path>,
3835 _common_dir_abs_path: Arc<Path>,
3836 project_environment: WeakEntity<ProjectEnvironment>,
3837 fs: Arc<dyn Fs>,
3838 cx: &mut Context<Self>,
3839 ) -> mpsc::UnboundedSender<GitJob> {
3840 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
3841
3842 cx.spawn(async move |_, cx| {
3843 let environment = project_environment
3844 .upgrade()
3845 .ok_or_else(|| anyhow!("missing project environment"))?
3846 .update(cx, |project_environment, cx| {
3847 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
3848 })?
3849 .await
3850 .unwrap_or_else(|| {
3851 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
3852 HashMap::default()
3853 });
3854 let backend = cx
3855 .background_spawn(async move {
3856 fs.open_repo(&dot_git_abs_path)
3857 .ok_or_else(|| anyhow!("failed to build repository"))
3858 })
3859 .await?;
3860
3861 if let Some(git_hosting_provider_registry) =
3862 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
3863 {
3864 git_hosting_providers::register_additional_providers(
3865 git_hosting_provider_registry,
3866 backend.clone(),
3867 );
3868 }
3869
3870 let state = RepositoryState::Local {
3871 backend,
3872 environment: Arc::new(environment),
3873 };
3874 let mut jobs = VecDeque::new();
3875 loop {
3876 while let Ok(Some(next_job)) = job_rx.try_next() {
3877 jobs.push_back(next_job);
3878 }
3879
3880 if let Some(job) = jobs.pop_front() {
3881 if let Some(current_key) = &job.key {
3882 if jobs
3883 .iter()
3884 .any(|other_job| other_job.key.as_ref() == Some(current_key))
3885 {
3886 continue;
3887 }
3888 }
3889 (job.job)(state.clone(), cx).await;
3890 } else if let Some(job) = job_rx.next().await {
3891 jobs.push_back(job);
3892 } else {
3893 break;
3894 }
3895 }
3896 anyhow::Ok(())
3897 })
3898 .detach_and_log_err(cx);
3899
3900 job_tx
3901 }
3902
    fn spawn_remote_git_worker(
        project_id: ProjectId,
        client: AnyProtoClient,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote { project_id, client };
            let mut jobs = VecDeque::new();
            loop {
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    if let Some(current_key) = &job.key {
                        if jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                        {
                            continue;
                        }
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }

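    /// Loads the index (staged) text for the given path, either from the
    /// local backend or, for remote repositories, via an `OpenUnstagedDiff`
    /// request to the host.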
    fn load_staged_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<Option<String>>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local { backend, .. } => {
                    anyhow::Ok(backend.load_index_text(repo_path).await)
                }
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::OpenUnstagedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    Ok(response.staged_text)
                }
            }
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

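    /// Loads the committed (HEAD) and staged (index) texts for the given path
    /// and reports them as a `DiffBasesChange`. When the two are equal, they
    /// are reported as a single `SetBoth` value rather than two separate
    /// texts.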
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local { backend, .. } => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote { project_id, client } => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    let mode =
                        Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

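    /// Records that the given paths may have changed on disk and schedules a
    /// keyed job to refresh their git statuses. Statuses that actually changed
    /// are edited into the snapshot, the scan id is bumped, and the updated
    /// snapshot is sent downstream when `updates_tx` is provided.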
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        self.paths_needing_status_update.extend(paths);

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local { backend, .. } = state else {
                    bail!("not a local repository")
                };

                let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
                let statuses = backend.status(&paths).await?;

                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());

                        for (repo_path, status) in &*statuses.entries {
                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) {
                                if cursor.item().is_some_and(|entry| entry.status == *status) {
                                    continue;
                                }
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                            }));
                        }
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());
                        for path in changed_paths.iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) {
                                changed_path_statuses.push(Edit::Remove(PathKey(path.0.clone())));
                            }
                        }
                        changed_path_statuses
                    })
                    .await;

                this.update(&mut cx, |this, cx| {
                    if !changed_path_statuses.is_empty() {
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, &());
                        this.snapshot.scan_id += 1;
                        if let Some(updates_tx) = updates_tx {
                            updates_tx
                                .unbounded_send(DownstreamUpdate::UpdateRepository(
                                    this.snapshot.clone(),
                                ))
                                .ok();
                        }
                    }
                    cx.emit(RepositoryEvent::Updated { full_scan: false });
                })
            },
        );
    }

    /// Returns the currently running git command and when it started, if any.
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }

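    /// Enqueues a no-op job and returns a receiver that resolves once every
    /// job submitted before it has finished, which is useful for waiting until
    /// the worker queue has drained.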
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
}

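/// Builds a permalink for a file that lives inside an extracted crate in the
/// Rust registry source cache. The crate's `.cargo_vcs_info.json` supplies the
/// commit SHA and the package's path within its repository, and the repository
/// URL is read from the crate's `Cargo.toml`.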
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams {
            sha: &cargo_vcs_info.git.sha1,
            path: &path.to_string_lossy(),
            selection: Some(selection),
        },
    );
    Ok(permalink)
}

fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author.clone(),
            author_mail: entry.author_mail.clone(),
            author_time: entry.author_time,
            author_tz: entry.author_tz.clone(),
            committer: entry.committer_name.clone(),
            committer_mail: entry.committer_email.clone(),
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz.clone(),
            summary: entry.summary.clone(),
            previous: entry.previous.clone(),
            filename: entry.filename.clone(),
        })
        .collect::<Vec<_>>();

    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse {
            entries,
            messages,
            remote_url: blame.remote_url,
        }),
    }
}

fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame {
        entries,
        messages,
        remote_url: response.remote_url,
    })
}

fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        name: branch.name.to_string(),
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
            }),
    }
}

fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        name: proto.name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                has_parent: true,
            }
        }),
    }
}

fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
    proto::GitCommitDetails {
        sha: commit.sha.to_string(),
        message: commit.message.to_string(),
        commit_timestamp: commit.commit_timestamp,
        author_email: commit.author_email.to_string(),
        author_name: commit.author_name.to_string(),
    }
}

fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
    CommitDetails {
        sha: proto.sha.clone().into(),
        message: proto.message.clone().into(),
        commit_timestamp: proto.commit_timestamp,
        author_email: proto.author_email.clone().into(),
        author_name: proto.author_name.clone().into(),
    }
}

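/// Reads the repository's current branch, head commit, statuses, and merge
/// state from the backend and produces a new `RepositorySnapshot`, along with
/// the `RepositoryEvent`s implied by comparing it against `prev_snapshot`.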
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    let statuses = backend.status(&[WORK_DIRECTORY_REPO_PATH.clone()]).await?;
    let merge_message = backend
        .merge_message()
        .await
        .and_then(|msg| Some(msg.lines().next()?.to_owned().into()));
    let merge_head_shas = backend
        .merge_head_shas()
        .into_iter()
        .map(SharedString::from)
        .collect();

    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        &(),
    );

    let merge_head_shas_changed = merge_head_shas != prev_snapshot.merge_head_shas;

    if merge_head_shas_changed
        || branch != prev_snapshot.branch
        || statuses_by_path != prev_snapshot.statuses_by_path
    {
        events.push(RepositoryEvent::Updated { full_scan: true });
    }

    let mut current_merge_conflicts = TreeSet::default();
    for (repo_path, status) in statuses.entries.iter() {
        if status.is_conflicted() {
            current_merge_conflicts.insert(repo_path.clone());
        }
    }

    // Cache merge conflict paths so they don't change from staging/unstaging,
    // until the merge heads change (at commit time, etc.).
    let mut merge_conflicts = prev_snapshot.merge_conflicts.clone();
    if merge_head_shas_changed {
        merge_conflicts = current_merge_conflicts;
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    // Resolve the head commit directly; this is needed when `branch` is
    // `None`, e.g. in a detached HEAD state.
    let head_commit = match backend.head_sha() {
        Some(head_sha) => backend.show(head_sha).await.ok(),
        None => None,
    };

    let snapshot = RepositorySnapshot {
        id,
        merge_message,
        statuses_by_path,
        work_directory_abs_path,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge_conflicts,
        merge_head_shas,
    };

    Ok((snapshot, events))
}

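/// Converts a protobuf file status into a `FileStatus`, falling back to the
/// flat `simple_status` code when no structured variant is present.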
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}