1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 stash::{GitStash, StashEntry},
32 status::{
33 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
34 },
35};
36use gpui::{
37 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
38 WeakEntity,
39};
40use language::{
41 Buffer, BufferEvent, Language, LanguageRegistry,
42 proto::{deserialize_version, serialize_version},
43};
44use parking_lot::Mutex;
45use postage::stream::Stream as _;
46use rpc::{
47 AnyProtoClient, TypedEnvelope,
48 proto::{self, git_reset, split_repository_update},
49};
50use serde::Deserialize;
51use std::{
52 cmp::Ordering,
53 collections::{BTreeSet, VecDeque},
54 future::Future,
55 mem,
56 ops::Range,
57 path::{Path, PathBuf},
58 sync::{
59 Arc,
60 atomic::{self, AtomicU64},
61 },
62 time::Instant,
63};
64use sum_tree::{Edit, SumTree, TreeSet};
65use task::Shell;
66use text::{Bias, BufferId};
67use util::{
68 ResultExt, debug_panic,
69 paths::{PathStyle, SanitizedPath},
70 post_inc,
71 rel_path::RelPath,
72};
73use worktree::{
74 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
75 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
76};
77use zeroize::Zeroize;
78
79pub struct GitStore {
80 state: GitStoreState,
81 buffer_store: Entity<BufferStore>,
82 worktree_store: Entity<WorktreeStore>,
83 repositories: HashMap<RepositoryId, Entity<Repository>>,
84 active_repo_id: Option<RepositoryId>,
85 #[allow(clippy::type_complexity)]
86 loading_diffs:
87 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
88 diffs: HashMap<BufferId, Entity<BufferGitState>>,
89 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
90 _subscriptions: Vec<Subscription>,
91}
92
93#[derive(Default)]
94struct SharedDiffs {
95 unstaged: Option<Entity<BufferDiff>>,
96 uncommitted: Option<Entity<BufferDiff>>,
97}
98
99struct BufferGitState {
100 unstaged_diff: Option<WeakEntity<BufferDiff>>,
101 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
102 conflict_set: Option<WeakEntity<ConflictSet>>,
103 recalculate_diff_task: Option<Task<Result<()>>>,
104 reparse_conflict_markers_task: Option<Task<Result<()>>>,
105 language: Option<Arc<Language>>,
106 language_registry: Option<Arc<LanguageRegistry>>,
107 conflict_updated_futures: Vec<oneshot::Sender<()>>,
108 recalculating_tx: postage::watch::Sender<bool>,
109
110 /// These operation counts are used to ensure that head and index text
111 /// values read from the git repository are up-to-date with any hunk staging
112 /// operations that have been performed on the BufferDiff.
113 ///
114 /// The operation count is incremented immediately when the user initiates a
115 /// hunk stage/unstage operation. Then, upon finishing writing the new index
116 /// text do disk, the `operation count as of write` is updated to reflect
117 /// the operation count that prompted the write.
118 hunk_staging_operation_count: usize,
119 hunk_staging_operation_count_as_of_write: usize,
120
121 head_text: Option<Arc<String>>,
122 index_text: Option<Arc<String>>,
123 head_changed: bool,
124 index_changed: bool,
125 language_changed: bool,
126}
127
128#[derive(Clone, Debug)]
129enum DiffBasesChange {
130 SetIndex(Option<String>),
131 SetHead(Option<String>),
132 SetEach {
133 index: Option<String>,
134 head: Option<String>,
135 },
136 SetBoth(Option<String>),
137}
138
139#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
140enum DiffKind {
141 Unstaged,
142 Uncommitted,
143}
144
145enum GitStoreState {
146 Local {
147 next_repository_id: Arc<AtomicU64>,
148 downstream: Option<LocalDownstreamState>,
149 project_environment: Entity<ProjectEnvironment>,
150 fs: Arc<dyn Fs>,
151 },
152 Remote {
153 upstream_client: AnyProtoClient,
154 upstream_project_id: u64,
155 downstream: Option<(AnyProtoClient, ProjectId)>,
156 },
157}
158
159enum DownstreamUpdate {
160 UpdateRepository(RepositorySnapshot),
161 RemoveRepository(RepositoryId),
162}
163
164struct LocalDownstreamState {
165 client: AnyProtoClient,
166 project_id: ProjectId,
167 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
168 _task: Task<Result<()>>,
169}
170
171#[derive(Clone, Debug)]
172pub struct GitStoreCheckpoint {
173 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
174}
175
176#[derive(Clone, Debug, PartialEq, Eq)]
177pub struct StatusEntry {
178 pub repo_path: RepoPath,
179 pub status: FileStatus,
180}
181
182impl StatusEntry {
183 fn to_proto(&self) -> proto::StatusEntry {
184 let simple_status = match self.status {
185 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
186 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
187 FileStatus::Tracked(TrackedStatus {
188 index_status,
189 worktree_status,
190 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
191 worktree_status
192 } else {
193 index_status
194 }),
195 };
196
197 proto::StatusEntry {
198 repo_path: self.repo_path.to_proto(),
199 simple_status,
200 status: Some(status_to_proto(self.status)),
201 }
202 }
203}
204
205impl TryFrom<proto::StatusEntry> for StatusEntry {
206 type Error = anyhow::Error;
207
208 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
209 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
210 let status = status_from_proto(value.simple_status, value.status)?;
211 Ok(Self { repo_path, status })
212 }
213}
214
215impl sum_tree::Item for StatusEntry {
216 type Summary = PathSummary<GitSummary>;
217
218 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
219 PathSummary {
220 max_path: self.repo_path.0.clone(),
221 item_summary: self.status.summary(),
222 }
223 }
224}
225
226impl sum_tree::KeyedItem for StatusEntry {
227 type Key = PathKey;
228
229 fn key(&self) -> Self::Key {
230 PathKey(self.repo_path.0.clone())
231 }
232}
233
234#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
235pub struct RepositoryId(pub u64);
236
237#[derive(Clone, Debug, Default, PartialEq, Eq)]
238pub struct MergeDetails {
239 pub conflicted_paths: TreeSet<RepoPath>,
240 pub message: Option<SharedString>,
241 pub heads: Vec<Option<SharedString>>,
242}
243
244#[derive(Clone, Debug, PartialEq, Eq)]
245pub struct RepositorySnapshot {
246 pub id: RepositoryId,
247 pub statuses_by_path: SumTree<StatusEntry>,
248 pub work_directory_abs_path: Arc<Path>,
249 pub path_style: PathStyle,
250 pub branch: Option<Branch>,
251 pub head_commit: Option<CommitDetails>,
252 pub scan_id: u64,
253 pub merge: MergeDetails,
254 pub remote_origin_url: Option<String>,
255 pub remote_upstream_url: Option<String>,
256 pub stash_entries: GitStash,
257}
258
259type JobId = u64;
260
261#[derive(Clone, Debug, PartialEq, Eq)]
262pub struct JobInfo {
263 pub start: Instant,
264 pub message: SharedString,
265}
266
267pub struct Repository {
268 this: WeakEntity<Self>,
269 snapshot: RepositorySnapshot,
270 commit_message_buffer: Option<Entity<Buffer>>,
271 git_store: WeakEntity<GitStore>,
272 // For a local repository, holds paths that have had worktree events since the last status scan completed,
273 // and that should be examined during the next status scan.
274 paths_needing_status_update: BTreeSet<RepoPath>,
275 job_sender: mpsc::UnboundedSender<GitJob>,
276 active_jobs: HashMap<JobId, JobInfo>,
277 job_id: JobId,
278 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
279 latest_askpass_id: u64,
280}
281
282impl std::ops::Deref for Repository {
283 type Target = RepositorySnapshot;
284
285 fn deref(&self) -> &Self::Target {
286 &self.snapshot
287 }
288}
289
290#[derive(Clone)]
291pub enum RepositoryState {
292 Local {
293 backend: Arc<dyn GitRepository>,
294 environment: Arc<HashMap<String, String>>,
295 },
296 Remote {
297 project_id: ProjectId,
298 client: AnyProtoClient,
299 },
300}
301
302#[derive(Clone, Debug, PartialEq, Eq)]
303pub enum RepositoryEvent {
304 StatusesChanged {
305 // TODO could report which statuses changed here
306 full_scan: bool,
307 },
308 MergeHeadsChanged,
309 BranchChanged,
310 StashEntriesChanged,
311}
312
313#[derive(Clone, Debug)]
314pub struct JobsUpdated;
315
316#[derive(Debug)]
317pub enum GitStoreEvent {
318 ActiveRepositoryChanged(Option<RepositoryId>),
319 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
320 RepositoryAdded,
321 RepositoryRemoved(RepositoryId),
322 IndexWriteError(anyhow::Error),
323 JobsUpdated,
324 ConflictsUpdated,
325}
326
327impl EventEmitter<RepositoryEvent> for Repository {}
328impl EventEmitter<JobsUpdated> for Repository {}
329impl EventEmitter<GitStoreEvent> for GitStore {}
330
331pub struct GitJob {
332 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
333 key: Option<GitJobKey>,
334}
335
336#[derive(PartialEq, Eq)]
337enum GitJobKey {
338 WriteIndex(RepoPath),
339 ReloadBufferDiffBases,
340 RefreshStatuses,
341 ReloadGitState,
342}
343
344impl GitStore {
345 pub fn local(
346 worktree_store: &Entity<WorktreeStore>,
347 buffer_store: Entity<BufferStore>,
348 environment: Entity<ProjectEnvironment>,
349 fs: Arc<dyn Fs>,
350 cx: &mut Context<Self>,
351 ) -> Self {
352 Self::new(
353 worktree_store.clone(),
354 buffer_store,
355 GitStoreState::Local {
356 next_repository_id: Arc::new(AtomicU64::new(1)),
357 downstream: None,
358 project_environment: environment,
359 fs,
360 },
361 cx,
362 )
363 }
364
365 pub fn remote(
366 worktree_store: &Entity<WorktreeStore>,
367 buffer_store: Entity<BufferStore>,
368 upstream_client: AnyProtoClient,
369 project_id: u64,
370 cx: &mut Context<Self>,
371 ) -> Self {
372 Self::new(
373 worktree_store.clone(),
374 buffer_store,
375 GitStoreState::Remote {
376 upstream_client,
377 upstream_project_id: project_id,
378 downstream: None,
379 },
380 cx,
381 )
382 }
383
384 fn new(
385 worktree_store: Entity<WorktreeStore>,
386 buffer_store: Entity<BufferStore>,
387 state: GitStoreState,
388 cx: &mut Context<Self>,
389 ) -> Self {
390 let _subscriptions = vec![
391 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
392 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
393 ];
394
395 GitStore {
396 state,
397 buffer_store,
398 worktree_store,
399 repositories: HashMap::default(),
400 active_repo_id: None,
401 _subscriptions,
402 loading_diffs: HashMap::default(),
403 shared_diffs: HashMap::default(),
404 diffs: HashMap::default(),
405 }
406 }
407
408 pub fn init(client: &AnyProtoClient) {
409 client.add_entity_request_handler(Self::handle_get_remotes);
410 client.add_entity_request_handler(Self::handle_get_branches);
411 client.add_entity_request_handler(Self::handle_get_default_branch);
412 client.add_entity_request_handler(Self::handle_change_branch);
413 client.add_entity_request_handler(Self::handle_create_branch);
414 client.add_entity_request_handler(Self::handle_rename_branch);
415 client.add_entity_request_handler(Self::handle_git_init);
416 client.add_entity_request_handler(Self::handle_push);
417 client.add_entity_request_handler(Self::handle_pull);
418 client.add_entity_request_handler(Self::handle_fetch);
419 client.add_entity_request_handler(Self::handle_stage);
420 client.add_entity_request_handler(Self::handle_unstage);
421 client.add_entity_request_handler(Self::handle_stash);
422 client.add_entity_request_handler(Self::handle_stash_pop);
423 client.add_entity_request_handler(Self::handle_stash_apply);
424 client.add_entity_request_handler(Self::handle_stash_drop);
425 client.add_entity_request_handler(Self::handle_commit);
426 client.add_entity_request_handler(Self::handle_reset);
427 client.add_entity_request_handler(Self::handle_show);
428 client.add_entity_request_handler(Self::handle_load_commit_diff);
429 client.add_entity_request_handler(Self::handle_checkout_files);
430 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
431 client.add_entity_request_handler(Self::handle_set_index_text);
432 client.add_entity_request_handler(Self::handle_askpass);
433 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
434 client.add_entity_request_handler(Self::handle_git_diff);
435 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
436 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
437 client.add_entity_message_handler(Self::handle_update_diff_bases);
438 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
439 client.add_entity_request_handler(Self::handle_blame_buffer);
440 client.add_entity_message_handler(Self::handle_update_repository);
441 client.add_entity_message_handler(Self::handle_remove_repository);
442 client.add_entity_request_handler(Self::handle_git_clone);
443 }
444
445 pub fn is_local(&self) -> bool {
446 matches!(self.state, GitStoreState::Local { .. })
447 }
448 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
449 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
450 let id = repo.read(cx).id;
451 if self.active_repo_id != Some(id) {
452 self.active_repo_id = Some(id);
453 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
454 }
455 }
456 }
457
458 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
459 match &mut self.state {
460 GitStoreState::Remote {
461 downstream: downstream_client,
462 ..
463 } => {
464 for repo in self.repositories.values() {
465 let update = repo.read(cx).snapshot.initial_update(project_id);
466 for update in split_repository_update(update) {
467 client.send(update).log_err();
468 }
469 }
470 *downstream_client = Some((client, ProjectId(project_id)));
471 }
472 GitStoreState::Local {
473 downstream: downstream_client,
474 ..
475 } => {
476 let mut snapshots = HashMap::default();
477 let (updates_tx, mut updates_rx) = mpsc::unbounded();
478 for repo in self.repositories.values() {
479 updates_tx
480 .unbounded_send(DownstreamUpdate::UpdateRepository(
481 repo.read(cx).snapshot.clone(),
482 ))
483 .ok();
484 }
485 *downstream_client = Some(LocalDownstreamState {
486 client: client.clone(),
487 project_id: ProjectId(project_id),
488 updates_tx,
489 _task: cx.spawn(async move |this, cx| {
490 cx.background_spawn(async move {
491 while let Some(update) = updates_rx.next().await {
492 match update {
493 DownstreamUpdate::UpdateRepository(snapshot) => {
494 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
495 {
496 let update =
497 snapshot.build_update(old_snapshot, project_id);
498 *old_snapshot = snapshot;
499 for update in split_repository_update(update) {
500 client.send(update)?;
501 }
502 } else {
503 let update = snapshot.initial_update(project_id);
504 for update in split_repository_update(update) {
505 client.send(update)?;
506 }
507 snapshots.insert(snapshot.id, snapshot);
508 }
509 }
510 DownstreamUpdate::RemoveRepository(id) => {
511 client.send(proto::RemoveRepository {
512 project_id,
513 id: id.to_proto(),
514 })?;
515 }
516 }
517 }
518 anyhow::Ok(())
519 })
520 .await
521 .ok();
522 this.update(cx, |this, _| {
523 if let GitStoreState::Local {
524 downstream: downstream_client,
525 ..
526 } = &mut this.state
527 {
528 downstream_client.take();
529 } else {
530 unreachable!("unshared called on remote store");
531 }
532 })
533 }),
534 });
535 }
536 }
537 }
538
539 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
540 match &mut self.state {
541 GitStoreState::Local {
542 downstream: downstream_client,
543 ..
544 } => {
545 downstream_client.take();
546 }
547 GitStoreState::Remote {
548 downstream: downstream_client,
549 ..
550 } => {
551 downstream_client.take();
552 }
553 }
554 self.shared_diffs.clear();
555 }
556
557 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
558 self.shared_diffs.remove(peer_id);
559 }
560
561 pub fn active_repository(&self) -> Option<Entity<Repository>> {
562 self.active_repo_id
563 .as_ref()
564 .map(|id| self.repositories[id].clone())
565 }
566
567 pub fn open_unstaged_diff(
568 &mut self,
569 buffer: Entity<Buffer>,
570 cx: &mut Context<Self>,
571 ) -> Task<Result<Entity<BufferDiff>>> {
572 let buffer_id = buffer.read(cx).remote_id();
573 if let Some(diff_state) = self.diffs.get(&buffer_id)
574 && let Some(unstaged_diff) = diff_state
575 .read(cx)
576 .unstaged_diff
577 .as_ref()
578 .and_then(|weak| weak.upgrade())
579 {
580 if let Some(task) =
581 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
582 {
583 return cx.background_executor().spawn(async move {
584 task.await;
585 Ok(unstaged_diff)
586 });
587 }
588 return Task::ready(Ok(unstaged_diff));
589 }
590
591 let Some((repo, repo_path)) =
592 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
593 else {
594 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
595 };
596
597 let task = self
598 .loading_diffs
599 .entry((buffer_id, DiffKind::Unstaged))
600 .or_insert_with(|| {
601 let staged_text = repo.update(cx, |repo, cx| {
602 repo.load_staged_text(buffer_id, repo_path, cx)
603 });
604 cx.spawn(async move |this, cx| {
605 Self::open_diff_internal(
606 this,
607 DiffKind::Unstaged,
608 staged_text.await.map(DiffBasesChange::SetIndex),
609 buffer,
610 cx,
611 )
612 .await
613 .map_err(Arc::new)
614 })
615 .shared()
616 })
617 .clone();
618
619 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
620 }
621
622 pub fn open_uncommitted_diff(
623 &mut self,
624 buffer: Entity<Buffer>,
625 cx: &mut Context<Self>,
626 ) -> Task<Result<Entity<BufferDiff>>> {
627 let buffer_id = buffer.read(cx).remote_id();
628
629 if let Some(diff_state) = self.diffs.get(&buffer_id)
630 && let Some(uncommitted_diff) = diff_state
631 .read(cx)
632 .uncommitted_diff
633 .as_ref()
634 .and_then(|weak| weak.upgrade())
635 {
636 if let Some(task) =
637 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
638 {
639 return cx.background_executor().spawn(async move {
640 task.await;
641 Ok(uncommitted_diff)
642 });
643 }
644 return Task::ready(Ok(uncommitted_diff));
645 }
646
647 let Some((repo, repo_path)) =
648 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
649 else {
650 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
651 };
652
653 let task = self
654 .loading_diffs
655 .entry((buffer_id, DiffKind::Uncommitted))
656 .or_insert_with(|| {
657 let changes = repo.update(cx, |repo, cx| {
658 repo.load_committed_text(buffer_id, repo_path, cx)
659 });
660
661 cx.spawn(async move |this, cx| {
662 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
663 .await
664 .map_err(Arc::new)
665 })
666 .shared()
667 })
668 .clone();
669
670 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
671 }
672
673 async fn open_diff_internal(
674 this: WeakEntity<Self>,
675 kind: DiffKind,
676 texts: Result<DiffBasesChange>,
677 buffer_entity: Entity<Buffer>,
678 cx: &mut AsyncApp,
679 ) -> Result<Entity<BufferDiff>> {
680 let diff_bases_change = match texts {
681 Err(e) => {
682 this.update(cx, |this, cx| {
683 let buffer = buffer_entity.read(cx);
684 let buffer_id = buffer.remote_id();
685 this.loading_diffs.remove(&(buffer_id, kind));
686 })?;
687 return Err(e);
688 }
689 Ok(change) => change,
690 };
691
692 this.update(cx, |this, cx| {
693 let buffer = buffer_entity.read(cx);
694 let buffer_id = buffer.remote_id();
695 let language = buffer.language().cloned();
696 let language_registry = buffer.language_registry();
697 let text_snapshot = buffer.text_snapshot();
698 this.loading_diffs.remove(&(buffer_id, kind));
699
700 let git_store = cx.weak_entity();
701 let diff_state = this
702 .diffs
703 .entry(buffer_id)
704 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
705
706 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
707
708 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
709 diff_state.update(cx, |diff_state, cx| {
710 diff_state.language = language;
711 diff_state.language_registry = language_registry;
712
713 match kind {
714 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
715 DiffKind::Uncommitted => {
716 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
717 diff
718 } else {
719 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
720 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
721 unstaged_diff
722 };
723
724 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
725 diff_state.uncommitted_diff = Some(diff.downgrade())
726 }
727 }
728
729 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
730 let rx = diff_state.wait_for_recalculation();
731
732 anyhow::Ok(async move {
733 if let Some(rx) = rx {
734 rx.await;
735 }
736 Ok(diff)
737 })
738 })
739 })??
740 .await
741 }
742
743 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
744 let diff_state = self.diffs.get(&buffer_id)?;
745 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
746 }
747
748 pub fn get_uncommitted_diff(
749 &self,
750 buffer_id: BufferId,
751 cx: &App,
752 ) -> Option<Entity<BufferDiff>> {
753 let diff_state = self.diffs.get(&buffer_id)?;
754 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
755 }
756
757 pub fn open_conflict_set(
758 &mut self,
759 buffer: Entity<Buffer>,
760 cx: &mut Context<Self>,
761 ) -> Entity<ConflictSet> {
762 log::debug!("open conflict set");
763 let buffer_id = buffer.read(cx).remote_id();
764
765 if let Some(git_state) = self.diffs.get(&buffer_id)
766 && let Some(conflict_set) = git_state
767 .read(cx)
768 .conflict_set
769 .as_ref()
770 .and_then(|weak| weak.upgrade())
771 {
772 let conflict_set = conflict_set;
773 let buffer_snapshot = buffer.read(cx).text_snapshot();
774
775 git_state.update(cx, |state, cx| {
776 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
777 });
778
779 return conflict_set;
780 }
781
782 let is_unmerged = self
783 .repository_and_path_for_buffer_id(buffer_id, cx)
784 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
785 let git_store = cx.weak_entity();
786 let buffer_git_state = self
787 .diffs
788 .entry(buffer_id)
789 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
790 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
791
792 self._subscriptions
793 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
794 cx.emit(GitStoreEvent::ConflictsUpdated);
795 }));
796
797 buffer_git_state.update(cx, |state, cx| {
798 state.conflict_set = Some(conflict_set.downgrade());
799 let buffer_snapshot = buffer.read(cx).text_snapshot();
800 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
801 });
802
803 conflict_set
804 }
805
806 pub fn project_path_git_status(
807 &self,
808 project_path: &ProjectPath,
809 cx: &App,
810 ) -> Option<FileStatus> {
811 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
812 Some(repo.read(cx).status_for_path(&repo_path)?.status)
813 }
814
815 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
816 let mut work_directory_abs_paths = Vec::new();
817 let mut checkpoints = Vec::new();
818 for repository in self.repositories.values() {
819 repository.update(cx, |repository, _| {
820 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
821 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
822 });
823 }
824
825 cx.background_executor().spawn(async move {
826 let checkpoints = future::try_join_all(checkpoints).await?;
827 Ok(GitStoreCheckpoint {
828 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
829 .into_iter()
830 .zip(checkpoints)
831 .collect(),
832 })
833 })
834 }
835
836 pub fn restore_checkpoint(
837 &self,
838 checkpoint: GitStoreCheckpoint,
839 cx: &mut App,
840 ) -> Task<Result<()>> {
841 let repositories_by_work_dir_abs_path = self
842 .repositories
843 .values()
844 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
845 .collect::<HashMap<_, _>>();
846
847 let mut tasks = Vec::new();
848 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
849 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
850 let restore = repository.update(cx, |repository, _| {
851 repository.restore_checkpoint(checkpoint)
852 });
853 tasks.push(async move { restore.await? });
854 }
855 }
856 cx.background_spawn(async move {
857 future::try_join_all(tasks).await?;
858 Ok(())
859 })
860 }
861
862 /// Compares two checkpoints, returning true if they are equal.
863 pub fn compare_checkpoints(
864 &self,
865 left: GitStoreCheckpoint,
866 mut right: GitStoreCheckpoint,
867 cx: &mut App,
868 ) -> Task<Result<bool>> {
869 let repositories_by_work_dir_abs_path = self
870 .repositories
871 .values()
872 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
873 .collect::<HashMap<_, _>>();
874
875 let mut tasks = Vec::new();
876 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
877 if let Some(right_checkpoint) = right
878 .checkpoints_by_work_dir_abs_path
879 .remove(&work_dir_abs_path)
880 {
881 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
882 {
883 let compare = repository.update(cx, |repository, _| {
884 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
885 });
886
887 tasks.push(async move { compare.await? });
888 }
889 } else {
890 return Task::ready(Ok(false));
891 }
892 }
893 cx.background_spawn(async move {
894 Ok(future::try_join_all(tasks)
895 .await?
896 .into_iter()
897 .all(|result| result))
898 })
899 }
900
901 /// Blames a buffer.
902 pub fn blame_buffer(
903 &self,
904 buffer: &Entity<Buffer>,
905 version: Option<clock::Global>,
906 cx: &mut App,
907 ) -> Task<Result<Option<Blame>>> {
908 let buffer = buffer.read(cx);
909 let Some((repo, repo_path)) =
910 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
911 else {
912 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
913 };
914 let content = match &version {
915 Some(version) => buffer.rope_for_version(version),
916 None => buffer.as_rope().clone(),
917 };
918 let version = version.unwrap_or(buffer.version());
919 let buffer_id = buffer.remote_id();
920
921 let rx = repo.update(cx, |repo, _| {
922 repo.send_job(None, move |state, _| async move {
923 match state {
924 RepositoryState::Local { backend, .. } => backend
925 .blame(repo_path.clone(), content)
926 .await
927 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
928 .map(Some),
929 RepositoryState::Remote { project_id, client } => {
930 let response = client
931 .request(proto::BlameBuffer {
932 project_id: project_id.to_proto(),
933 buffer_id: buffer_id.into(),
934 version: serialize_version(&version),
935 })
936 .await?;
937 Ok(deserialize_blame_buffer_response(response))
938 }
939 }
940 })
941 });
942
943 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
944 }
945
946 pub fn get_permalink_to_line(
947 &self,
948 buffer: &Entity<Buffer>,
949 selection: Range<u32>,
950 cx: &mut App,
951 ) -> Task<Result<url::Url>> {
952 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
953 return Task::ready(Err(anyhow!("buffer has no file")));
954 };
955
956 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
957 &(file.worktree.read(cx).id(), file.path.clone()).into(),
958 cx,
959 ) else {
960 // If we're not in a Git repo, check whether this is a Rust source
961 // file in the Cargo registry (presumably opened with go-to-definition
962 // from a normal Rust file). If so, we can put together a permalink
963 // using crate metadata.
964 if buffer
965 .read(cx)
966 .language()
967 .is_none_or(|lang| lang.name() != "Rust".into())
968 {
969 return Task::ready(Err(anyhow!("no permalink available")));
970 }
971 let file_path = file.worktree.read(cx).absolutize(&file.path);
972 return cx.spawn(async move |cx| {
973 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
974 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
975 .context("no permalink available")
976 });
977 };
978
979 let buffer_id = buffer.read(cx).remote_id();
980 let branch = repo.read(cx).branch.clone();
981 let remote = branch
982 .as_ref()
983 .and_then(|b| b.upstream.as_ref())
984 .and_then(|b| b.remote_name())
985 .unwrap_or("origin")
986 .to_string();
987
988 let rx = repo.update(cx, |repo, _| {
989 repo.send_job(None, move |state, cx| async move {
990 match state {
991 RepositoryState::Local { backend, .. } => {
992 let origin_url = backend
993 .remote_url(&remote)
994 .with_context(|| format!("remote \"{remote}\" not found"))?;
995
996 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
997
998 let provider_registry =
999 cx.update(GitHostingProviderRegistry::default_global)?;
1000
1001 let (provider, remote) =
1002 parse_git_remote_url(provider_registry, &origin_url)
1003 .context("parsing Git remote URL")?;
1004
1005 Ok(provider.build_permalink(
1006 remote,
1007 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1008 ))
1009 }
1010 RepositoryState::Remote { project_id, client } => {
1011 let response = client
1012 .request(proto::GetPermalinkToLine {
1013 project_id: project_id.to_proto(),
1014 buffer_id: buffer_id.into(),
1015 selection: Some(proto::Range {
1016 start: selection.start as u64,
1017 end: selection.end as u64,
1018 }),
1019 })
1020 .await?;
1021
1022 url::Url::parse(&response.permalink).context("failed to parse permalink")
1023 }
1024 }
1025 })
1026 });
1027 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1028 }
1029
1030 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1031 match &self.state {
1032 GitStoreState::Local {
1033 downstream: downstream_client,
1034 ..
1035 } => downstream_client
1036 .as_ref()
1037 .map(|state| (state.client.clone(), state.project_id)),
1038 GitStoreState::Remote {
1039 downstream: downstream_client,
1040 ..
1041 } => downstream_client.clone(),
1042 }
1043 }
1044
1045 fn upstream_client(&self) -> Option<AnyProtoClient> {
1046 match &self.state {
1047 GitStoreState::Local { .. } => None,
1048 GitStoreState::Remote {
1049 upstream_client, ..
1050 } => Some(upstream_client.clone()),
1051 }
1052 }
1053
1054 fn on_worktree_store_event(
1055 &mut self,
1056 worktree_store: Entity<WorktreeStore>,
1057 event: &WorktreeStoreEvent,
1058 cx: &mut Context<Self>,
1059 ) {
1060 let GitStoreState::Local {
1061 project_environment,
1062 downstream,
1063 next_repository_id,
1064 fs,
1065 } = &self.state
1066 else {
1067 return;
1068 };
1069
1070 match event {
1071 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1072 if let Some(worktree) = self
1073 .worktree_store
1074 .read(cx)
1075 .worktree_for_id(*worktree_id, cx)
1076 {
1077 let paths_by_git_repo =
1078 self.process_updated_entries(&worktree, updated_entries, cx);
1079 let downstream = downstream
1080 .as_ref()
1081 .map(|downstream| downstream.updates_tx.clone());
1082 cx.spawn(async move |_, cx| {
1083 let paths_by_git_repo = paths_by_git_repo.await;
1084 for (repo, paths) in paths_by_git_repo {
1085 repo.update(cx, |repo, cx| {
1086 repo.paths_changed(paths, downstream.clone(), cx);
1087 })
1088 .ok();
1089 }
1090 })
1091 .detach();
1092 }
1093 }
1094 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1095 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1096 else {
1097 return;
1098 };
1099 if !worktree.read(cx).is_visible() {
1100 log::debug!(
1101 "not adding repositories for local worktree {:?} because it's not visible",
1102 worktree.read(cx).abs_path()
1103 );
1104 return;
1105 }
1106 self.update_repositories_from_worktree(
1107 project_environment.clone(),
1108 next_repository_id.clone(),
1109 downstream
1110 .as_ref()
1111 .map(|downstream| downstream.updates_tx.clone()),
1112 changed_repos.clone(),
1113 fs.clone(),
1114 cx,
1115 );
1116 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1117 }
1118 _ => {}
1119 }
1120 }
1121 fn on_repository_event(
1122 &mut self,
1123 repo: Entity<Repository>,
1124 event: &RepositoryEvent,
1125 cx: &mut Context<Self>,
1126 ) {
1127 let id = repo.read(cx).id;
1128 let repo_snapshot = repo.read(cx).snapshot.clone();
1129 for (buffer_id, diff) in self.diffs.iter() {
1130 if let Some((buffer_repo, repo_path)) =
1131 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1132 && buffer_repo == repo
1133 {
1134 diff.update(cx, |diff, cx| {
1135 if let Some(conflict_set) = &diff.conflict_set {
1136 let conflict_status_changed =
1137 conflict_set.update(cx, |conflict_set, cx| {
1138 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1139 conflict_set.set_has_conflict(has_conflict, cx)
1140 })?;
1141 if conflict_status_changed {
1142 let buffer_store = self.buffer_store.read(cx);
1143 if let Some(buffer) = buffer_store.get(*buffer_id) {
1144 let _ = diff
1145 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1146 }
1147 }
1148 }
1149 anyhow::Ok(())
1150 })
1151 .ok();
1152 }
1153 }
1154 cx.emit(GitStoreEvent::RepositoryUpdated(
1155 id,
1156 event.clone(),
1157 self.active_repo_id == Some(id),
1158 ))
1159 }
1160
1161 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1162 cx.emit(GitStoreEvent::JobsUpdated)
1163 }
1164
1165 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1166 fn update_repositories_from_worktree(
1167 &mut self,
1168 project_environment: Entity<ProjectEnvironment>,
1169 next_repository_id: Arc<AtomicU64>,
1170 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1171 updated_git_repositories: UpdatedGitRepositoriesSet,
1172 fs: Arc<dyn Fs>,
1173 cx: &mut Context<Self>,
1174 ) {
1175 let mut removed_ids = Vec::new();
1176 for update in updated_git_repositories.iter() {
1177 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1178 let existing_work_directory_abs_path =
1179 repo.read(cx).work_directory_abs_path.clone();
1180 Some(&existing_work_directory_abs_path)
1181 == update.old_work_directory_abs_path.as_ref()
1182 || Some(&existing_work_directory_abs_path)
1183 == update.new_work_directory_abs_path.as_ref()
1184 }) {
1185 if let Some(new_work_directory_abs_path) =
1186 update.new_work_directory_abs_path.clone()
1187 {
1188 existing.update(cx, |existing, cx| {
1189 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1190 existing.schedule_scan(updates_tx.clone(), cx);
1191 });
1192 } else {
1193 removed_ids.push(*id);
1194 }
1195 } else if let UpdatedGitRepository {
1196 new_work_directory_abs_path: Some(work_directory_abs_path),
1197 dot_git_abs_path: Some(dot_git_abs_path),
1198 repository_dir_abs_path: Some(repository_dir_abs_path),
1199 common_dir_abs_path: Some(common_dir_abs_path),
1200 ..
1201 } = update
1202 {
1203 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1204 let git_store = cx.weak_entity();
1205 let repo = cx.new(|cx| {
1206 let mut repo = Repository::local(
1207 id,
1208 work_directory_abs_path.clone(),
1209 dot_git_abs_path.clone(),
1210 repository_dir_abs_path.clone(),
1211 common_dir_abs_path.clone(),
1212 project_environment.downgrade(),
1213 fs.clone(),
1214 git_store,
1215 cx,
1216 );
1217 repo.schedule_scan(updates_tx.clone(), cx);
1218 repo
1219 });
1220 self._subscriptions
1221 .push(cx.subscribe(&repo, Self::on_repository_event));
1222 self._subscriptions
1223 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1224 self.repositories.insert(id, repo);
1225 cx.emit(GitStoreEvent::RepositoryAdded);
1226 self.active_repo_id.get_or_insert_with(|| {
1227 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1228 id
1229 });
1230 }
1231 }
1232
1233 for id in removed_ids {
1234 if self.active_repo_id == Some(id) {
1235 self.active_repo_id = None;
1236 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1237 }
1238 self.repositories.remove(&id);
1239 if let Some(updates_tx) = updates_tx.as_ref() {
1240 updates_tx
1241 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1242 .ok();
1243 }
1244 }
1245 }
1246
1247 fn on_buffer_store_event(
1248 &mut self,
1249 _: Entity<BufferStore>,
1250 event: &BufferStoreEvent,
1251 cx: &mut Context<Self>,
1252 ) {
1253 match event {
1254 BufferStoreEvent::BufferAdded(buffer) => {
1255 cx.subscribe(buffer, |this, buffer, event, cx| {
1256 if let BufferEvent::LanguageChanged = event {
1257 let buffer_id = buffer.read(cx).remote_id();
1258 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1259 diff_state.update(cx, |diff_state, cx| {
1260 diff_state.buffer_language_changed(buffer, cx);
1261 });
1262 }
1263 }
1264 })
1265 .detach();
1266 }
1267 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1268 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1269 diffs.remove(buffer_id);
1270 }
1271 }
1272 BufferStoreEvent::BufferDropped(buffer_id) => {
1273 self.diffs.remove(buffer_id);
1274 for diffs in self.shared_diffs.values_mut() {
1275 diffs.remove(buffer_id);
1276 }
1277 }
1278
1279 _ => {}
1280 }
1281 }
1282
1283 pub fn recalculate_buffer_diffs(
1284 &mut self,
1285 buffers: Vec<Entity<Buffer>>,
1286 cx: &mut Context<Self>,
1287 ) -> impl Future<Output = ()> + use<> {
1288 let mut futures = Vec::new();
1289 for buffer in buffers {
1290 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1291 let buffer = buffer.read(cx).text_snapshot();
1292 diff_state.update(cx, |diff_state, cx| {
1293 diff_state.recalculate_diffs(buffer.clone(), cx);
1294 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1295 });
1296 futures.push(diff_state.update(cx, |diff_state, cx| {
1297 diff_state
1298 .reparse_conflict_markers(buffer, cx)
1299 .map(|_| {})
1300 .boxed()
1301 }));
1302 }
1303 }
1304 async move {
1305 futures::future::join_all(futures).await;
1306 }
1307 }
1308
1309 fn on_buffer_diff_event(
1310 &mut self,
1311 diff: Entity<buffer_diff::BufferDiff>,
1312 event: &BufferDiffEvent,
1313 cx: &mut Context<Self>,
1314 ) {
1315 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1316 let buffer_id = diff.read(cx).buffer_id;
1317 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1318 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1319 diff_state.hunk_staging_operation_count += 1;
1320 diff_state.hunk_staging_operation_count
1321 });
1322 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1323 let recv = repo.update(cx, |repo, cx| {
1324 log::debug!("hunks changed for {}", path.as_unix_str());
1325 repo.spawn_set_index_text_job(
1326 path,
1327 new_index_text.as_ref().map(|rope| rope.to_string()),
1328 Some(hunk_staging_operation_count),
1329 cx,
1330 )
1331 });
1332 let diff = diff.downgrade();
1333 cx.spawn(async move |this, cx| {
1334 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1335 diff.update(cx, |diff, cx| {
1336 diff.clear_pending_hunks(cx);
1337 })
1338 .ok();
1339 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1340 .ok();
1341 }
1342 })
1343 .detach();
1344 }
1345 }
1346 }
1347 }
1348
1349 fn local_worktree_git_repos_changed(
1350 &mut self,
1351 worktree: Entity<Worktree>,
1352 changed_repos: &UpdatedGitRepositoriesSet,
1353 cx: &mut Context<Self>,
1354 ) {
1355 log::debug!("local worktree repos changed");
1356 debug_assert!(worktree.read(cx).is_local());
1357
1358 for repository in self.repositories.values() {
1359 repository.update(cx, |repository, cx| {
1360 let repo_abs_path = &repository.work_directory_abs_path;
1361 if changed_repos.iter().any(|update| {
1362 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1363 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1364 }) {
1365 repository.reload_buffer_diff_bases(cx);
1366 }
1367 });
1368 }
1369 }
1370
1371 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1372 &self.repositories
1373 }
1374
1375 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1376 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1377 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1378 Some(status.status)
1379 }
1380
1381 pub fn repository_and_path_for_buffer_id(
1382 &self,
1383 buffer_id: BufferId,
1384 cx: &App,
1385 ) -> Option<(Entity<Repository>, RepoPath)> {
1386 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1387 let project_path = buffer.read(cx).project_path(cx)?;
1388 self.repository_and_path_for_project_path(&project_path, cx)
1389 }
1390
1391 pub fn repository_and_path_for_project_path(
1392 &self,
1393 path: &ProjectPath,
1394 cx: &App,
1395 ) -> Option<(Entity<Repository>, RepoPath)> {
1396 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1397 self.repositories
1398 .values()
1399 .filter_map(|repo| {
1400 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1401 Some((repo.clone(), repo_path))
1402 })
1403 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1404 }
1405
1406 pub fn git_init(
1407 &self,
1408 path: Arc<Path>,
1409 fallback_branch_name: String,
1410 cx: &App,
1411 ) -> Task<Result<()>> {
1412 match &self.state {
1413 GitStoreState::Local { fs, .. } => {
1414 let fs = fs.clone();
1415 cx.background_executor()
1416 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1417 }
1418 GitStoreState::Remote {
1419 upstream_client,
1420 upstream_project_id: project_id,
1421 ..
1422 } => {
1423 let client = upstream_client.clone();
1424 let project_id = *project_id;
1425 cx.background_executor().spawn(async move {
1426 client
1427 .request(proto::GitInit {
1428 project_id: project_id,
1429 abs_path: path.to_string_lossy().into_owned(),
1430 fallback_branch_name,
1431 })
1432 .await?;
1433 Ok(())
1434 })
1435 }
1436 }
1437 }
1438
1439 pub fn git_clone(
1440 &self,
1441 repo: String,
1442 path: impl Into<Arc<std::path::Path>>,
1443 cx: &App,
1444 ) -> Task<Result<()>> {
1445 let path = path.into();
1446 match &self.state {
1447 GitStoreState::Local { fs, .. } => {
1448 let fs = fs.clone();
1449 cx.background_executor()
1450 .spawn(async move { fs.git_clone(&repo, &path).await })
1451 }
1452 GitStoreState::Remote {
1453 upstream_client,
1454 upstream_project_id,
1455 ..
1456 } => {
1457 if upstream_client.is_via_collab() {
1458 return Task::ready(Err(anyhow!(
1459 "Git Clone isn't supported for project guests"
1460 )));
1461 }
1462 let request = upstream_client.request(proto::GitClone {
1463 project_id: *upstream_project_id,
1464 abs_path: path.to_string_lossy().into_owned(),
1465 remote_repo: repo,
1466 });
1467
1468 cx.background_spawn(async move {
1469 let result = request.await?;
1470
1471 match result.success {
1472 true => Ok(()),
1473 false => Err(anyhow!("Git Clone failed")),
1474 }
1475 })
1476 }
1477 }
1478 }
1479
1480 async fn handle_update_repository(
1481 this: Entity<Self>,
1482 envelope: TypedEnvelope<proto::UpdateRepository>,
1483 mut cx: AsyncApp,
1484 ) -> Result<()> {
1485 this.update(&mut cx, |this, cx| {
1486 let path_style = this.worktree_store.read(cx).path_style();
1487 let mut update = envelope.payload;
1488
1489 let id = RepositoryId::from_proto(update.id);
1490 let client = this.upstream_client().context("no upstream client")?;
1491
1492 let mut repo_subscription = None;
1493 let repo = this.repositories.entry(id).or_insert_with(|| {
1494 let git_store = cx.weak_entity();
1495 let repo = cx.new(|cx| {
1496 Repository::remote(
1497 id,
1498 Path::new(&update.abs_path).into(),
1499 path_style,
1500 ProjectId(update.project_id),
1501 client,
1502 git_store,
1503 cx,
1504 )
1505 });
1506 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1507 cx.emit(GitStoreEvent::RepositoryAdded);
1508 repo
1509 });
1510 this._subscriptions.extend(repo_subscription);
1511
1512 repo.update(cx, {
1513 let update = update.clone();
1514 |repo, cx| repo.apply_remote_update(update, cx)
1515 })?;
1516
1517 this.active_repo_id.get_or_insert_with(|| {
1518 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1519 id
1520 });
1521
1522 if let Some((client, project_id)) = this.downstream_client() {
1523 update.project_id = project_id.to_proto();
1524 client.send(update).log_err();
1525 }
1526 Ok(())
1527 })?
1528 }
1529
1530 async fn handle_remove_repository(
1531 this: Entity<Self>,
1532 envelope: TypedEnvelope<proto::RemoveRepository>,
1533 mut cx: AsyncApp,
1534 ) -> Result<()> {
1535 this.update(&mut cx, |this, cx| {
1536 let mut update = envelope.payload;
1537 let id = RepositoryId::from_proto(update.id);
1538 this.repositories.remove(&id);
1539 if let Some((client, project_id)) = this.downstream_client() {
1540 update.project_id = project_id.to_proto();
1541 client.send(update).log_err();
1542 }
1543 if this.active_repo_id == Some(id) {
1544 this.active_repo_id = None;
1545 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1546 }
1547 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1548 })
1549 }
1550
1551 async fn handle_git_init(
1552 this: Entity<Self>,
1553 envelope: TypedEnvelope<proto::GitInit>,
1554 cx: AsyncApp,
1555 ) -> Result<proto::Ack> {
1556 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1557 let name = envelope.payload.fallback_branch_name;
1558 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1559 .await?;
1560
1561 Ok(proto::Ack {})
1562 }
1563
1564 async fn handle_git_clone(
1565 this: Entity<Self>,
1566 envelope: TypedEnvelope<proto::GitClone>,
1567 cx: AsyncApp,
1568 ) -> Result<proto::GitCloneResponse> {
1569 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1570 let repo_name = envelope.payload.remote_repo;
1571 let result = cx
1572 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1573 .await;
1574
1575 Ok(proto::GitCloneResponse {
1576 success: result.is_ok(),
1577 })
1578 }
1579
1580 async fn handle_fetch(
1581 this: Entity<Self>,
1582 envelope: TypedEnvelope<proto::Fetch>,
1583 mut cx: AsyncApp,
1584 ) -> Result<proto::RemoteMessageResponse> {
1585 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1586 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1587 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1588 let askpass_id = envelope.payload.askpass_id;
1589
1590 let askpass = make_remote_delegate(
1591 this,
1592 envelope.payload.project_id,
1593 repository_id,
1594 askpass_id,
1595 &mut cx,
1596 );
1597
1598 let remote_output = repository_handle
1599 .update(&mut cx, |repository_handle, cx| {
1600 repository_handle.fetch(fetch_options, askpass, cx)
1601 })?
1602 .await??;
1603
1604 Ok(proto::RemoteMessageResponse {
1605 stdout: remote_output.stdout,
1606 stderr: remote_output.stderr,
1607 })
1608 }
1609
1610 async fn handle_push(
1611 this: Entity<Self>,
1612 envelope: TypedEnvelope<proto::Push>,
1613 mut cx: AsyncApp,
1614 ) -> Result<proto::RemoteMessageResponse> {
1615 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1616 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1617
1618 let askpass_id = envelope.payload.askpass_id;
1619 let askpass = make_remote_delegate(
1620 this,
1621 envelope.payload.project_id,
1622 repository_id,
1623 askpass_id,
1624 &mut cx,
1625 );
1626
1627 let options = envelope
1628 .payload
1629 .options
1630 .as_ref()
1631 .map(|_| match envelope.payload.options() {
1632 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1633 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1634 });
1635
1636 let branch_name = envelope.payload.branch_name.into();
1637 let remote_name = envelope.payload.remote_name.into();
1638
1639 let remote_output = repository_handle
1640 .update(&mut cx, |repository_handle, cx| {
1641 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1642 })?
1643 .await??;
1644 Ok(proto::RemoteMessageResponse {
1645 stdout: remote_output.stdout,
1646 stderr: remote_output.stderr,
1647 })
1648 }
1649
1650 async fn handle_pull(
1651 this: Entity<Self>,
1652 envelope: TypedEnvelope<proto::Pull>,
1653 mut cx: AsyncApp,
1654 ) -> Result<proto::RemoteMessageResponse> {
1655 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1656 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1657 let askpass_id = envelope.payload.askpass_id;
1658 let askpass = make_remote_delegate(
1659 this,
1660 envelope.payload.project_id,
1661 repository_id,
1662 askpass_id,
1663 &mut cx,
1664 );
1665
1666 let branch_name = envelope.payload.branch_name.into();
1667 let remote_name = envelope.payload.remote_name.into();
1668
1669 let remote_message = repository_handle
1670 .update(&mut cx, |repository_handle, cx| {
1671 repository_handle.pull(branch_name, remote_name, askpass, cx)
1672 })?
1673 .await??;
1674
1675 Ok(proto::RemoteMessageResponse {
1676 stdout: remote_message.stdout,
1677 stderr: remote_message.stderr,
1678 })
1679 }
1680
1681 async fn handle_stage(
1682 this: Entity<Self>,
1683 envelope: TypedEnvelope<proto::Stage>,
1684 mut cx: AsyncApp,
1685 ) -> Result<proto::Ack> {
1686 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1687 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1688
1689 let entries = envelope
1690 .payload
1691 .paths
1692 .into_iter()
1693 .map(|path| RepoPath::new(&path))
1694 .collect::<Result<Vec<_>>>()?;
1695
1696 repository_handle
1697 .update(&mut cx, |repository_handle, cx| {
1698 repository_handle.stage_entries(entries, cx)
1699 })?
1700 .await?;
1701 Ok(proto::Ack {})
1702 }
1703
1704 async fn handle_unstage(
1705 this: Entity<Self>,
1706 envelope: TypedEnvelope<proto::Unstage>,
1707 mut cx: AsyncApp,
1708 ) -> Result<proto::Ack> {
1709 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1710 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1711
1712 let entries = envelope
1713 .payload
1714 .paths
1715 .into_iter()
1716 .map(|path| RepoPath::new(&path))
1717 .collect::<Result<Vec<_>>>()?;
1718
1719 repository_handle
1720 .update(&mut cx, |repository_handle, cx| {
1721 repository_handle.unstage_entries(entries, cx)
1722 })?
1723 .await?;
1724
1725 Ok(proto::Ack {})
1726 }
1727
1728 async fn handle_stash(
1729 this: Entity<Self>,
1730 envelope: TypedEnvelope<proto::Stash>,
1731 mut cx: AsyncApp,
1732 ) -> Result<proto::Ack> {
1733 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1734 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1735
1736 let entries = envelope
1737 .payload
1738 .paths
1739 .into_iter()
1740 .map(|path| RepoPath::new(&path))
1741 .collect::<Result<Vec<_>>>()?;
1742
1743 repository_handle
1744 .update(&mut cx, |repository_handle, cx| {
1745 repository_handle.stash_entries(entries, cx)
1746 })?
1747 .await?;
1748
1749 Ok(proto::Ack {})
1750 }
1751
1752 async fn handle_stash_pop(
1753 this: Entity<Self>,
1754 envelope: TypedEnvelope<proto::StashPop>,
1755 mut cx: AsyncApp,
1756 ) -> Result<proto::Ack> {
1757 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1758 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1759 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1760
1761 repository_handle
1762 .update(&mut cx, |repository_handle, cx| {
1763 repository_handle.stash_pop(stash_index, cx)
1764 })?
1765 .await?;
1766
1767 Ok(proto::Ack {})
1768 }
1769
1770 async fn handle_stash_apply(
1771 this: Entity<Self>,
1772 envelope: TypedEnvelope<proto::StashApply>,
1773 mut cx: AsyncApp,
1774 ) -> Result<proto::Ack> {
1775 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1776 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1777 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1778
1779 repository_handle
1780 .update(&mut cx, |repository_handle, cx| {
1781 repository_handle.stash_apply(stash_index, cx)
1782 })?
1783 .await?;
1784
1785 Ok(proto::Ack {})
1786 }
1787
1788 async fn handle_stash_drop(
1789 this: Entity<Self>,
1790 envelope: TypedEnvelope<proto::StashDrop>,
1791 mut cx: AsyncApp,
1792 ) -> Result<proto::Ack> {
1793 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1794 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1795 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1796
1797 repository_handle
1798 .update(&mut cx, |repository_handle, cx| {
1799 repository_handle.stash_drop(stash_index, cx)
1800 })?
1801 .await??;
1802
1803 Ok(proto::Ack {})
1804 }
1805
1806 async fn handle_set_index_text(
1807 this: Entity<Self>,
1808 envelope: TypedEnvelope<proto::SetIndexText>,
1809 mut cx: AsyncApp,
1810 ) -> Result<proto::Ack> {
1811 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1812 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1813 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1814
1815 repository_handle
1816 .update(&mut cx, |repository_handle, cx| {
1817 repository_handle.spawn_set_index_text_job(
1818 repo_path,
1819 envelope.payload.text,
1820 None,
1821 cx,
1822 )
1823 })?
1824 .await??;
1825 Ok(proto::Ack {})
1826 }
1827
1828 async fn handle_commit(
1829 this: Entity<Self>,
1830 envelope: TypedEnvelope<proto::Commit>,
1831 mut cx: AsyncApp,
1832 ) -> Result<proto::Ack> {
1833 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1834 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1835
1836 let message = SharedString::from(envelope.payload.message);
1837 let name = envelope.payload.name.map(SharedString::from);
1838 let email = envelope.payload.email.map(SharedString::from);
1839 let options = envelope.payload.options.unwrap_or_default();
1840
1841 repository_handle
1842 .update(&mut cx, |repository_handle, cx| {
1843 repository_handle.commit(
1844 message,
1845 name.zip(email),
1846 CommitOptions {
1847 amend: options.amend,
1848 signoff: options.signoff,
1849 },
1850 cx,
1851 )
1852 })?
1853 .await??;
1854 Ok(proto::Ack {})
1855 }
1856
1857 async fn handle_get_remotes(
1858 this: Entity<Self>,
1859 envelope: TypedEnvelope<proto::GetRemotes>,
1860 mut cx: AsyncApp,
1861 ) -> Result<proto::GetRemotesResponse> {
1862 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1863 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1864
1865 let branch_name = envelope.payload.branch_name;
1866
1867 let remotes = repository_handle
1868 .update(&mut cx, |repository_handle, _| {
1869 repository_handle.get_remotes(branch_name)
1870 })?
1871 .await??;
1872
1873 Ok(proto::GetRemotesResponse {
1874 remotes: remotes
1875 .into_iter()
1876 .map(|remotes| proto::get_remotes_response::Remote {
1877 name: remotes.name.to_string(),
1878 })
1879 .collect::<Vec<_>>(),
1880 })
1881 }
1882
1883 async fn handle_get_branches(
1884 this: Entity<Self>,
1885 envelope: TypedEnvelope<proto::GitGetBranches>,
1886 mut cx: AsyncApp,
1887 ) -> Result<proto::GitBranchesResponse> {
1888 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1889 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1890
1891 let branches = repository_handle
1892 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1893 .await??;
1894
1895 Ok(proto::GitBranchesResponse {
1896 branches: branches
1897 .into_iter()
1898 .map(|branch| branch_to_proto(&branch))
1899 .collect::<Vec<_>>(),
1900 })
1901 }

    async fn handle_get_default_branch(
1903 this: Entity<Self>,
1904 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1905 mut cx: AsyncApp,
1906 ) -> Result<proto::GetDefaultBranchResponse> {
1907 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1908 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1909
1910 let branch = repository_handle
1911 .update(&mut cx, |repository_handle, _| {
1912 repository_handle.default_branch()
1913 })?
1914 .await??
1915 .map(Into::into);
1916
1917 Ok(proto::GetDefaultBranchResponse { branch })
1918 }

    async fn handle_create_branch(
1920 this: Entity<Self>,
1921 envelope: TypedEnvelope<proto::GitCreateBranch>,
1922 mut cx: AsyncApp,
1923 ) -> Result<proto::Ack> {
1924 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1925 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1926 let branch_name = envelope.payload.branch_name;
1927
1928 repository_handle
1929 .update(&mut cx, |repository_handle, _| {
1930 repository_handle.create_branch(branch_name)
1931 })?
1932 .await??;
1933
1934 Ok(proto::Ack {})
1935 }
1936
1937 async fn handle_change_branch(
1938 this: Entity<Self>,
1939 envelope: TypedEnvelope<proto::GitChangeBranch>,
1940 mut cx: AsyncApp,
1941 ) -> Result<proto::Ack> {
1942 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1943 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1944 let branch_name = envelope.payload.branch_name;
1945
1946 repository_handle
1947 .update(&mut cx, |repository_handle, _| {
1948 repository_handle.change_branch(branch_name)
1949 })?
1950 .await??;
1951
1952 Ok(proto::Ack {})
1953 }
1954
1955 async fn handle_rename_branch(
1956 this: Entity<Self>,
1957 envelope: TypedEnvelope<proto::GitRenameBranch>,
1958 mut cx: AsyncApp,
1959 ) -> Result<proto::Ack> {
1960 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1961 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1962 let branch = envelope.payload.branch;
1963 let new_name = envelope.payload.new_name;
1964
1965 repository_handle
1966 .update(&mut cx, |repository_handle, _| {
1967 repository_handle.rename_branch(branch, new_name)
1968 })?
1969 .await??;
1970
1971 Ok(proto::Ack {})
1972 }
1973
1974 async fn handle_show(
1975 this: Entity<Self>,
1976 envelope: TypedEnvelope<proto::GitShow>,
1977 mut cx: AsyncApp,
1978 ) -> Result<proto::GitCommitDetails> {
1979 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1980 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1981
1982 let commit = repository_handle
1983 .update(&mut cx, |repository_handle, _| {
1984 repository_handle.show(envelope.payload.commit)
1985 })?
1986 .await??;
1987 Ok(proto::GitCommitDetails {
1988 sha: commit.sha.into(),
1989 message: commit.message.into(),
1990 commit_timestamp: commit.commit_timestamp,
1991 author_email: commit.author_email.into(),
1992 author_name: commit.author_name.into(),
1993 })
1994 }
1995
1996 async fn handle_load_commit_diff(
1997 this: Entity<Self>,
1998 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1999 mut cx: AsyncApp,
2000 ) -> Result<proto::LoadCommitDiffResponse> {
2001 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2002 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2003
2004 let commit_diff = repository_handle
2005 .update(&mut cx, |repository_handle, _| {
2006 repository_handle.load_commit_diff(envelope.payload.commit)
2007 })?
2008 .await??;
2009 Ok(proto::LoadCommitDiffResponse {
2010 files: commit_diff
2011 .files
2012 .into_iter()
2013 .map(|file| proto::CommitFile {
2014 path: file.path.to_proto(),
2015 old_text: file.old_text,
2016 new_text: file.new_text,
2017 })
2018 .collect(),
2019 })
2020 }
2021
2022 async fn handle_reset(
2023 this: Entity<Self>,
2024 envelope: TypedEnvelope<proto::GitReset>,
2025 mut cx: AsyncApp,
2026 ) -> Result<proto::Ack> {
2027 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2028 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2029
2030 let mode = match envelope.payload.mode() {
2031 git_reset::ResetMode::Soft => ResetMode::Soft,
2032 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2033 };
2034
2035 repository_handle
2036 .update(&mut cx, |repository_handle, cx| {
2037 repository_handle.reset(envelope.payload.commit, mode, cx)
2038 })?
2039 .await??;
2040 Ok(proto::Ack {})
2041 }
2042
2043 async fn handle_checkout_files(
2044 this: Entity<Self>,
2045 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2046 mut cx: AsyncApp,
2047 ) -> Result<proto::Ack> {
2048 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2049 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2050 let paths = envelope
2051 .payload
2052 .paths
2053 .iter()
2054 .map(|s| RepoPath::from_proto(s))
2055 .collect::<Result<Vec<_>>>()?;
2056
2057 repository_handle
2058 .update(&mut cx, |repository_handle, cx| {
2059 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2060 })?
2061 .await??;
2062 Ok(proto::Ack {})
2063 }
2064
2065 async fn handle_open_commit_message_buffer(
2066 this: Entity<Self>,
2067 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2068 mut cx: AsyncApp,
2069 ) -> Result<proto::OpenBufferResponse> {
2070 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2071 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2072 let buffer = repository
2073 .update(&mut cx, |repository, cx| {
2074 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2075 })?
2076 .await?;
2077
2078 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2079 this.update(&mut cx, |this, cx| {
2080 this.buffer_store.update(cx, |buffer_store, cx| {
2081 buffer_store
2082 .create_buffer_for_peer(
2083 &buffer,
2084 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2085 cx,
2086 )
2087 .detach_and_log_err(cx);
2088 })
2089 })?;
2090
2091 Ok(proto::OpenBufferResponse {
2092 buffer_id: buffer_id.to_proto(),
2093 })
2094 }
2095
2096 async fn handle_askpass(
2097 this: Entity<Self>,
2098 envelope: TypedEnvelope<proto::AskPassRequest>,
2099 mut cx: AsyncApp,
2100 ) -> Result<proto::AskPassResponse> {
2101 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2102 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2103
2104 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2105 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2106 debug_panic!("no askpass found");
2107 anyhow::bail!("no askpass found");
2108 };
2109
2110 let response = askpass
2111 .ask_password(envelope.payload.prompt)
2112 .await
2113 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2114
2115 delegates
2116 .lock()
2117 .insert(envelope.payload.askpass_id, askpass);
2118
        // In fact, we don't quite know what we're doing here, since we're sending the askpass password unencrypted.
2120 Ok(proto::AskPassResponse {
2121 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2122 })
2123 }
2124
2125 async fn handle_check_for_pushed_commits(
2126 this: Entity<Self>,
2127 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2128 mut cx: AsyncApp,
2129 ) -> Result<proto::CheckForPushedCommitsResponse> {
2130 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2131 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2132
2133 let branches = repository_handle
2134 .update(&mut cx, |repository_handle, _| {
2135 repository_handle.check_for_pushed_commits()
2136 })?
2137 .await??;
2138 Ok(proto::CheckForPushedCommitsResponse {
2139 pushed_to: branches
2140 .into_iter()
2141 .map(|commit| commit.to_string())
2142 .collect(),
2143 })
2144 }
2145
2146 async fn handle_git_diff(
2147 this: Entity<Self>,
2148 envelope: TypedEnvelope<proto::GitDiff>,
2149 mut cx: AsyncApp,
2150 ) -> Result<proto::GitDiffResponse> {
2151 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2152 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2153 let diff_type = match envelope.payload.diff_type() {
2154 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2155 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2156 };
2157
2158 let mut diff = repository_handle
2159 .update(&mut cx, |repository_handle, cx| {
2160 repository_handle.diff(diff_type, cx)
2161 })?
2162 .await??;
2163 const ONE_MB: usize = 1_000_000;
2164 if diff.len() > ONE_MB {
2165 diff = diff.chars().take(ONE_MB).collect()
2166 }
2167
2168 Ok(proto::GitDiffResponse { diff })
2169 }
2170
2171 async fn handle_open_unstaged_diff(
2172 this: Entity<Self>,
2173 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2174 mut cx: AsyncApp,
2175 ) -> Result<proto::OpenUnstagedDiffResponse> {
2176 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2177 let diff = this
2178 .update(&mut cx, |this, cx| {
2179 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2180 Some(this.open_unstaged_diff(buffer, cx))
2181 })?
2182 .context("missing buffer")?
2183 .await?;
2184 this.update(&mut cx, |this, _| {
2185 let shared_diffs = this
2186 .shared_diffs
2187 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2188 .or_default();
2189 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2190 })?;
2191 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2192 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2193 }
2194
2195 async fn handle_open_uncommitted_diff(
2196 this: Entity<Self>,
2197 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2198 mut cx: AsyncApp,
2199 ) -> Result<proto::OpenUncommittedDiffResponse> {
2200 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2201 let diff = this
2202 .update(&mut cx, |this, cx| {
2203 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2204 Some(this.open_uncommitted_diff(buffer, cx))
2205 })?
2206 .context("missing buffer")?
2207 .await?;
2208 this.update(&mut cx, |this, _| {
2209 let shared_diffs = this
2210 .shared_diffs
2211 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2212 .or_default();
2213 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2214 })?;
2215 diff.read_with(&cx, |diff, cx| {
2216 use proto::open_uncommitted_diff_response::Mode;
2217
2218 let unstaged_diff = diff.secondary_diff();
2219 let index_snapshot = unstaged_diff.and_then(|diff| {
2220 let diff = diff.read(cx);
2221 diff.base_text_exists().then(|| diff.base_text())
2222 });
2223
2224 let mode;
2225 let staged_text;
2226 let committed_text;
2227 if diff.base_text_exists() {
2228 let committed_snapshot = diff.base_text();
2229 committed_text = Some(committed_snapshot.text());
2230 if let Some(index_text) = index_snapshot {
2231 if index_text.remote_id() == committed_snapshot.remote_id() {
2232 mode = Mode::IndexMatchesHead;
2233 staged_text = None;
2234 } else {
2235 mode = Mode::IndexAndHead;
2236 staged_text = Some(index_text.text());
2237 }
2238 } else {
2239 mode = Mode::IndexAndHead;
2240 staged_text = None;
2241 }
2242 } else {
2243 mode = Mode::IndexAndHead;
2244 committed_text = None;
2245 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2246 }
2247
2248 proto::OpenUncommittedDiffResponse {
2249 committed_text,
2250 staged_text,
2251 mode: mode.into(),
2252 }
2253 })
2254 }
2255
2256 async fn handle_update_diff_bases(
2257 this: Entity<Self>,
2258 request: TypedEnvelope<proto::UpdateDiffBases>,
2259 mut cx: AsyncApp,
2260 ) -> Result<()> {
2261 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2262 this.update(&mut cx, |this, cx| {
2263 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2264 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2265 {
2266 let buffer = buffer.read(cx).text_snapshot();
2267 diff_state.update(cx, |diff_state, cx| {
2268 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2269 })
2270 }
2271 })
2272 }
2273
2274 async fn handle_blame_buffer(
2275 this: Entity<Self>,
2276 envelope: TypedEnvelope<proto::BlameBuffer>,
2277 mut cx: AsyncApp,
2278 ) -> Result<proto::BlameBufferResponse> {
2279 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2280 let version = deserialize_version(&envelope.payload.version);
2281 let buffer = this.read_with(&cx, |this, cx| {
2282 this.buffer_store.read(cx).get_existing(buffer_id)
2283 })??;
2284 buffer
2285 .update(&mut cx, |buffer, _| {
2286 buffer.wait_for_version(version.clone())
2287 })?
2288 .await?;
2289 let blame = this
2290 .update(&mut cx, |this, cx| {
2291 this.blame_buffer(&buffer, Some(version), cx)
2292 })?
2293 .await?;
2294 Ok(serialize_blame_buffer_response(blame))
2295 }
2296
2297 async fn handle_get_permalink_to_line(
2298 this: Entity<Self>,
2299 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2300 mut cx: AsyncApp,
2301 ) -> Result<proto::GetPermalinkToLineResponse> {
2302 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2303 // let version = deserialize_version(&envelope.payload.version);
2304 let selection = {
2305 let proto_selection = envelope
2306 .payload
2307 .selection
                .context("no selection provided to get permalink for")?;
2309 proto_selection.start as u32..proto_selection.end as u32
2310 };
2311 let buffer = this.read_with(&cx, |this, cx| {
2312 this.buffer_store.read(cx).get_existing(buffer_id)
2313 })??;
2314 let permalink = this
2315 .update(&mut cx, |this, cx| {
2316 this.get_permalink_to_line(&buffer, selection, cx)
2317 })?
2318 .await?;
2319 Ok(proto::GetPermalinkToLineResponse {
2320 permalink: permalink.to_string(),
2321 })
2322 }
2323
2324 fn repository_for_request(
2325 this: &Entity<Self>,
2326 id: RepositoryId,
2327 cx: &mut AsyncApp,
2328 ) -> Result<Entity<Repository>> {
2329 this.read_with(cx, |this, _| {
2330 this.repositories
2331 .get(&id)
2332 .context("missing repository handle")
2333 .cloned()
2334 })?
2335 }
2336
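    /// Returns a clone of the current snapshot for every repository known to
    /// this store, keyed by [`RepositoryId`].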
2337 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2338 self.repositories
2339 .iter()
2340 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2341 .collect()
2342 }
2343
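    /// Groups the updated worktree entries by the repository containing them.
    /// Paths are resolved on the background executor, and each path is assigned
    /// to its innermost repository.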
2344 fn process_updated_entries(
2345 &self,
2346 worktree: &Entity<Worktree>,
2347 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2348 cx: &mut App,
2349 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2350 let path_style = worktree.read(cx).path_style();
2351 let mut repo_paths = self
2352 .repositories
2353 .values()
2354 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2355 .collect::<Vec<_>>();
2356 let mut entries: Vec<_> = updated_entries
2357 .iter()
2358 .map(|(path, _, _)| path.clone())
2359 .collect();
2360 entries.sort();
2361 let worktree = worktree.read(cx);
2362
2363 let entries = entries
2364 .into_iter()
2365 .map(|path| worktree.absolutize(&path))
2366 .collect::<Arc<[_]>>();
2367
2368 let executor = cx.background_executor().clone();
2369 cx.background_executor().spawn(async move {
2370 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2371 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2372 let mut tasks = FuturesOrdered::new();
2373 for (repo_path, repo) in repo_paths.into_iter().rev() {
2374 let entries = entries.clone();
2375 let task = executor.spawn(async move {
2376 // Find all repository paths that belong to this repo
2377 let mut ix = entries.partition_point(|path| path < &*repo_path);
2378 if ix == entries.len() {
2379 return None;
2380 };
2381
2382 let mut paths = Vec::new();
                    // All paths prefixed by a given repo will form one contiguous range.
2384 while let Some(path) = entries.get(ix)
2385 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2386 &repo_path, path, path_style,
2387 )
2388 {
2389 paths.push((repo_path, ix));
2390 ix += 1;
2391 }
2392 if paths.is_empty() {
2393 None
2394 } else {
2395 Some((repo, paths))
2396 }
2397 });
2398 tasks.push_back(task);
2399 }
2400
2401 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2402 let mut path_was_used = vec![false; entries.len()];
2403 let tasks = tasks.collect::<Vec<_>>().await;
            // Tasks were queued over the repositories in descending path order, so more
            // deeply nested repositories are seen first. We always want to assign a path
            // to its innermost repository.
2406 for t in tasks {
2407 let Some((repo, paths)) = t else {
2408 continue;
2409 };
2410 let entry = paths_by_git_repo.entry(repo).or_default();
2411 for (repo_path, ix) in paths {
2412 if path_was_used[ix] {
2413 continue;
2414 }
2415 path_was_used[ix] = true;
2416 entry.push(repo_path);
2417 }
2418 }
2419
2420 paths_by_git_repo
2421 })
2422 }
2423}
2424
2425impl BufferGitState {
2426 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2427 Self {
2428 unstaged_diff: Default::default(),
2429 uncommitted_diff: Default::default(),
2430 recalculate_diff_task: Default::default(),
2431 language: Default::default(),
2432 language_registry: Default::default(),
2433 recalculating_tx: postage::watch::channel_with(false).0,
2434 hunk_staging_operation_count: 0,
2435 hunk_staging_operation_count_as_of_write: 0,
2436 head_text: Default::default(),
2437 index_text: Default::default(),
2438 head_changed: Default::default(),
2439 index_changed: Default::default(),
2440 language_changed: Default::default(),
2441 conflict_updated_futures: Default::default(),
2442 conflict_set: Default::default(),
2443 reparse_conflict_markers_task: Default::default(),
2444 }
2445 }
2446
2447 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2448 self.language = buffer.read(cx).language().cloned();
2449 self.language_changed = true;
2450 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2451 }
2452
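    /// Schedules a background re-parse of the buffer's conflict markers and
    /// updates the associated [`ConflictSet`]. The returned receiver fires once
    /// the new snapshot has been applied; if the buffer previously had no
    /// conflicts, no work is scheduled.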
2453 fn reparse_conflict_markers(
2454 &mut self,
2455 buffer: text::BufferSnapshot,
2456 cx: &mut Context<Self>,
2457 ) -> oneshot::Receiver<()> {
2458 let (tx, rx) = oneshot::channel();
2459
2460 let Some(conflict_set) = self
2461 .conflict_set
2462 .as_ref()
2463 .and_then(|conflict_set| conflict_set.upgrade())
2464 else {
2465 return rx;
2466 };
2467
2468 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2469 if conflict_set.has_conflict {
2470 Some(conflict_set.snapshot())
2471 } else {
2472 None
2473 }
2474 });
2475
2476 if let Some(old_snapshot) = old_snapshot {
2477 self.conflict_updated_futures.push(tx);
2478 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2479 let (snapshot, changed_range) = cx
2480 .background_spawn(async move {
2481 let new_snapshot = ConflictSet::parse(&buffer);
2482 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2483 (new_snapshot, changed_range)
2484 })
2485 .await;
2486 this.update(cx, |this, cx| {
2487 if let Some(conflict_set) = &this.conflict_set {
2488 conflict_set
2489 .update(cx, |conflict_set, cx| {
2490 conflict_set.set_snapshot(snapshot, changed_range, cx);
2491 })
2492 .ok();
2493 }
2494 let futures = std::mem::take(&mut this.conflict_updated_futures);
2495 for tx in futures {
2496 tx.send(()).ok();
2497 }
2498 })
2499 }))
2500 }
2501
2502 rx
2503 }
2504
2505 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2506 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2507 }
2508
2509 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2510 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2511 }
2512
2513 fn handle_base_texts_updated(
2514 &mut self,
2515 buffer: text::BufferSnapshot,
2516 message: proto::UpdateDiffBases,
2517 cx: &mut Context<Self>,
2518 ) {
2519 use proto::update_diff_bases::Mode;
2520
2521 let Some(mode) = Mode::from_i32(message.mode) else {
2522 return;
2523 };
2524
2525 let diff_bases_change = match mode {
2526 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2527 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2528 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2529 Mode::IndexAndHead => DiffBasesChange::SetEach {
2530 index: message.staged_text,
2531 head: message.committed_text,
2532 },
2533 };
2534
2535 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2536 }
2537
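    /// If a diff recalculation is currently in flight, returns a future that
    /// resolves once it completes; otherwise returns `None`.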
2538 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2539 if *self.recalculating_tx.borrow() {
2540 let mut rx = self.recalculating_tx.subscribe();
2541 Some(async move {
2542 loop {
2543 let is_recalculating = rx.recv().await;
2544 if is_recalculating != Some(true) {
2545 break;
2546 }
2547 }
2548 })
2549 } else {
2550 None
2551 }
2552 }
2553
2554 fn diff_bases_changed(
2555 &mut self,
2556 buffer: text::BufferSnapshot,
2557 diff_bases_change: Option<DiffBasesChange>,
2558 cx: &mut Context<Self>,
2559 ) {
2560 match diff_bases_change {
2561 Some(DiffBasesChange::SetIndex(index)) => {
2562 self.index_text = index.map(|mut index| {
2563 text::LineEnding::normalize(&mut index);
2564 Arc::new(index)
2565 });
2566 self.index_changed = true;
2567 }
2568 Some(DiffBasesChange::SetHead(head)) => {
2569 self.head_text = head.map(|mut head| {
2570 text::LineEnding::normalize(&mut head);
2571 Arc::new(head)
2572 });
2573 self.head_changed = true;
2574 }
2575 Some(DiffBasesChange::SetBoth(text)) => {
2576 let text = text.map(|mut text| {
2577 text::LineEnding::normalize(&mut text);
2578 Arc::new(text)
2579 });
2580 self.head_text = text.clone();
2581 self.index_text = text;
2582 self.head_changed = true;
2583 self.index_changed = true;
2584 }
2585 Some(DiffBasesChange::SetEach { index, head }) => {
2586 self.index_text = index.map(|mut index| {
2587 text::LineEnding::normalize(&mut index);
2588 Arc::new(index)
2589 });
2590 self.index_changed = true;
2591 self.head_text = head.map(|mut head| {
2592 text::LineEnding::normalize(&mut head);
2593 Arc::new(head)
2594 });
2595 self.head_changed = true;
2596 }
2597 None => {}
2598 }
2599
2600 self.recalculate_diffs(buffer, cx)
2601 }
2602
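    /// Spawns a background task that recomputes the unstaged and uncommitted
    /// diffs from the current index and HEAD texts, bailing out early if new
    /// hunk-staging operations arrive while the recalculation is in flight.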
2603 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2604 *self.recalculating_tx.borrow_mut() = true;
2605
2606 let language = self.language.clone();
2607 let language_registry = self.language_registry.clone();
2608 let unstaged_diff = self.unstaged_diff();
2609 let uncommitted_diff = self.uncommitted_diff();
2610 let head = self.head_text.clone();
2611 let index = self.index_text.clone();
2612 let index_changed = self.index_changed;
2613 let head_changed = self.head_changed;
2614 let language_changed = self.language_changed;
2615 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2616 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2617 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2618 (None, None) => true,
2619 _ => false,
2620 };
2621 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2622 log::debug!(
2623 "start recalculating diffs for buffer {}",
2624 buffer.remote_id()
2625 );
2626
2627 let mut new_unstaged_diff = None;
2628 if let Some(unstaged_diff) = &unstaged_diff {
2629 new_unstaged_diff = Some(
2630 BufferDiff::update_diff(
2631 unstaged_diff.clone(),
2632 buffer.clone(),
2633 index,
2634 index_changed,
2635 language_changed,
2636 language.clone(),
2637 language_registry.clone(),
2638 cx,
2639 )
2640 .await?,
2641 );
2642 }
2643
2644 let mut new_uncommitted_diff = None;
2645 if let Some(uncommitted_diff) = &uncommitted_diff {
2646 new_uncommitted_diff = if index_matches_head {
2647 new_unstaged_diff.clone()
2648 } else {
2649 Some(
2650 BufferDiff::update_diff(
2651 uncommitted_diff.clone(),
2652 buffer.clone(),
2653 head,
2654 head_changed,
2655 language_changed,
2656 language.clone(),
2657 language_registry.clone(),
2658 cx,
2659 )
2660 .await?,
2661 )
2662 }
2663 }
2664
2665 let cancel = this.update(cx, |this, _| {
2666 // This checks whether all pending stage/unstage operations
2667 // have quiesced (i.e. both the corresponding write and the
2668 // read of that write have completed). If not, then we cancel
2669 // this recalculation attempt to avoid invalidating pending
2670 // state too quickly; another recalculation will come along
2671 // later and clear the pending state once the state of the index has settled.
2672 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2673 *this.recalculating_tx.borrow_mut() = false;
2674 true
2675 } else {
2676 false
2677 }
2678 })?;
2679 if cancel {
2680 log::debug!(
2681 concat!(
                        "aborting recalculating diffs for buffer {} ",
                        "due to subsequent hunk operations",
2684 ),
2685 buffer.remote_id()
2686 );
2687 return Ok(());
2688 }
2689
2690 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2691 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2692 {
2693 unstaged_diff.update(cx, |diff, cx| {
2694 if language_changed {
2695 diff.language_changed(cx);
2696 }
2697 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2698 })?
2699 } else {
2700 None
2701 };
2702
2703 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2704 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2705 {
2706 uncommitted_diff.update(cx, |diff, cx| {
2707 if language_changed {
2708 diff.language_changed(cx);
2709 }
2710 diff.set_snapshot_with_secondary(
2711 new_uncommitted_diff,
2712 &buffer,
2713 unstaged_changed_range,
2714 true,
2715 cx,
2716 );
2717 })?;
2718 }
2719
2720 log::debug!(
2721 "finished recalculating diffs for buffer {}",
2722 buffer.remote_id()
2723 );
2724
2725 if let Some(this) = this.upgrade() {
2726 this.update(cx, |this, _| {
2727 this.index_changed = false;
2728 this.head_changed = false;
2729 this.language_changed = false;
2730 *this.recalculating_tx.borrow_mut() = false;
2731 })?;
2732 }
2733
2734 Ok(())
2735 }));
2736 }
2737}
2738
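/// Builds an [`AskPassDelegate`] that forwards askpass prompts to the
/// downstream client over RPC and relays the response back to the caller.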
2739fn make_remote_delegate(
2740 this: Entity<GitStore>,
2741 project_id: u64,
2742 repository_id: RepositoryId,
2743 askpass_id: u64,
2744 cx: &mut AsyncApp,
2745) -> AskPassDelegate {
2746 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2747 this.update(cx, |this, cx| {
2748 let Some((client, _)) = this.downstream_client() else {
2749 return;
2750 };
2751 let response = client.request(proto::AskPassRequest {
2752 project_id,
2753 repository_id: repository_id.to_proto(),
2754 askpass_id,
2755 prompt,
2756 });
2757 cx.spawn(async move |_, _| {
2758 let mut response = response.await?.response;
2759 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2760 .ok();
2761 response.zeroize();
2762 anyhow::Ok(())
2763 })
2764 .detach_and_log_err(cx);
2765 })
2766 .log_err();
2767 })
2768}
2769
2770impl RepositoryId {
2771 pub fn to_proto(self) -> u64 {
2772 self.0
2773 }
2774
2775 pub fn from_proto(id: u64) -> Self {
2776 RepositoryId(id)
2777 }
2778}
2779
2780impl RepositorySnapshot {
2781 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2782 Self {
2783 id,
2784 statuses_by_path: Default::default(),
2785 work_directory_abs_path,
2786 branch: None,
2787 head_commit: None,
2788 scan_id: 0,
2789 merge: Default::default(),
2790 remote_origin_url: None,
2791 remote_upstream_url: None,
2792 stash_entries: Default::default(),
2793 path_style,
2794 }
2795 }
2796
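    /// Builds the full `UpdateRepository` message that is sent when this
    /// repository is first shared with a downstream peer.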
2797 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2798 proto::UpdateRepository {
2799 branch_summary: self.branch.as_ref().map(branch_to_proto),
2800 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2801 updated_statuses: self
2802 .statuses_by_path
2803 .iter()
2804 .map(|entry| entry.to_proto())
2805 .collect(),
2806 removed_statuses: Default::default(),
2807 current_merge_conflicts: self
2808 .merge
2809 .conflicted_paths
2810 .iter()
2811 .map(|repo_path| repo_path.to_proto())
2812 .collect(),
2813 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2814 project_id,
2815 id: self.id.to_proto(),
2816 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2817 entry_ids: vec![self.id.to_proto()],
2818 scan_id: self.scan_id,
2819 is_last_update: true,
2820 stash_entries: self
2821 .stash_entries
2822 .entries
2823 .iter()
2824 .map(stash_to_proto)
2825 .collect(),
2826 }
2827 }
2828
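    /// Builds an incremental `UpdateRepository` message by diffing this
    /// snapshot's statuses against the previous snapshot `old`.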
2829 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2830 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2831 let mut removed_statuses: Vec<String> = Vec::new();
2832
2833 let mut new_statuses = self.statuses_by_path.iter().peekable();
2834 let mut old_statuses = old.statuses_by_path.iter().peekable();
2835
2836 let mut current_new_entry = new_statuses.next();
2837 let mut current_old_entry = old_statuses.next();
2838 loop {
2839 match (current_new_entry, current_old_entry) {
2840 (Some(new_entry), Some(old_entry)) => {
2841 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2842 Ordering::Less => {
2843 updated_statuses.push(new_entry.to_proto());
2844 current_new_entry = new_statuses.next();
2845 }
2846 Ordering::Equal => {
2847 if new_entry.status != old_entry.status {
2848 updated_statuses.push(new_entry.to_proto());
2849 }
2850 current_old_entry = old_statuses.next();
2851 current_new_entry = new_statuses.next();
2852 }
2853 Ordering::Greater => {
2854 removed_statuses.push(old_entry.repo_path.to_proto());
2855 current_old_entry = old_statuses.next();
2856 }
2857 }
2858 }
2859 (None, Some(old_entry)) => {
2860 removed_statuses.push(old_entry.repo_path.to_proto());
2861 current_old_entry = old_statuses.next();
2862 }
2863 (Some(new_entry), None) => {
2864 updated_statuses.push(new_entry.to_proto());
2865 current_new_entry = new_statuses.next();
2866 }
2867 (None, None) => break,
2868 }
2869 }
2870
2871 proto::UpdateRepository {
2872 branch_summary: self.branch.as_ref().map(branch_to_proto),
2873 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2874 updated_statuses,
2875 removed_statuses,
2876 current_merge_conflicts: self
2877 .merge
2878 .conflicted_paths
2879 .iter()
2880 .map(|path| path.to_proto())
2881 .collect(),
2882 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2883 project_id,
2884 id: self.id.to_proto(),
2885 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2886 entry_ids: vec![],
2887 scan_id: self.scan_id,
2888 is_last_update: true,
2889 stash_entries: self
2890 .stash_entries
2891 .entries
2892 .iter()
2893 .map(stash_to_proto)
2894 .collect(),
2895 }
2896 }
2897
2898 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2899 self.statuses_by_path.iter().cloned()
2900 }
2901
2902 pub fn status_summary(&self) -> GitSummary {
2903 self.statuses_by_path.summary().item_summary
2904 }
2905
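    /// Returns the cached status entry for the given repository-relative path,
    /// if one exists.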
2906 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2907 self.statuses_by_path
2908 .get(&PathKey(path.0.clone()), ())
2909 .cloned()
2910 }
2911
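    /// Converts an absolute path into a path relative to this repository's
    /// working directory, or `None` if the path lies outside of it.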
2912 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2913 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
2914 }
2915
2916 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
2917 self.path_style
2918 .join(&self.work_directory_abs_path, repo_path.as_std_path())
2919 .unwrap()
2920 .into()
2921 }
2922
2923 #[inline]
2924 fn abs_path_to_repo_path_inner(
2925 work_directory_abs_path: &Path,
2926 abs_path: &Path,
2927 path_style: PathStyle,
2928 ) -> Option<RepoPath> {
2929 abs_path
2930 .strip_prefix(&work_directory_abs_path)
2931 .ok()
2932 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
2933 }
2934
2935 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2936 self.merge.conflicted_paths.contains(repo_path)
2937 }
2938
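    /// Returns whether the given path either had a conflict when the merge
    /// heads last changed, or is currently reported as conflicted by git.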
2939 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2940 let had_conflict_on_last_merge_head_change =
2941 self.merge.conflicted_paths.contains(repo_path);
2942 let has_conflict_currently = self
2943 .status_for_path(repo_path)
2944 .is_some_and(|entry| entry.status.is_conflicted());
2945 had_conflict_on_last_merge_head_change || has_conflict_currently
2946 }
2947
2948 /// This is the name that will be displayed in the repository selector for this repository.
2949 pub fn display_name(&self) -> SharedString {
2950 self.work_directory_abs_path
2951 .file_name()
2952 .unwrap_or_default()
2953 .to_string_lossy()
2954 .to_string()
2955 .into()
2956 }
2957}
2958
2959pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
2960 proto::StashEntry {
2961 oid: entry.oid.as_bytes().to_vec(),
2962 message: entry.message.clone(),
2963 branch: entry.branch.clone(),
2964 index: entry.index as u64,
2965 timestamp: entry.timestamp,
2966 }
2967}
2968
2969pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
2970 Ok(StashEntry {
2971 oid: Oid::from_bytes(&entry.oid)?,
2972 message: entry.message.clone(),
2973 index: entry.index as usize,
2974 branch: entry.branch.clone(),
2975 timestamp: entry.timestamp,
2976 })
2977}
2978
2979impl MergeDetails {
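    /// Reads the current merge state (merge message, in-progress operation
    /// heads, and conflicted paths) from the repository, returning the new
    /// details along with whether the merge heads changed since `prev_snapshot`.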
2980 async fn load(
2981 backend: &Arc<dyn GitRepository>,
2982 status: &SumTree<StatusEntry>,
2983 prev_snapshot: &RepositorySnapshot,
2984 ) -> Result<(MergeDetails, bool)> {
2985 log::debug!("load merge details");
2986 let message = backend.merge_message().await;
2987 let heads = backend
2988 .revparse_batch(vec![
2989 "MERGE_HEAD".into(),
2990 "CHERRY_PICK_HEAD".into(),
2991 "REBASE_HEAD".into(),
2992 "REVERT_HEAD".into(),
2993 "APPLY_HEAD".into(),
2994 ])
2995 .await
2996 .log_err()
2997 .unwrap_or_default()
2998 .into_iter()
2999 .map(|opt| opt.map(SharedString::from))
3000 .collect::<Vec<_>>();
3001 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3002 let conflicted_paths = if merge_heads_changed {
3003 let current_conflicted_paths = TreeSet::from_ordered_entries(
3004 status
3005 .iter()
3006 .filter(|entry| entry.status.is_conflicted())
3007 .map(|entry| entry.repo_path.clone()),
3008 );
3009
3010 // It can happen that we run a scan while a lengthy merge is in progress
3011 // that will eventually result in conflicts, but before those conflicts
3012 // are reported by `git status`. Since for the moment we only care about
3013 // the merge heads state for the purposes of tracking conflicts, don't update
3014 // this state until we see some conflicts.
3015 if heads.iter().any(Option::is_some)
3016 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3017 && current_conflicted_paths.is_empty()
3018 {
3019 log::debug!("not updating merge heads because no conflicts found");
3020 return Ok((
3021 MergeDetails {
3022 message: message.map(SharedString::from),
3023 ..prev_snapshot.merge.clone()
3024 },
3025 false,
3026 ));
3027 }
3028
3029 current_conflicted_paths
3030 } else {
3031 prev_snapshot.merge.conflicted_paths.clone()
3032 };
3033 let details = MergeDetails {
3034 conflicted_paths,
3035 message: message.map(SharedString::from),
3036 heads,
3037 };
3038 Ok((details, merge_heads_changed))
3039 }
3040}
3041
3042impl Repository {
3043 pub fn snapshot(&self) -> RepositorySnapshot {
3044 self.snapshot.clone()
3045 }
3046
3047 fn local(
3048 id: RepositoryId,
3049 work_directory_abs_path: Arc<Path>,
3050 dot_git_abs_path: Arc<Path>,
3051 repository_dir_abs_path: Arc<Path>,
3052 common_dir_abs_path: Arc<Path>,
3053 project_environment: WeakEntity<ProjectEnvironment>,
3054 fs: Arc<dyn Fs>,
3055 git_store: WeakEntity<GitStore>,
3056 cx: &mut Context<Self>,
3057 ) -> Self {
3058 let snapshot =
3059 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3060 Repository {
3061 this: cx.weak_entity(),
3062 git_store,
3063 snapshot,
3064 commit_message_buffer: None,
3065 askpass_delegates: Default::default(),
3066 paths_needing_status_update: Default::default(),
3067 latest_askpass_id: 0,
3068 job_sender: Repository::spawn_local_git_worker(
3069 work_directory_abs_path,
3070 dot_git_abs_path,
3071 repository_dir_abs_path,
3072 common_dir_abs_path,
3073 project_environment,
3074 fs,
3075 cx,
3076 ),
3077 job_id: 0,
3078 active_jobs: Default::default(),
3079 }
3080 }
3081
3082 fn remote(
3083 id: RepositoryId,
3084 work_directory_abs_path: Arc<Path>,
3085 path_style: PathStyle,
3086 project_id: ProjectId,
3087 client: AnyProtoClient,
3088 git_store: WeakEntity<GitStore>,
3089 cx: &mut Context<Self>,
3090 ) -> Self {
3091 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3092 Self {
3093 this: cx.weak_entity(),
3094 snapshot,
3095 commit_message_buffer: None,
3096 git_store,
3097 paths_needing_status_update: Default::default(),
3098 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3099 askpass_delegates: Default::default(),
3100 latest_askpass_id: 0,
3101 active_jobs: Default::default(),
3102 job_id: 0,
3103 }
3104 }
3105
3106 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3107 self.git_store.upgrade()
3108 }
3109
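    /// Reloads the index and HEAD texts backing the diffs of every open buffer
    /// that belongs to this repository, forwarding any changes to downstream
    /// clients and to the in-memory diff state. Only meaningful for local
    /// repositories.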
3110 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3111 let this = cx.weak_entity();
3112 let git_store = self.git_store.clone();
3113 let _ = self.send_keyed_job(
3114 Some(GitJobKey::ReloadBufferDiffBases),
3115 None,
3116 |state, mut cx| async move {
3117 let RepositoryState::Local { backend, .. } = state else {
3118 log::error!("tried to recompute diffs for a non-local repository");
3119 return Ok(());
3120 };
3121
3122 let Some(this) = this.upgrade() else {
3123 return Ok(());
3124 };
3125
3126 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3127 git_store.update(cx, |git_store, cx| {
3128 git_store
3129 .diffs
3130 .iter()
3131 .filter_map(|(buffer_id, diff_state)| {
3132 let buffer_store = git_store.buffer_store.read(cx);
3133 let buffer = buffer_store.get(*buffer_id)?;
3134 let file = File::from_dyn(buffer.read(cx).file())?;
3135 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3136 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3137 log::debug!(
3138 "start reload diff bases for repo path {}",
3139 repo_path.as_unix_str()
3140 );
3141 diff_state.update(cx, |diff_state, _| {
3142 let has_unstaged_diff = diff_state
3143 .unstaged_diff
3144 .as_ref()
3145 .is_some_and(|diff| diff.is_upgradable());
3146 let has_uncommitted_diff = diff_state
3147 .uncommitted_diff
3148 .as_ref()
3149 .is_some_and(|set| set.is_upgradable());
3150
3151 Some((
3152 buffer,
3153 repo_path,
3154 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3155 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3156 ))
3157 })
3158 })
3159 .collect::<Vec<_>>()
3160 })
3161 })??;
3162
3163 let buffer_diff_base_changes = cx
3164 .background_spawn(async move {
3165 let mut changes = Vec::new();
3166 for (buffer, repo_path, current_index_text, current_head_text) in
3167 &repo_diff_state_updates
3168 {
3169 let index_text = if current_index_text.is_some() {
3170 backend.load_index_text(repo_path.clone()).await
3171 } else {
3172 None
3173 };
3174 let head_text = if current_head_text.is_some() {
3175 backend.load_committed_text(repo_path.clone()).await
3176 } else {
3177 None
3178 };
3179
3180 let change =
3181 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3182 (Some(current_index), Some(current_head)) => {
3183 let index_changed =
3184 index_text.as_ref() != current_index.as_deref();
3185 let head_changed =
3186 head_text.as_ref() != current_head.as_deref();
3187 if index_changed && head_changed {
3188 if index_text == head_text {
3189 Some(DiffBasesChange::SetBoth(head_text))
3190 } else {
3191 Some(DiffBasesChange::SetEach {
3192 index: index_text,
3193 head: head_text,
3194 })
3195 }
3196 } else if index_changed {
3197 Some(DiffBasesChange::SetIndex(index_text))
3198 } else if head_changed {
3199 Some(DiffBasesChange::SetHead(head_text))
3200 } else {
3201 None
3202 }
3203 }
3204 (Some(current_index), None) => {
3205 let index_changed =
3206 index_text.as_ref() != current_index.as_deref();
3207 index_changed
3208 .then_some(DiffBasesChange::SetIndex(index_text))
3209 }
3210 (None, Some(current_head)) => {
3211 let head_changed =
3212 head_text.as_ref() != current_head.as_deref();
3213 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3214 }
3215 (None, None) => None,
3216 };
3217
3218 changes.push((buffer.clone(), change))
3219 }
3220 changes
3221 })
3222 .await;
3223
3224 git_store.update(&mut cx, |git_store, cx| {
3225 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3226 let buffer_snapshot = buffer.read(cx).text_snapshot();
3227 let buffer_id = buffer_snapshot.remote_id();
3228 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3229 continue;
3230 };
3231
3232 let downstream_client = git_store.downstream_client();
3233 diff_state.update(cx, |diff_state, cx| {
3234 use proto::update_diff_bases::Mode;
3235
3236 if let Some((diff_bases_change, (client, project_id))) =
3237 diff_bases_change.clone().zip(downstream_client)
3238 {
3239 let (staged_text, committed_text, mode) = match diff_bases_change {
3240 DiffBasesChange::SetIndex(index) => {
3241 (index, None, Mode::IndexOnly)
3242 }
3243 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3244 DiffBasesChange::SetEach { index, head } => {
3245 (index, head, Mode::IndexAndHead)
3246 }
3247 DiffBasesChange::SetBoth(text) => {
3248 (None, text, Mode::IndexMatchesHead)
3249 }
3250 };
3251 client
3252 .send(proto::UpdateDiffBases {
3253 project_id: project_id.to_proto(),
3254 buffer_id: buffer_id.to_proto(),
3255 staged_text,
3256 committed_text,
3257 mode: mode as i32,
3258 })
3259 .log_err();
3260 }
3261
3262 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3263 });
3264 }
3265 })
3266 },
3267 );
3268 }
3269
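    /// Enqueues a job on this repository's worker. While the job runs, `status`
    /// (if provided) is displayed as an active job message.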
3270 pub fn send_job<F, Fut, R>(
3271 &mut self,
3272 status: Option<SharedString>,
3273 job: F,
3274 ) -> oneshot::Receiver<R>
3275 where
3276 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3277 Fut: Future<Output = R> + 'static,
3278 R: Send + 'static,
3279 {
3280 self.send_keyed_job(None, status, job)
3281 }
3282
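    /// Like [`Self::send_job`], but additionally tags the job with an optional
    /// [`GitJobKey`] so the worker can recognize related jobs.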
3283 fn send_keyed_job<F, Fut, R>(
3284 &mut self,
3285 key: Option<GitJobKey>,
3286 status: Option<SharedString>,
3287 job: F,
3288 ) -> oneshot::Receiver<R>
3289 where
3290 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3291 Fut: Future<Output = R> + 'static,
3292 R: Send + 'static,
3293 {
3294 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3295 let job_id = post_inc(&mut self.job_id);
3296 let this = self.this.clone();
3297 self.job_sender
3298 .unbounded_send(GitJob {
3299 key,
3300 job: Box::new(move |state, cx: &mut AsyncApp| {
3301 let job = job(state, cx.clone());
3302 cx.spawn(async move |cx| {
3303 if let Some(s) = status.clone() {
3304 this.update(cx, |this, cx| {
3305 this.active_jobs.insert(
3306 job_id,
3307 JobInfo {
3308 start: Instant::now(),
3309 message: s.clone(),
3310 },
3311 );
3312
3313 cx.notify();
3314 })
3315 .ok();
3316 }
3317 let result = job.await;
3318
3319 this.update(cx, |this, cx| {
3320 this.active_jobs.remove(&job_id);
3321 cx.notify();
3322 })
3323 .ok();
3324
3325 result_tx.send(result).ok();
3326 })
3327 }),
3328 })
3329 .ok();
3330 result_rx
3331 }
3332
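    /// Marks this repository as the active one in its [`GitStore`], emitting
    /// [`GitStoreEvent::ActiveRepositoryChanged`].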
3333 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3334 let Some(git_store) = self.git_store.upgrade() else {
3335 return;
3336 };
3337 let entity = cx.entity();
3338 git_store.update(cx, |git_store, cx| {
3339 let Some((&id, _)) = git_store
3340 .repositories
3341 .iter()
3342 .find(|(_, handle)| *handle == &entity)
3343 else {
3344 return;
3345 };
3346 git_store.active_repo_id = Some(id);
3347 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3348 });
3349 }
3350
3351 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3352 self.snapshot.status()
3353 }
3354
3355 pub fn cached_stash(&self) -> GitStash {
3356 self.snapshot.stash_entries.clone()
3357 }
3358
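    /// Maps a repository-relative path to the corresponding [`ProjectPath`],
    /// if the file lives inside one of the project's worktrees.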
3359 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3360 let git_store = self.git_store.upgrade()?;
3361 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3362 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3363 let abs_path = SanitizedPath::new(&abs_path);
3364 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3365 Some(ProjectPath {
3366 worktree_id: worktree.read(cx).id(),
3367 path: relative_path,
3368 })
3369 }
3370
3371 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3372 let git_store = self.git_store.upgrade()?;
3373 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3374 let abs_path = worktree_store.absolutize(path, cx)?;
3375 self.snapshot.abs_path_to_repo_path(&abs_path)
3376 }
3377
3378 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3379 other
3380 .read(cx)
3381 .snapshot
3382 .work_directory_abs_path
3383 .starts_with(&self.snapshot.work_directory_abs_path)
3384 }
3385
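    /// Returns the buffer used for composing commit messages, creating it
    /// (locally, or via the remote peer) on first use and caching it afterwards.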
3386 pub fn open_commit_buffer(
3387 &mut self,
3388 languages: Option<Arc<LanguageRegistry>>,
3389 buffer_store: Entity<BufferStore>,
3390 cx: &mut Context<Self>,
3391 ) -> Task<Result<Entity<Buffer>>> {
3392 let id = self.id;
3393 if let Some(buffer) = self.commit_message_buffer.clone() {
3394 return Task::ready(Ok(buffer));
3395 }
3396 let this = cx.weak_entity();
3397
3398 let rx = self.send_job(None, move |state, mut cx| async move {
3399 let Some(this) = this.upgrade() else {
3400 bail!("git store was dropped");
3401 };
3402 match state {
3403 RepositoryState::Local { .. } => {
3404 this.update(&mut cx, |_, cx| {
3405 Self::open_local_commit_buffer(languages, buffer_store, cx)
3406 })?
3407 .await
3408 }
3409 RepositoryState::Remote { project_id, client } => {
3410 let request = client.request(proto::OpenCommitMessageBuffer {
3411 project_id: project_id.0,
3412 repository_id: id.to_proto(),
3413 });
3414 let response = request.await.context("requesting to open commit buffer")?;
3415 let buffer_id = BufferId::new(response.buffer_id)?;
3416 let buffer = buffer_store
3417 .update(&mut cx, |buffer_store, cx| {
3418 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3419 })?
3420 .await?;
3421 if let Some(language_registry) = languages {
3422 let git_commit_language =
3423 language_registry.language_for_name("Git Commit").await?;
3424 buffer.update(&mut cx, |buffer, cx| {
3425 buffer.set_language(Some(git_commit_language), cx);
3426 })?;
3427 }
3428 this.update(&mut cx, |this, _| {
3429 this.commit_message_buffer = Some(buffer.clone());
3430 })?;
3431 Ok(buffer)
3432 }
3433 }
3434 });
3435
3436 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3437 }
3438
3439 fn open_local_commit_buffer(
3440 language_registry: Option<Arc<LanguageRegistry>>,
3441 buffer_store: Entity<BufferStore>,
3442 cx: &mut Context<Self>,
3443 ) -> Task<Result<Entity<Buffer>>> {
3444 cx.spawn(async move |repository, cx| {
3445 let buffer = buffer_store
3446 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3447 .await?;
3448
3449 if let Some(language_registry) = language_registry {
3450 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3451 buffer.update(cx, |buffer, cx| {
3452 buffer.set_language(Some(git_commit_language), cx);
3453 })?;
3454 }
3455
3456 repository.update(cx, |repository, _| {
3457 repository.commit_message_buffer = Some(buffer.clone());
3458 })?;
3459 Ok(buffer)
3460 })
3461 }
3462
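    /// Checks out the given paths from `commit`, overwriting their contents in
    /// the working tree.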
3463 pub fn checkout_files(
3464 &mut self,
3465 commit: &str,
3466 paths: Vec<RepoPath>,
3467 _cx: &mut App,
3468 ) -> oneshot::Receiver<Result<()>> {
3469 let commit = commit.to_string();
3470 let id = self.id;
3471
3472 self.send_job(
3473 Some(format!("git checkout {}", commit).into()),
3474 move |git_repo, _| async move {
3475 match git_repo {
3476 RepositoryState::Local {
3477 backend,
3478 environment,
3479 ..
3480 } => {
3481 backend
3482 .checkout_files(commit, paths, environment.clone())
3483 .await
3484 }
3485 RepositoryState::Remote { project_id, client } => {
3486 client
3487 .request(proto::GitCheckoutFiles {
3488 project_id: project_id.0,
3489 repository_id: id.to_proto(),
3490 commit,
3491 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3492 })
3493 .await?;
3494
3495 Ok(())
3496 }
3497 }
3498 },
3499 )
3500 }
3501
3502 pub fn reset(
3503 &mut self,
3504 commit: String,
3505 reset_mode: ResetMode,
3506 _cx: &mut App,
3507 ) -> oneshot::Receiver<Result<()>> {
3508 let id = self.id;
3509
3510 self.send_job(None, move |git_repo, _| async move {
3511 match git_repo {
3512 RepositoryState::Local {
3513 backend,
3514 environment,
3515 ..
3516 } => backend.reset(commit, reset_mode, environment).await,
3517 RepositoryState::Remote { project_id, client } => {
3518 client
3519 .request(proto::GitReset {
3520 project_id: project_id.0,
3521 repository_id: id.to_proto(),
3522 commit,
3523 mode: match reset_mode {
3524 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3525 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3526 },
3527 })
3528 .await?;
3529
3530 Ok(())
3531 }
3532 }
3533 })
3534 }
3535
3536 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3537 let id = self.id;
3538 self.send_job(None, move |git_repo, _cx| async move {
3539 match git_repo {
3540 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3541 RepositoryState::Remote { project_id, client } => {
3542 let resp = client
3543 .request(proto::GitShow {
3544 project_id: project_id.0,
3545 repository_id: id.to_proto(),
3546 commit,
3547 })
3548 .await?;
3549
3550 Ok(CommitDetails {
3551 sha: resp.sha.into(),
3552 message: resp.message.into(),
3553 commit_timestamp: resp.commit_timestamp,
3554 author_email: resp.author_email.into(),
3555 author_name: resp.author_name.into(),
3556 })
3557 }
3558 }
3559 })
3560 }
3561
3562 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3563 let id = self.id;
3564 self.send_job(None, move |git_repo, cx| async move {
3565 match git_repo {
3566 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3567 RepositoryState::Remote {
3568 client, project_id, ..
3569 } => {
3570 let response = client
3571 .request(proto::LoadCommitDiff {
3572 project_id: project_id.0,
3573 repository_id: id.to_proto(),
3574 commit,
3575 })
3576 .await?;
3577 Ok(CommitDiff {
3578 files: response
3579 .files
3580 .into_iter()
3581 .map(|file| {
3582 Ok(CommitFile {
3583 path: RepoPath::from_proto(&file.path)?,
3584 old_text: file.old_text,
3585 new_text: file.new_text,
3586 })
3587 })
3588 .collect::<Result<Vec<_>>>()?,
3589 })
3590 }
3591 }
3592 })
3593 }
3594
3595 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3596 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3597 }
3598
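    /// Stages the given paths, first saving any of their open buffers that have
    /// unsaved edits so the index reflects the latest contents.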
3599 pub fn stage_entries(
3600 &self,
3601 entries: Vec<RepoPath>,
3602 cx: &mut Context<Self>,
3603 ) -> Task<anyhow::Result<()>> {
3604 if entries.is_empty() {
3605 return Task::ready(Ok(()));
3606 }
3607 let id = self.id;
3608
3609 let mut save_futures = Vec::new();
3610 if let Some(buffer_store) = self.buffer_store(cx) {
3611 buffer_store.update(cx, |buffer_store, cx| {
3612 for path in &entries {
3613 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3614 continue;
3615 };
3616 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3617 && buffer
3618 .read(cx)
3619 .file()
3620 .is_some_and(|file| file.disk_state().exists())
3621 && buffer.read(cx).has_unsaved_edits()
3622 {
3623 save_futures.push(buffer_store.save_buffer(buffer, cx));
3624 }
3625 }
3626 })
3627 }
3628
3629 cx.spawn(async move |this, cx| {
3630 for save_future in save_futures {
3631 save_future.await?;
3632 }
3633
3634 this.update(cx, |this, _| {
3635 this.send_job(None, move |git_repo, _cx| async move {
3636 match git_repo {
3637 RepositoryState::Local {
3638 backend,
3639 environment,
3640 ..
3641 } => backend.stage_paths(entries, environment.clone()).await,
3642 RepositoryState::Remote { project_id, client } => {
3643 client
3644 .request(proto::Stage {
3645 project_id: project_id.0,
3646 repository_id: id.to_proto(),
3647 paths: entries
3648 .into_iter()
3649 .map(|repo_path| repo_path.to_proto())
3650 .collect(),
3651 })
3652 .await
3653 .context("sending stage request")?;
3654
3655 Ok(())
3656 }
3657 }
3658 })
3659 })?
3660 .await??;
3661
3662 Ok(())
3663 })
3664 }
3665
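    /// Unstages the given paths, first saving any of their open buffers that
    /// have unsaved edits.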
3666 pub fn unstage_entries(
3667 &self,
3668 entries: Vec<RepoPath>,
3669 cx: &mut Context<Self>,
3670 ) -> Task<anyhow::Result<()>> {
3671 if entries.is_empty() {
3672 return Task::ready(Ok(()));
3673 }
3674 let id = self.id;
3675
3676 let mut save_futures = Vec::new();
3677 if let Some(buffer_store) = self.buffer_store(cx) {
3678 buffer_store.update(cx, |buffer_store, cx| {
3679 for path in &entries {
3680 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3681 continue;
3682 };
3683 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3684 && buffer
3685 .read(cx)
3686 .file()
3687 .is_some_and(|file| file.disk_state().exists())
3688 && buffer.read(cx).has_unsaved_edits()
3689 {
3690 save_futures.push(buffer_store.save_buffer(buffer, cx));
3691 }
3692 }
3693 })
3694 }
3695
3696 cx.spawn(async move |this, cx| {
3697 for save_future in save_futures {
3698 save_future.await?;
3699 }
3700
3701 this.update(cx, |this, _| {
3702 this.send_job(None, move |git_repo, _cx| async move {
3703 match git_repo {
3704 RepositoryState::Local {
3705 backend,
3706 environment,
3707 ..
3708 } => backend.unstage_paths(entries, environment).await,
3709 RepositoryState::Remote { project_id, client } => {
3710 client
3711 .request(proto::Unstage {
3712 project_id: project_id.0,
3713 repository_id: id.to_proto(),
3714 paths: entries
3715 .into_iter()
3716 .map(|repo_path| repo_path.to_proto())
3717 .collect(),
3718 })
3719 .await
3720 .context("sending unstage request")?;
3721
3722 Ok(())
3723 }
3724 }
3725 })
3726 })?
3727 .await??;
3728
3729 Ok(())
3730 })
3731 }
3732
3733 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3734 let to_stage = self
3735 .cached_status()
3736 .filter(|entry| !entry.status.staging().is_fully_staged())
3737 .map(|entry| entry.repo_path)
3738 .collect();
3739 self.stage_entries(to_stage, cx)
3740 }
3741
3742 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3743 let to_unstage = self
3744 .cached_status()
3745 .filter(|entry| entry.status.staging().has_staged())
3746 .map(|entry| entry.repo_path)
3747 .collect();
3748 self.unstage_entries(to_unstage, cx)
3749 }
3750
3751 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3752 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3753
3754 self.stash_entries(to_stash, cx)
3755 }
3756
3757 pub fn stash_entries(
3758 &mut self,
3759 entries: Vec<RepoPath>,
3760 cx: &mut Context<Self>,
3761 ) -> Task<anyhow::Result<()>> {
3762 let id = self.id;
3763
3764 cx.spawn(async move |this, cx| {
3765 this.update(cx, |this, _| {
3766 this.send_job(None, move |git_repo, _cx| async move {
3767 match git_repo {
3768 RepositoryState::Local {
3769 backend,
3770 environment,
3771 ..
3772 } => backend.stash_paths(entries, environment).await,
3773 RepositoryState::Remote { project_id, client } => {
3774 client
3775 .request(proto::Stash {
3776 project_id: project_id.0,
3777 repository_id: id.to_proto(),
3778 paths: entries
3779 .into_iter()
3780 .map(|repo_path| repo_path.to_proto())
3781 .collect(),
3782 })
3783 .await
3784 .context("sending stash request")?;
3785 Ok(())
3786 }
3787 }
3788 })
3789 })?
3790 .await??;
3791 Ok(())
3792 })
3793 }
3794
3795 pub fn stash_pop(
3796 &mut self,
3797 index: Option<usize>,
3798 cx: &mut Context<Self>,
3799 ) -> Task<anyhow::Result<()>> {
3800 let id = self.id;
3801 cx.spawn(async move |this, cx| {
3802 this.update(cx, |this, _| {
3803 this.send_job(None, move |git_repo, _cx| async move {
3804 match git_repo {
3805 RepositoryState::Local {
3806 backend,
3807 environment,
3808 ..
3809 } => backend.stash_pop(index, environment).await,
3810 RepositoryState::Remote { project_id, client } => {
3811 client
3812 .request(proto::StashPop {
3813 project_id: project_id.0,
3814 repository_id: id.to_proto(),
3815 stash_index: index.map(|i| i as u64),
3816 })
3817 .await
3818 .context("sending stash pop request")?;
3819 Ok(())
3820 }
3821 }
3822 })
3823 })?
3824 .await??;
3825 Ok(())
3826 })
3827 }
3828
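    /// Applies the stash entry at `index` (the latest entry if `index` is
    /// `None`) without dropping it from the stash list.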
3829 pub fn stash_apply(
3830 &mut self,
3831 index: Option<usize>,
3832 cx: &mut Context<Self>,
3833 ) -> Task<anyhow::Result<()>> {
3834 let id = self.id;
3835 cx.spawn(async move |this, cx| {
3836 this.update(cx, |this, _| {
3837 this.send_job(None, move |git_repo, _cx| async move {
3838 match git_repo {
3839 RepositoryState::Local {
3840 backend,
3841 environment,
3842 ..
3843 } => backend.stash_apply(index, environment).await,
3844 RepositoryState::Remote { project_id, client } => {
3845 client
3846 .request(proto::StashApply {
3847 project_id: project_id.0,
3848 repository_id: id.to_proto(),
3849 stash_index: index.map(|i| i as u64),
3850 })
3851 .await
3852 .context("sending stash apply request")?;
3853 Ok(())
3854 }
3855 }
3856 })
3857 })?
3858 .await??;
3859 Ok(())
3860 })
3861 }
3862
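    /// Drops the stash entry at `index`, or the latest entry if `index` is `None`.
    ///
    /// After a successful local drop, the stash list is re-read, the snapshot is
    /// updated, and the change is forwarded to any downstream clients.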
3863 pub fn stash_drop(
3864 &mut self,
3865 index: Option<usize>,
3866 cx: &mut Context<Self>,
3867 ) -> oneshot::Receiver<anyhow::Result<()>> {
3868 let id = self.id;
3869 let updates_tx = self
3870 .git_store()
3871 .and_then(|git_store| match &git_store.read(cx).state {
3872 GitStoreState::Local { downstream, .. } => downstream
3873 .as_ref()
3874 .map(|downstream| downstream.updates_tx.clone()),
3875 _ => None,
3876 });
3877 let this = cx.weak_entity();
3878 self.send_job(None, move |git_repo, mut cx| async move {
3879 match git_repo {
3880 RepositoryState::Local {
3881 backend,
3882 environment,
3883 ..
3884 } => {
                    // TODO: it would be nice not to have to refresh the stash list manually here.
3886 let result = backend.stash_drop(index, environment).await;
3887 if result.is_ok()
3888 && let Ok(stash_entries) = backend.stash_entries().await
3889 {
3890 let snapshot = this.update(&mut cx, |this, cx| {
3891 this.snapshot.stash_entries = stash_entries;
3892 cx.emit(RepositoryEvent::StashEntriesChanged);
3893 this.snapshot.clone()
3894 })?;
3895 if let Some(updates_tx) = updates_tx {
3896 updates_tx
3897 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3898 .ok();
3899 }
3900 }
3901
3902 result
3903 }
3904 RepositoryState::Remote { project_id, client } => {
3905 client
3906 .request(proto::StashDrop {
3907 project_id: project_id.0,
3908 repository_id: id.to_proto(),
3909 stash_index: index.map(|i| i as u64),
3910 })
3911 .await
3912 .context("sending stash pop request")?;
3913 Ok(())
3914 }
3915 }
3916 })
3917 }
3918
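    /// Creates a commit with the given message and options, optionally
    /// overriding the author name and email.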
3919 pub fn commit(
3920 &mut self,
3921 message: SharedString,
3922 name_and_email: Option<(SharedString, SharedString)>,
3923 options: CommitOptions,
3924 _cx: &mut App,
3925 ) -> oneshot::Receiver<Result<()>> {
3926 let id = self.id;
3927
3928 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3929 match git_repo {
3930 RepositoryState::Local {
3931 backend,
3932 environment,
3933 ..
3934 } => {
3935 backend
3936 .commit(message, name_and_email, options, environment)
3937 .await
3938 }
3939 RepositoryState::Remote { project_id, client } => {
3940 let (name, email) = name_and_email.unzip();
3941 client
3942 .request(proto::Commit {
3943 project_id: project_id.0,
3944 repository_id: id.to_proto(),
3945 message: String::from(message),
3946 name: name.map(String::from),
3947 email: email.map(String::from),
3948 options: Some(proto::commit::CommitOptions {
3949 amend: options.amend,
3950 signoff: options.signoff,
3951 }),
3952 })
3953 .await
3954 .context("sending commit request")?;
3955
3956 Ok(())
3957 }
3958 }
3959 })
3960 }
3961
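    /// Runs `git fetch` with the given options, using `askpass` to answer any
    /// credential prompts. For remote projects the request is forwarded over
    /// RPC, with the askpass delegate registered for the duration of the call.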
3962 pub fn fetch(
3963 &mut self,
3964 fetch_options: FetchOptions,
3965 askpass: AskPassDelegate,
3966 _cx: &mut App,
3967 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3968 let askpass_delegates = self.askpass_delegates.clone();
3969 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3970 let id = self.id;
3971
3972 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3973 match git_repo {
3974 RepositoryState::Local {
3975 backend,
3976 environment,
3977 ..
3978 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3979 RepositoryState::Remote { project_id, client } => {
3980 askpass_delegates.lock().insert(askpass_id, askpass);
3981 let _defer = util::defer(|| {
3982 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3983 debug_assert!(askpass_delegate.is_some());
3984 });
3985
3986 let response = client
3987 .request(proto::Fetch {
3988 project_id: project_id.0,
3989 repository_id: id.to_proto(),
3990 askpass_id,
3991 remote: fetch_options.to_proto(),
3992 })
3993 .await
3994 .context("sending fetch request")?;
3995
3996 Ok(RemoteCommandOutput {
3997 stdout: response.stdout,
3998 stderr: response.stderr,
3999 })
4000 }
4001 }
4002 })
4003 }
4004
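    /// Pushes `branch` to `remote`, optionally setting the upstream or forcing
    /// with `--force-with-lease`. After a successful local push, the head
    /// branch is re-read and the updated snapshot is sent downstream.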
4005 pub fn push(
4006 &mut self,
4007 branch: SharedString,
4008 remote: SharedString,
4009 options: Option<PushOptions>,
4010 askpass: AskPassDelegate,
4011 cx: &mut Context<Self>,
4012 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4013 let askpass_delegates = self.askpass_delegates.clone();
4014 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4015 let id = self.id;
4016
4017 let args = options
4018 .map(|option| match option {
4019 PushOptions::SetUpstream => " --set-upstream",
4020 PushOptions::Force => " --force-with-lease",
4021 })
4022 .unwrap_or("");
4023
4024 let updates_tx = self
4025 .git_store()
4026 .and_then(|git_store| match &git_store.read(cx).state {
4027 GitStoreState::Local { downstream, .. } => downstream
4028 .as_ref()
4029 .map(|downstream| downstream.updates_tx.clone()),
4030 _ => None,
4031 });
4032
4033 let this = cx.weak_entity();
4034 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
4036 move |git_repo, mut cx| async move {
4037 match git_repo {
4038 RepositoryState::Local {
4039 backend,
4040 environment,
4041 ..
4042 } => {
4043 let result = backend
4044 .push(
4045 branch.to_string(),
4046 remote.to_string(),
4047 options,
4048 askpass,
4049 environment.clone(),
4050 cx.clone(),
4051 )
4052 .await;
                        // TODO: it would be nice not to have to re-read the branch state manually after pushing.
4054 if result.is_ok() {
4055 let branches = backend.branches().await?;
4056 let branch = branches.into_iter().find(|branch| branch.is_head);
                            log::info!("head branch after push is {branch:?}");
4058 let snapshot = this.update(&mut cx, |this, cx| {
4059 this.snapshot.branch = branch;
4060 cx.emit(RepositoryEvent::BranchChanged);
4061 this.snapshot.clone()
4062 })?;
4063 if let Some(updates_tx) = updates_tx {
4064 updates_tx
4065 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4066 .ok();
4067 }
4068 }
4069 result
4070 }
4071 RepositoryState::Remote { project_id, client } => {
4072 askpass_delegates.lock().insert(askpass_id, askpass);
4073 let _defer = util::defer(|| {
4074 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4075 debug_assert!(askpass_delegate.is_some());
4076 });
4077 let response = client
4078 .request(proto::Push {
4079 project_id: project_id.0,
4080 repository_id: id.to_proto(),
4081 askpass_id,
4082 branch_name: branch.to_string(),
4083 remote_name: remote.to_string(),
4084 options: options.map(|options| match options {
4085 PushOptions::Force => proto::push::PushOptions::Force,
4086 PushOptions::SetUpstream => {
4087 proto::push::PushOptions::SetUpstream
4088 }
4089 }
4090 as i32),
4091 })
4092 .await
4093 .context("sending push request")?;
4094
4095 Ok(RemoteCommandOutput {
4096 stdout: response.stdout,
4097 stderr: response.stderr,
4098 })
4099 }
4100 }
4101 },
4102 )
4103 }
4104
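    /// Pulls `branch` from `remote`, using `askpass` to answer any credential
    /// prompts.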
4105 pub fn pull(
4106 &mut self,
4107 branch: SharedString,
4108 remote: SharedString,
4109 askpass: AskPassDelegate,
4110 _cx: &mut App,
4111 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4112 let askpass_delegates = self.askpass_delegates.clone();
4113 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4114 let id = self.id;
4115
4116 self.send_job(
4117 Some(format!("git pull {} {}", remote, branch).into()),
4118 move |git_repo, cx| async move {
4119 match git_repo {
4120 RepositoryState::Local {
4121 backend,
4122 environment,
4123 ..
4124 } => {
4125 backend
4126 .pull(
4127 branch.to_string(),
4128 remote.to_string(),
4129 askpass,
4130 environment.clone(),
4131 cx,
4132 )
4133 .await
4134 }
4135 RepositoryState::Remote { project_id, client } => {
4136 askpass_delegates.lock().insert(askpass_id, askpass);
4137 let _defer = util::defer(|| {
4138 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4139 debug_assert!(askpass_delegate.is_some());
4140 });
4141 let response = client
4142 .request(proto::Pull {
4143 project_id: project_id.0,
4144 repository_id: id.to_proto(),
4145 askpass_id,
4146 branch_name: branch.to_string(),
4147 remote_name: remote.to_string(),
4148 })
4149 .await
4150 .context("sending pull request")?;
4151
4152 Ok(RemoteCommandOutput {
4153 stdout: response.stdout,
4154 stderr: response.stderr,
4155 })
4156 }
4157 }
4158 },
4159 )
4160 }
4161
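    /// Queues a keyed job that writes `content` as the index text for `path`.
    ///
    /// Jobs are keyed by path so that superseded writes to the same index entry
    /// are skipped. After the write, the buffer's diff state records the hunk
    /// staging operation count that this write corresponds to.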
4162 fn spawn_set_index_text_job(
4163 &mut self,
4164 path: RepoPath,
4165 content: Option<String>,
4166 hunk_staging_operation_count: Option<usize>,
4167 cx: &mut Context<Self>,
4168 ) -> oneshot::Receiver<anyhow::Result<()>> {
4169 let id = self.id;
4170 let this = cx.weak_entity();
4171 let git_store = self.git_store.clone();
4172 self.send_keyed_job(
4173 Some(GitJobKey::WriteIndex(path.clone())),
4174 None,
4175 move |git_repo, mut cx| async move {
4176 log::debug!(
4177 "start updating index text for buffer {}",
4178 path.as_unix_str()
4179 );
4180 match git_repo {
4181 RepositoryState::Local {
4182 backend,
4183 environment,
4184 ..
4185 } => {
4186 backend
4187 .set_index_text(path.clone(), content, environment.clone())
4188 .await?;
4189 }
4190 RepositoryState::Remote { project_id, client } => {
4191 client
4192 .request(proto::SetIndexText {
4193 project_id: project_id.0,
4194 repository_id: id.to_proto(),
4195 path: path.to_proto(),
4196 text: content,
4197 })
4198 .await?;
4199 }
4200 }
4201 log::debug!(
4202 "finish updating index text for buffer {}",
4203 path.as_unix_str()
4204 );
4205
4206 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4207 let project_path = this
4208 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4209 .ok()
4210 .flatten();
4211 git_store.update(&mut cx, |git_store, cx| {
4212 let buffer_id = git_store
4213 .buffer_store
4214 .read(cx)
4215 .get_by_path(&project_path?)?
4216 .read(cx)
4217 .remote_id();
4218 let diff_state = git_store.diffs.get(&buffer_id)?;
4219 diff_state.update(cx, |diff_state, _| {
4220 diff_state.hunk_staging_operation_count_as_of_write =
4221 hunk_staging_operation_count;
4222 });
4223 Some(())
4224 })?;
4225 }
4226 Ok(())
4227 },
4228 )
4229 }
4230
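    /// Returns the repository's remotes, optionally scoped to `branch_name`.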
4231 pub fn get_remotes(
4232 &mut self,
4233 branch_name: Option<String>,
4234 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4235 let id = self.id;
4236 self.send_job(None, move |repo, _cx| async move {
4237 match repo {
4238 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4239 RepositoryState::Remote { project_id, client } => {
4240 let response = client
4241 .request(proto::GetRemotes {
4242 project_id: project_id.0,
4243 repository_id: id.to_proto(),
4244 branch_name,
4245 })
4246 .await?;
4247
4248 let remotes = response
4249 .remotes
4250 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
4254 .collect();
4255
4256 Ok(remotes)
4257 }
4258 }
4259 })
4260 }
4261
4262 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4263 let id = self.id;
4264 self.send_job(None, move |repo, _| async move {
4265 match repo {
4266 RepositoryState::Local { backend, .. } => backend.branches().await,
4267 RepositoryState::Remote { project_id, client } => {
4268 let response = client
4269 .request(proto::GitGetBranches {
4270 project_id: project_id.0,
4271 repository_id: id.to_proto(),
4272 })
4273 .await?;
4274
4275 let branches = response
4276 .branches
4277 .into_iter()
4278 .map(|branch| proto_to_branch(&branch))
4279 .collect();
4280
4281 Ok(branches)
4282 }
4283 }
4284 })
4285 }
4286
4287 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4288 let id = self.id;
4289 self.send_job(None, move |repo, _| async move {
4290 match repo {
4291 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4292 RepositoryState::Remote { project_id, client } => {
4293 let response = client
4294 .request(proto::GetDefaultBranch {
4295 project_id: project_id.0,
4296 repository_id: id.to_proto(),
4297 })
4298 .await?;
4299
4300 anyhow::Ok(response.branch.map(SharedString::from))
4301 }
4302 }
4303 })
4304 }
4305
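    /// Produces a diff for the whole repository, either from HEAD to the index
    /// or from HEAD to the working tree, depending on `diff_type`.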
4306 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4307 let id = self.id;
4308 self.send_job(None, move |repo, _cx| async move {
4309 match repo {
4310 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4311 RepositoryState::Remote { project_id, client } => {
4312 let response = client
4313 .request(proto::GitDiff {
4314 project_id: project_id.0,
4315 repository_id: id.to_proto(),
4316 diff_type: match diff_type {
4317 DiffType::HeadToIndex => {
4318 proto::git_diff::DiffType::HeadToIndex.into()
4319 }
4320 DiffType::HeadToWorktree => {
4321 proto::git_diff::DiffType::HeadToWorktree.into()
4322 }
4323 },
4324 })
4325 .await?;
4326
4327 Ok(response.diff)
4328 }
4329 }
4330 })
4331 }
4332
4333 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4334 let id = self.id;
4335 self.send_job(
4336 Some(format!("git switch -c {branch_name}").into()),
4337 move |repo, _cx| async move {
4338 match repo {
4339 RepositoryState::Local { backend, .. } => {
4340 backend.create_branch(branch_name).await
4341 }
4342 RepositoryState::Remote { project_id, client } => {
4343 client
4344 .request(proto::GitCreateBranch {
4345 project_id: project_id.0,
4346 repository_id: id.to_proto(),
4347 branch_name,
4348 })
4349 .await?;
4350
4351 Ok(())
4352 }
4353 }
4354 },
4355 )
4356 }
4357
4358 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4359 let id = self.id;
4360 self.send_job(
4361 Some(format!("git switch {branch_name}").into()),
4362 move |repo, _cx| async move {
4363 match repo {
4364 RepositoryState::Local { backend, .. } => {
4365 backend.change_branch(branch_name).await
4366 }
4367 RepositoryState::Remote { project_id, client } => {
4368 client
4369 .request(proto::GitChangeBranch {
4370 project_id: project_id.0,
4371 repository_id: id.to_proto(),
4372 branch_name,
4373 })
4374 .await?;
4375
4376 Ok(())
4377 }
4378 }
4379 },
4380 )
4381 }
4382
4383 pub fn rename_branch(
4384 &mut self,
4385 branch: String,
4386 new_name: String,
4387 ) -> oneshot::Receiver<Result<()>> {
4388 let id = self.id;
4389 self.send_job(
4390 Some(format!("git branch -m {branch} {new_name}").into()),
4391 move |repo, _cx| async move {
4392 match repo {
4393 RepositoryState::Local { backend, .. } => {
4394 backend.rename_branch(branch, new_name).await
4395 }
4396 RepositoryState::Remote { project_id, client } => {
4397 client
4398 .request(proto::GitRenameBranch {
4399 project_id: project_id.0,
4400 repository_id: id.to_proto(),
4401 branch,
4402 new_name,
4403 })
4404 .await?;
4405
4406 Ok(())
4407 }
4408 }
4409 },
4410 )
4411 }
4412
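    /// Returns the remote branches that already contain the current HEAD commit.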
4413 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4414 let id = self.id;
4415 self.send_job(None, move |repo, _cx| async move {
4416 match repo {
4417 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4418 RepositoryState::Remote { project_id, client } => {
4419 let response = client
4420 .request(proto::CheckForPushedCommits {
4421 project_id: project_id.0,
4422 repository_id: id.to_proto(),
4423 })
4424 .await?;
4425
4426 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4427
4428 Ok(branches)
4429 }
4430 }
4431 })
4432 }
4433
4434 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4435 self.send_job(None, |repo, _cx| async move {
4436 match repo {
4437 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4438 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4439 }
4440 })
4441 }
4442
4443 pub fn restore_checkpoint(
4444 &mut self,
4445 checkpoint: GitRepositoryCheckpoint,
4446 ) -> oneshot::Receiver<Result<()>> {
4447 self.send_job(None, move |repo, _cx| async move {
4448 match repo {
4449 RepositoryState::Local { backend, .. } => {
4450 backend.restore_checkpoint(checkpoint).await
4451 }
4452 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4453 }
4454 })
4455 }
4456
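    /// Applies a `proto::UpdateRepository` message received from the host,
    /// updating the branch, head commit, merge state, stash entries, and
    /// per-path statuses in the snapshot and emitting the corresponding events.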
4457 pub(crate) fn apply_remote_update(
4458 &mut self,
4459 update: proto::UpdateRepository,
4460 cx: &mut Context<Self>,
4461 ) -> Result<()> {
4462 let conflicted_paths = TreeSet::from_ordered_entries(
4463 update
4464 .current_merge_conflicts
4465 .into_iter()
4466 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4467 );
4468 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4469 let new_head_commit = update
4470 .head_commit_details
4471 .as_ref()
4472 .map(proto_to_commit_details);
4473 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4474 cx.emit(RepositoryEvent::BranchChanged)
4475 }
4476 self.snapshot.branch = new_branch;
4477 self.snapshot.head_commit = new_head_commit;
4478
4479 self.snapshot.merge.conflicted_paths = conflicted_paths;
4480 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4481 let new_stash_entries = GitStash {
4482 entries: update
4483 .stash_entries
4484 .iter()
4485 .filter_map(|entry| proto_to_stash(entry).ok())
4486 .collect(),
4487 };
4488 if self.snapshot.stash_entries != new_stash_entries {
4489 cx.emit(RepositoryEvent::StashEntriesChanged)
4490 }
4491 self.snapshot.stash_entries = new_stash_entries;
4492
4493 let edits = update
4494 .removed_statuses
4495 .into_iter()
4496 .filter_map(|path| {
4497 Some(sum_tree::Edit::Remove(PathKey(
4498 RelPath::from_proto(&path).log_err()?,
4499 )))
4500 })
4501 .chain(
4502 update
4503 .updated_statuses
4504 .into_iter()
4505 .filter_map(|updated_status| {
4506 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4507 }),
4508 )
4509 .collect::<Vec<_>>();
4510 if !edits.is_empty() {
4511 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4512 }
4513 self.snapshot.statuses_by_path.edit(edits, ());
4514 if update.is_last_update {
4515 self.snapshot.scan_id = update.scan_id;
4516 }
4517 Ok(())
4518 }
4519
4520 pub fn compare_checkpoints(
4521 &mut self,
4522 left: GitRepositoryCheckpoint,
4523 right: GitRepositoryCheckpoint,
4524 ) -> oneshot::Receiver<Result<bool>> {
4525 self.send_job(None, move |repo, _cx| async move {
4526 match repo {
4527 RepositoryState::Local { backend, .. } => {
4528 backend.compare_checkpoints(left, right).await
4529 }
4530 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4531 }
4532 })
4533 }
4534
4535 pub fn diff_checkpoints(
4536 &mut self,
4537 base_checkpoint: GitRepositoryCheckpoint,
4538 target_checkpoint: GitRepositoryCheckpoint,
4539 ) -> oneshot::Receiver<Result<String>> {
4540 self.send_job(None, move |repo, _cx| async move {
4541 match repo {
4542 RepositoryState::Local { backend, .. } => {
4543 backend
4544 .diff_checkpoints(base_checkpoint, target_checkpoint)
4545 .await
4546 }
4547 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4548 }
4549 })
4550 }
4551
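    /// Schedules a full status scan as a keyed job so that redundant scans
    /// queued behind a newer one are skipped. The resulting snapshot replaces
    /// the current one and is forwarded to downstream clients, if any.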
4552 fn schedule_scan(
4553 &mut self,
4554 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4555 cx: &mut Context<Self>,
4556 ) {
4557 let this = cx.weak_entity();
4558 let _ = self.send_keyed_job(
4559 Some(GitJobKey::ReloadGitState),
4560 None,
4561 |state, mut cx| async move {
4562 log::debug!("run scheduled git status scan");
4563
4564 let Some(this) = this.upgrade() else {
4565 return Ok(());
4566 };
4567 let RepositoryState::Local { backend, .. } = state else {
4568 bail!("not a local repository")
4569 };
4570 let (snapshot, events) = this
4571 .update(&mut cx, |this, _| {
4572 this.paths_needing_status_update.clear();
4573 compute_snapshot(
4574 this.id,
4575 this.work_directory_abs_path.clone(),
4576 this.snapshot.clone(),
4577 backend.clone(),
4578 )
4579 })?
4580 .await?;
4581 this.update(&mut cx, |this, cx| {
4582 this.snapshot = snapshot.clone();
4583 for event in events {
4584 cx.emit(event);
4585 }
4586 })?;
4587 if let Some(updates_tx) = updates_tx {
4588 updates_tx
4589 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4590 .ok();
4591 }
4592 Ok(())
4593 },
4594 );
4595 }
4596
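    /// Spawns the worker task that executes git jobs for a local repository.
    ///
    /// The worker resolves the working directory's shell environment, locates a
    /// system git binary, opens the repository through the `Fs` backend, and
    /// then drains queued jobs one at a time.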
4597 fn spawn_local_git_worker(
4598 work_directory_abs_path: Arc<Path>,
4599 dot_git_abs_path: Arc<Path>,
4600 _repository_dir_abs_path: Arc<Path>,
4601 _common_dir_abs_path: Arc<Path>,
4602 project_environment: WeakEntity<ProjectEnvironment>,
4603 fs: Arc<dyn Fs>,
4604 cx: &mut Context<Self>,
4605 ) -> mpsc::UnboundedSender<GitJob> {
4606 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4607
4608 cx.spawn(async move |_, cx| {
4609 let environment = project_environment
4610 .upgrade()
4611 .context("missing project environment")?
4612 .update(cx, |project_environment, cx| {
                    project_environment.get_local_directory_environment(
                        &Shell::System,
                        work_directory_abs_path.clone(),
                        cx,
                    )
4614 })?
4615 .await
4616 .unwrap_or_else(|| {
4617 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4618 HashMap::default()
4619 });
4620 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4621 let backend = cx
4622 .background_spawn(async move {
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
4625 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4626 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4627 })
4628 .await?;
4629
4630 if let Some(git_hosting_provider_registry) =
4631 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4632 {
4633 git_hosting_providers::register_additional_providers(
4634 git_hosting_provider_registry,
4635 backend.clone(),
4636 );
4637 }
4638
4639 let state = RepositoryState::Local {
4640 backend,
4641 environment: Arc::new(environment),
4642 };
4643 let mut jobs = VecDeque::new();
4644 loop {
4645 while let Ok(Some(next_job)) = job_rx.try_next() {
4646 jobs.push_back(next_job);
4647 }
4648
4649 if let Some(job) = jobs.pop_front() {
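                // If a newer job with the same key is already queued, skip this
                // one so that only the most recent keyed job actually runs.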
4650 if let Some(current_key) = &job.key
4651 && jobs
4652 .iter()
4653 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4654 {
4655 continue;
4656 }
4657 (job.job)(state.clone(), cx).await;
4658 } else if let Some(job) = job_rx.next().await {
4659 jobs.push_back(job);
4660 } else {
4661 break;
4662 }
4663 }
4664 anyhow::Ok(())
4665 })
4666 .detach_and_log_err(cx);
4667
4668 job_tx
4669 }
4670
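    /// Spawns the worker task that forwards git jobs for a remote repository
    /// over RPC, using the same keyed-job deduplication as the local worker.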
4671 fn spawn_remote_git_worker(
4672 project_id: ProjectId,
4673 client: AnyProtoClient,
4674 cx: &mut Context<Self>,
4675 ) -> mpsc::UnboundedSender<GitJob> {
4676 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4677
4678 cx.spawn(async move |_, cx| {
4679 let state = RepositoryState::Remote { project_id, client };
4680 let mut jobs = VecDeque::new();
4681 loop {
4682 while let Ok(Some(next_job)) = job_rx.try_next() {
4683 jobs.push_back(next_job);
4684 }
4685
4686 if let Some(job) = jobs.pop_front() {
4687 if let Some(current_key) = &job.key
4688 && jobs
4689 .iter()
4690 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4691 {
4692 continue;
4693 }
4694 (job.job)(state.clone(), cx).await;
4695 } else if let Some(job) = job_rx.next().await {
4696 jobs.push_back(job);
4697 } else {
4698 break;
4699 }
4700 }
4701 anyhow::Ok(())
4702 })
4703 .detach_and_log_err(cx);
4704
4705 job_tx
4706 }
4707
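    /// Loads the staged (index) text for `repo_path`; for remote projects this
    /// is fetched from the host via `proto::OpenUnstagedDiff`.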
4708 fn load_staged_text(
4709 &mut self,
4710 buffer_id: BufferId,
4711 repo_path: RepoPath,
4712 cx: &App,
4713 ) -> Task<Result<Option<String>>> {
4714 let rx = self.send_job(None, move |state, _| async move {
4715 match state {
4716 RepositoryState::Local { backend, .. } => {
4717 anyhow::Ok(backend.load_index_text(repo_path).await)
4718 }
4719 RepositoryState::Remote { project_id, client } => {
4720 let response = client
4721 .request(proto::OpenUnstagedDiff {
4722 project_id: project_id.to_proto(),
4723 buffer_id: buffer_id.to_proto(),
4724 })
4725 .await?;
4726 Ok(response.staged_text)
4727 }
4728 }
4729 });
4730 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4731 }
4732
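    /// Loads the committed (HEAD) text and the index text for `repo_path`,
    /// collapsing them into a single shared base when they are identical.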
4733 fn load_committed_text(
4734 &mut self,
4735 buffer_id: BufferId,
4736 repo_path: RepoPath,
4737 cx: &App,
4738 ) -> Task<Result<DiffBasesChange>> {
4739 let rx = self.send_job(None, move |state, _| async move {
4740 match state {
4741 RepositoryState::Local { backend, .. } => {
4742 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4743 let staged_text = backend.load_index_text(repo_path).await;
4744 let diff_bases_change = if committed_text == staged_text {
4745 DiffBasesChange::SetBoth(committed_text)
4746 } else {
4747 DiffBasesChange::SetEach {
4748 index: staged_text,
4749 head: committed_text,
4750 }
4751 };
4752 anyhow::Ok(diff_bases_change)
4753 }
4754 RepositoryState::Remote { project_id, client } => {
4755 use proto::open_uncommitted_diff_response::Mode;
4756
4757 let response = client
4758 .request(proto::OpenUncommittedDiff {
4759 project_id: project_id.to_proto(),
4760 buffer_id: buffer_id.to_proto(),
4761 })
4762 .await?;
4763 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4764 let bases = match mode {
4765 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4766 Mode::IndexAndHead => DiffBasesChange::SetEach {
4767 head: response.committed_text,
4768 index: response.staged_text,
4769 },
4770 };
4771 Ok(bases)
4772 }
4773 }
4774 });
4775
4776 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4777 }
4778
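    /// Records that the given paths may have changed and queues a keyed job
    /// that re-runs the status check for just those paths, applying any
    /// resulting edits to the snapshot and forwarding them downstream.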
4779 fn paths_changed(
4780 &mut self,
4781 paths: Vec<RepoPath>,
4782 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4783 cx: &mut Context<Self>,
4784 ) {
4785 self.paths_needing_status_update.extend(paths);
4786
4787 let this = cx.weak_entity();
4788 let _ = self.send_keyed_job(
4789 Some(GitJobKey::RefreshStatuses),
4790 None,
4791 |state, mut cx| async move {
4792 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4793 (
4794 this.snapshot.clone(),
4795 mem::take(&mut this.paths_needing_status_update),
4796 )
4797 })?;
4798 let RepositoryState::Local { backend, .. } = state else {
4799 bail!("not a local repository")
4800 };
4801
4802 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4803 if paths.is_empty() {
4804 return Ok(());
4805 }
4806 let statuses = backend.status(&paths).await?;
4807 let stash_entries = backend.stash_entries().await?;
4808
4809 let changed_path_statuses = cx
4810 .background_spawn(async move {
4811 let mut changed_path_statuses = Vec::new();
4812 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4813 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4814
4815 for (repo_path, status) in &*statuses.entries {
4816 changed_paths.remove(repo_path);
4817 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4818 && cursor.item().is_some_and(|entry| entry.status == *status)
4819 {
4820 continue;
4821 }
4822
4823 changed_path_statuses.push(Edit::Insert(StatusEntry {
4824 repo_path: repo_path.clone(),
4825 status: *status,
4826 }));
4827 }
4828 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4829 for path in changed_paths.into_iter() {
4830 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4831 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4832 }
4833 }
4834 changed_path_statuses
4835 })
4836 .await;
4837
4838 this.update(&mut cx, |this, cx| {
4839 if this.snapshot.stash_entries != stash_entries {
4840 cx.emit(RepositoryEvent::StashEntriesChanged);
4841 this.snapshot.stash_entries = stash_entries;
4842 }
4843
4844 if !changed_path_statuses.is_empty() {
4845 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
4846 this.snapshot
4847 .statuses_by_path
4848 .edit(changed_path_statuses, ());
4849 this.snapshot.scan_id += 1;
4850 }
4851
4852 if let Some(updates_tx) = updates_tx {
4853 updates_tx
4854 .unbounded_send(DownstreamUpdate::UpdateRepository(
4855 this.snapshot.clone(),
4856 ))
4857 .ok();
4858 }
4859 })
4860 },
4861 );
4862 }
4863
    /// Returns the currently running git command, if any, and when it started.
4865 pub fn current_job(&self) -> Option<JobInfo> {
4866 self.active_jobs.values().next().cloned()
4867 }
4868
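    /// Enqueues a no-op job; the returned receiver resolves once every job
    /// queued before it has been processed.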
4869 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4870 self.send_job(None, |_, _| async {})
4871 }
4872}
4873
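/// Builds a permalink for a file that lives inside a crate in the Cargo
/// registry cache, by reading the crate's `.cargo_vcs_info.json` and
/// `Cargo.toml` to recover the upstream repository URL, the commit SHA, and
/// the file's path within that repository.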
4874fn get_permalink_in_rust_registry_src(
4875 provider_registry: Arc<GitHostingProviderRegistry>,
4876 path: PathBuf,
4877 selection: Range<u32>,
4878) -> Result<url::Url> {
4879 #[derive(Deserialize)]
4880 struct CargoVcsGit {
4881 sha1: String,
4882 }
4883
4884 #[derive(Deserialize)]
4885 struct CargoVcsInfo {
4886 git: CargoVcsGit,
4887 path_in_vcs: String,
4888 }
4889
4890 #[derive(Deserialize)]
4891 struct CargoPackage {
4892 repository: String,
4893 }
4894
4895 #[derive(Deserialize)]
4896 struct CargoToml {
4897 package: CargoPackage,
4898 }
4899
4900 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4901 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4902 Some((dir, json))
4903 }) else {
4904 bail!("No .cargo_vcs_info.json found in parent directories")
4905 };
4906 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4907 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4908 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4909 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4910 .context("parsing package.repository field of manifest")?;
4911 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4912 let permalink = provider.build_permalink(
4913 remote,
4914 BuildPermalinkParams::new(
4915 &cargo_vcs_info.git.sha1,
4916 &RepoPath(
4917 RelPath::new(&path, PathStyle::local())
4918 .context("invalid path")?
4919 .into_arc(),
4920 ),
4921 Some(selection),
4922 ),
4923 );
4924 Ok(permalink)
4925}
4926
4927fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4928 let Some(blame) = blame else {
4929 return proto::BlameBufferResponse {
4930 blame_response: None,
4931 };
4932 };
4933
4934 let entries = blame
4935 .entries
4936 .into_iter()
4937 .map(|entry| proto::BlameEntry {
4938 sha: entry.sha.as_bytes().into(),
4939 start_line: entry.range.start,
4940 end_line: entry.range.end,
4941 original_line_number: entry.original_line_number,
4942 author: entry.author,
4943 author_mail: entry.author_mail,
4944 author_time: entry.author_time,
4945 author_tz: entry.author_tz,
4946 committer: entry.committer_name,
4947 committer_mail: entry.committer_email,
4948 committer_time: entry.committer_time,
4949 committer_tz: entry.committer_tz,
4950 summary: entry.summary,
4951 previous: entry.previous,
4952 filename: entry.filename,
4953 })
4954 .collect::<Vec<_>>();
4955
4956 let messages = blame
4957 .messages
4958 .into_iter()
4959 .map(|(oid, message)| proto::CommitMessage {
4960 oid: oid.as_bytes().into(),
4961 message,
4962 })
4963 .collect::<Vec<_>>();
4964
4965 proto::BlameBufferResponse {
4966 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4967 entries,
4968 messages,
4969 remote_url: blame.remote_url,
4970 }),
4971 }
4972}
4973
4974fn deserialize_blame_buffer_response(
4975 response: proto::BlameBufferResponse,
4976) -> Option<git::blame::Blame> {
4977 let response = response.blame_response?;
4978 let entries = response
4979 .entries
4980 .into_iter()
4981 .filter_map(|entry| {
4982 Some(git::blame::BlameEntry {
4983 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4984 range: entry.start_line..entry.end_line,
4985 original_line_number: entry.original_line_number,
4986 committer_name: entry.committer,
4987 committer_time: entry.committer_time,
4988 committer_tz: entry.committer_tz,
4989 committer_email: entry.committer_mail,
4990 author: entry.author,
4991 author_mail: entry.author_mail,
4992 author_time: entry.author_time,
4993 author_tz: entry.author_tz,
4994 summary: entry.summary,
4995 previous: entry.previous,
4996 filename: entry.filename,
4997 })
4998 })
4999 .collect::<Vec<_>>();
5000
5001 let messages = response
5002 .messages
5003 .into_iter()
5004 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5005 .collect::<HashMap<_, _>>();
5006
5007 Some(Blame {
5008 entries,
5009 messages,
5010 remote_url: response.remote_url,
5011 })
5012}
5013
5014fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5015 proto::Branch {
5016 is_head: branch.is_head,
5017 ref_name: branch.ref_name.to_string(),
5018 unix_timestamp: branch
5019 .most_recent_commit
5020 .as_ref()
5021 .map(|commit| commit.commit_timestamp as u64),
5022 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5023 ref_name: upstream.ref_name.to_string(),
5024 tracking: upstream
5025 .tracking
5026 .status()
5027 .map(|upstream| proto::UpstreamTracking {
5028 ahead: upstream.ahead as u64,
5029 behind: upstream.behind as u64,
5030 }),
5031 }),
5032 most_recent_commit: branch
5033 .most_recent_commit
5034 .as_ref()
5035 .map(|commit| proto::CommitSummary {
5036 sha: commit.sha.to_string(),
5037 subject: commit.subject.to_string(),
5038 commit_timestamp: commit.commit_timestamp,
5039 author_name: commit.author_name.to_string(),
5040 }),
5041 }
5042}
5043
5044fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5045 git::repository::Branch {
5046 is_head: proto.is_head,
5047 ref_name: proto.ref_name.clone().into(),
5048 upstream: proto
5049 .upstream
5050 .as_ref()
5051 .map(|upstream| git::repository::Upstream {
5052 ref_name: upstream.ref_name.to_string().into(),
5053 tracking: upstream
5054 .tracking
5055 .as_ref()
5056 .map(|tracking| {
5057 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5058 ahead: tracking.ahead as u32,
5059 behind: tracking.behind as u32,
5060 })
5061 })
5062 .unwrap_or(git::repository::UpstreamTracking::Gone),
5063 }),
5064 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5065 git::repository::CommitSummary {
5066 sha: commit.sha.to_string().into(),
5067 subject: commit.subject.to_string().into(),
5068 commit_timestamp: commit.commit_timestamp,
5069 author_name: commit.author_name.to_string().into(),
5070 has_parent: true,
5071 }
5072 }),
5073 }
5074}
5075
5076fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5077 proto::GitCommitDetails {
5078 sha: commit.sha.to_string(),
5079 message: commit.message.to_string(),
5080 commit_timestamp: commit.commit_timestamp,
5081 author_email: commit.author_email.to_string(),
5082 author_name: commit.author_name.to_string(),
5083 }
5084}
5085
5086fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5087 CommitDetails {
5088 sha: proto.sha.clone().into(),
5089 message: proto.message.clone().into(),
5090 commit_timestamp: proto.commit_timestamp,
5091 author_email: proto.author_email.clone().into(),
5092 author_name: proto.author_name.clone().into(),
5093 }
5094}
5095
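/// Recomputes a repository snapshot from scratch by querying the backend for
/// branches, statuses, stash entries, merge state, and the head commit, and
/// returns it along with the events describing what changed relative to
/// `prev_snapshot`.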
5096async fn compute_snapshot(
5097 id: RepositoryId,
5098 work_directory_abs_path: Arc<Path>,
5099 prev_snapshot: RepositorySnapshot,
5100 backend: Arc<dyn GitRepository>,
5101) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5102 let mut events = Vec::new();
5103 let branches = backend.branches().await?;
5104 let branch = branches.into_iter().find(|branch| branch.is_head);
5105 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5106 let stash_entries = backend.stash_entries().await?;
5107 let statuses_by_path = SumTree::from_iter(
5108 statuses
5109 .entries
5110 .iter()
5111 .map(|(repo_path, status)| StatusEntry {
5112 repo_path: repo_path.clone(),
5113 status: *status,
5114 }),
5115 (),
5116 );
5117 let (merge_details, merge_heads_changed) =
5118 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5119 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5120
5121 if merge_heads_changed {
5122 events.push(RepositoryEvent::MergeHeadsChanged);
5123 }
5124
5125 if statuses_by_path != prev_snapshot.statuses_by_path {
5126 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5127 }
5128
    // Useful when `branch` is `None`, e.g. in a detached HEAD state.
5130 let head_commit = match backend.head_sha().await {
5131 Some(head_sha) => backend.show(head_sha).await.log_err(),
5132 None => None,
5133 };
5134
5135 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5136 events.push(RepositoryEvent::BranchChanged);
5137 }
5138
5139 // Used by edit prediction data collection
5140 let remote_origin_url = backend.remote_url("origin");
5141 let remote_upstream_url = backend.remote_url("upstream");
5142
5143 let snapshot = RepositorySnapshot {
5144 id,
5145 statuses_by_path,
5146 work_directory_abs_path,
5147 path_style: prev_snapshot.path_style,
5148 scan_id: prev_snapshot.scan_id + 1,
5149 branch,
5150 head_commit,
5151 merge: merge_details,
5152 remote_origin_url,
5153 remote_upstream_url,
5154 stash_entries,
5155 };
5156
5157 Ok((snapshot, events))
5158}
5159
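/// Converts a protobuf file status into a `FileStatus`, falling back to the
/// legacy simple status code when no structured variant is present.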
5160fn status_from_proto(
5161 simple_status: i32,
5162 status: Option<proto::GitFileStatus>,
5163) -> anyhow::Result<FileStatus> {
5164 use proto::git_file_status::Variant;
5165
5166 let Some(variant) = status.and_then(|status| status.variant) else {
5167 let code = proto::GitStatus::from_i32(simple_status)
5168 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5169 let result = match code {
5170 proto::GitStatus::Added => TrackedStatus {
5171 worktree_status: StatusCode::Added,
5172 index_status: StatusCode::Unmodified,
5173 }
5174 .into(),
5175 proto::GitStatus::Modified => TrackedStatus {
5176 worktree_status: StatusCode::Modified,
5177 index_status: StatusCode::Unmodified,
5178 }
5179 .into(),
5180 proto::GitStatus::Conflict => UnmergedStatus {
5181 first_head: UnmergedStatusCode::Updated,
5182 second_head: UnmergedStatusCode::Updated,
5183 }
5184 .into(),
5185 proto::GitStatus::Deleted => TrackedStatus {
5186 worktree_status: StatusCode::Deleted,
5187 index_status: StatusCode::Unmodified,
5188 }
5189 .into(),
5190 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5191 };
5192 return Ok(result);
5193 };
5194
5195 let result = match variant {
5196 Variant::Untracked(_) => FileStatus::Untracked,
5197 Variant::Ignored(_) => FileStatus::Ignored,
5198 Variant::Unmerged(unmerged) => {
5199 let [first_head, second_head] =
5200 [unmerged.first_head, unmerged.second_head].map(|head| {
5201 let code = proto::GitStatus::from_i32(head)
5202 .with_context(|| format!("Invalid git status code: {head}"))?;
5203 let result = match code {
5204 proto::GitStatus::Added => UnmergedStatusCode::Added,
5205 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5206 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5207 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5208 };
5209 Ok(result)
5210 });
5211 let [first_head, second_head] = [first_head?, second_head?];
5212 UnmergedStatus {
5213 first_head,
5214 second_head,
5215 }
5216 .into()
5217 }
5218 Variant::Tracked(tracked) => {
5219 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5220 .map(|status| {
5221 let code = proto::GitStatus::from_i32(status)
5222 .with_context(|| format!("Invalid git status code: {status}"))?;
5223 let result = match code {
5224 proto::GitStatus::Modified => StatusCode::Modified,
5225 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5226 proto::GitStatus::Added => StatusCode::Added,
5227 proto::GitStatus::Deleted => StatusCode::Deleted,
5228 proto::GitStatus::Renamed => StatusCode::Renamed,
5229 proto::GitStatus::Copied => StatusCode::Copied,
5230 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5231 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5232 };
5233 Ok(result)
5234 });
5235 let [index_status, worktree_status] = [index_status?, worktree_status?];
5236 TrackedStatus {
5237 index_status,
5238 worktree_status,
5239 }
5240 .into()
5241 }
5242 };
5243 Ok(result)
5244}
5245
5246fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5247 use proto::git_file_status::{Tracked, Unmerged, Variant};
5248
5249 let variant = match status {
5250 FileStatus::Untracked => Variant::Untracked(Default::default()),
5251 FileStatus::Ignored => Variant::Ignored(Default::default()),
5252 FileStatus::Unmerged(UnmergedStatus {
5253 first_head,
5254 second_head,
5255 }) => Variant::Unmerged(Unmerged {
5256 first_head: unmerged_status_to_proto(first_head),
5257 second_head: unmerged_status_to_proto(second_head),
5258 }),
5259 FileStatus::Tracked(TrackedStatus {
5260 index_status,
5261 worktree_status,
5262 }) => Variant::Tracked(Tracked {
5263 index_status: tracked_status_to_proto(index_status),
5264 worktree_status: tracked_status_to_proto(worktree_status),
5265 }),
5266 };
5267 proto::GitFileStatus {
5268 variant: Some(variant),
5269 }
5270}
5271
5272fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5273 match code {
5274 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5275 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5276 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5277 }
5278}
5279
5280fn tracked_status_to_proto(code: StatusCode) -> i32 {
5281 match code {
5282 StatusCode::Added => proto::GitStatus::Added as _,
5283 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5284 StatusCode::Modified => proto::GitStatus::Modified as _,
5285 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5286 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5287 StatusCode::Copied => proto::GitStatus::Copied as _,
5288 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5289 }
5290}