mod conflict_set;
pub mod git_traversal;

use crate::{
    ProjectEnvironment, ProjectItem, ProjectPath,
    buffer_store::{BufferStore, BufferStoreEvent},
    worktree_store::{WorktreeStore, WorktreeStoreEvent},
};
use anyhow::{Context as _, Result, anyhow, bail};
use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
use buffer_diff::{BufferDiff, BufferDiffEvent};
use client::ProjectId;
use collections::HashMap;
pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
use fs::Fs;
use futures::{
    FutureExt, StreamExt,
    channel::{mpsc, oneshot},
    future::{self, Shared},
    stream::FuturesOrdered,
};
use git::{
    BuildPermalinkParams, GitHostingProviderRegistry, Oid,
    blame::Blame,
    parse_git_remote_url,
    repository::{
        Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
        GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
        ResetMode, UpstreamTrackingStatus,
    },
    stash::{GitStash, StashEntry},
    status::{
        FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
    },
};
use gpui::{
    App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
    WeakEntity,
};
use language::{
    Buffer, BufferEvent, Language, LanguageRegistry,
    proto::{deserialize_version, serialize_version},
};
use parking_lot::Mutex;
use postage::stream::Stream as _;
use rpc::{
    AnyProtoClient, TypedEnvelope,
    proto::{self, git_reset, split_repository_update},
};
use serde::Deserialize;
use std::{
    cmp::Ordering,
    collections::{BTreeSet, VecDeque},
    future::Future,
    mem,
    ops::Range,
    path::{Path, PathBuf},
    sync::{
        Arc,
        atomic::{self, AtomicU64},
    },
    time::Instant,
};
use sum_tree::{Edit, SumTree, TreeSet};
use task::Shell;
use text::{Bias, BufferId};
use util::{
    ResultExt, debug_panic,
    paths::{PathStyle, SanitizedPath},
    post_inc,
    rel_path::RelPath,
};
use worktree::{
    File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
    UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
};
use zeroize::Zeroize;

pub struct GitStore {
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    active_repo_id: Option<RepositoryId>,
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}

#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}

struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<String>>,
    index_text: Option<Arc<String>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}

#[derive(Clone, Debug)]
enum DiffBasesChange {
    SetIndex(Option<String>),
    SetHead(Option<String>),
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    SetBoth(Option<String>),
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    Unstaged,
    Uncommitted,
}

enum GitStoreState {
    Local {
        next_repository_id: Arc<AtomicU64>,
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}

#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
}

impl StatusEntry {
    fn to_proto(&self) -> proto::StatusEntry {
        let simple_status = match self.status {
            FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
            FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
            FileStatus::Tracked(TrackedStatus {
                index_status,
                worktree_status,
            }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
                worktree_status
            } else {
                index_status
            }),
        };

        proto::StatusEntry {
            repo_path: self.repo_path.to_proto(),
            simple_status,
            status: Some(status_to_proto(self.status)),
        }
    }
}

impl TryFrom<proto::StatusEntry> for StatusEntry {
    type Error = anyhow::Error;

    fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
        let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
        let status = status_from_proto(value.simple_status, value.status)?;
        Ok(Self { repo_path, status })
    }
}

impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.0.clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.0.clone())
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub conflicted_paths: TreeSet<RepoPath>,
    pub message: Option<SharedString>,
    pub heads: Vec<Option<SharedString>>,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
}

type JobId = u64;

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: BTreeSet<RepoPath>,
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    job_id: JobId,
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
}

impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}

#[derive(Clone)]
pub enum RepositoryState {
    Local {
        backend: Arc<dyn GitRepository>,
        environment: Arc<HashMap<String, String>>,
    },
    Remote {
        project_id: ProjectId,
        client: AnyProtoClient,
    },
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged {
        // TODO could report which statuses changed here
        full_scan: bool,
    },
    MergeHeadsChanged,
    BranchChanged,
    StashEntriesChanged,
}

#[derive(Clone, Debug)]
pub struct JobsUpdated;

#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    key: Option<GitJobKey>,
}

#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(RepoPath),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}

impl GitStore {
    pub fn local(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Local {
                next_repository_id: Arc::new(AtomicU64::new(1)),
                downstream: None,
                project_environment: environment,
                fs,
            },
            cx,
        )
    }

    pub fn remote(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        upstream_client: AnyProtoClient,
        project_id: u64,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Remote {
                upstream_client,
                upstream_project_id: project_id,
                downstream: None,
            },
            cx,
        )
    }

    fn new(
        worktree_store: Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        state: GitStoreState,
        cx: &mut Context<Self>,
    ) -> Self {
        let _subscriptions = vec![
            cx.subscribe(&worktree_store, Self::on_worktree_store_event),
            cx.subscribe(&buffer_store, Self::on_buffer_store_event),
        ];

        GitStore {
            state,
            buffer_store,
            worktree_store,
            repositories: HashMap::default(),
            active_repo_id: None,
            _subscriptions,
            loading_diffs: HashMap::default(),
            shared_diffs: HashMap::default(),
            diffs: HashMap::default(),
        }
    }

    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
    }

    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }

    pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
        if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
            let id = repo.read(cx).id;
            if self.active_repo_id != Some(id) {
                self.active_repo_id = Some(id);
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
            }
        }
    }

    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }

    pub fn unshared(&mut self, _cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                downstream_client.take();
            }
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                downstream_client.take();
            }
        }
        self.shared_diffs.clear();
    }

    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }

    pub fn active_repository(&self) -> Option<Entity<Repository>> {
        self.active_repo_id
            .as_ref()
            .map(|id| self.repositories[id].clone())
    }

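    /// Returns a [`BufferDiff`] comparing the buffer's current contents against
    /// its index (staged) text, loading and caching the diff if necessary.
    ///
    /// An illustrative sketch of a call site (not a doctest; `git_store`,
    /// `buffer`, and `cx` are assumed to come from the caller):
    ///
    /// ```ignore
    /// let unstaged_diff = git_store
    ///     .update(cx, |store, cx| store.open_unstaged_diff(buffer.clone(), cx))
    ///     .await?;
    /// ```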
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }

    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }

    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                }

                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }

    pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
        let diff_state = self.diffs.get(&buffer_id)?;
        diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
    }

    pub fn get_uncommitted_diff(
        &self,
        buffer_id: BufferId,
        cx: &App,
    ) -> Option<Entity<BufferDiff>> {
        let diff_state = self.diffs.get(&buffer_id)?;
        diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
    }

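    /// Returns the [`ConflictSet`] that tracks merge conflict markers in `buffer`,
    /// creating one and scheduling an initial reparse if none exists yet.
    ///
    /// An illustrative sketch (not a doctest; `git_store`, `buffer`, and `cx`
    /// are assumed to come from the caller):
    ///
    /// ```ignore
    /// let conflicts =
    ///     git_store.update(cx, |store, cx| store.open_conflict_set(buffer.clone(), cx));
    /// // Observe `conflicts` for `ConflictSetUpdate` events as markers are reparsed.
    /// ```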
    pub fn open_conflict_set(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Entity<ConflictSet> {
        log::debug!("open conflict set");
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(git_state) = self.diffs.get(&buffer_id)
            && let Some(conflict_set) = git_state
                .read(cx)
                .conflict_set
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            let conflict_set = conflict_set;
            let buffer_snapshot = buffer.read(cx).text_snapshot();

            git_state.update(cx, |state, cx| {
                let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
            });

            return conflict_set;
        }

        let is_unmerged = self
            .repository_and_path_for_buffer_id(buffer_id, cx)
            .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
        let git_store = cx.weak_entity();
        let buffer_git_state = self
            .diffs
            .entry(buffer_id)
            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
        let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));

        self._subscriptions
            .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
                cx.emit(GitStoreEvent::ConflictsUpdated);
            }));

        buffer_git_state.update(cx, |state, cx| {
            state.conflict_set = Some(conflict_set.downgrade());
            let buffer_snapshot = buffer.read(cx).text_snapshot();
            let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
        });

        conflict_set
    }

    pub fn project_path_git_status(
        &self,
        project_path: &ProjectPath,
        cx: &App,
    ) -> Option<FileStatus> {
        let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
        Some(repo.read(cx).status_for_path(&repo_path)?.status)
    }

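    /// Captures a [`GitStoreCheckpoint`] covering every repository in this store.
    ///
    /// An illustrative sketch of pairing this with `restore_checkpoint` (not a
    /// doctest; `git_store` and `cx` are assumed to come from the caller):
    ///
    /// ```ignore
    /// let checkpoint = git_store.update(cx, |store, cx| store.checkpoint(cx)).await?;
    /// // ...modify the working trees...
    /// git_store
    ///     .update(cx, |store, cx| store.restore_checkpoint(checkpoint, cx))
    ///     .await?;
    /// ```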
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }

    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }

    /// Compares two checkpoints, returning true if they are equal.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    tasks.push(async move { compare.await? });
                }
            } else {
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }

    /// Blames a buffer.
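    ///
    /// An illustrative sketch (not a doctest; `git_store`, `buffer`, and `cx`
    /// are assumed to come from the caller, and passing `None` blames the
    /// buffer's current version):
    ///
    /// ```ignore
    /// let blame = git_store
    ///     .update(cx, |store, cx| store.blame_buffer(&buffer, None, cx))
    ///     .await?;
    /// ```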
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut App,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, _| async move {
                match state {
                    RepositoryState::Local { backend, .. } => backend
                        .blame(repo_path.clone(), content)
                        .await
                        .with_context(|| format!("Failed to blame {:?}", repo_path.0))
                        .map(Some),
                    RepositoryState::Remote { project_id, client } => {
                        let response = client
                            .request(proto::BlameBuffer {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                version: serialize_version(&version),
                            })
                            .await?;
                        Ok(deserialize_blame_buffer_response(response))
                    }
                }
            })
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

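    /// Builds a permalink URL for the given selection of `buffer` on its Git
    /// hosting provider, using the repository's remote URL and HEAD SHA.
    ///
    /// An illustrative sketch (not a doctest; `git_store`, `buffer`, `cx`, and
    /// the `0..10` selection range are assumptions for the example):
    ///
    /// ```ignore
    /// let url = git_store
    ///     .update(cx, |store, cx| store.get_permalink_to_line(&buffer, 0..10, cx))
    ///     .await?;
    /// ```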
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust".into())
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local { backend, .. } => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global)?;

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote { project_id, client } => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

    fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
        match &self.state {
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => downstream_client
                .as_ref()
                .map(|state| (state.client.clone(), state.project_id)),
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => downstream_client.clone(),
        }
    }

    fn upstream_client(&self) -> Option<AnyProtoClient> {
        match &self.state {
            GitStoreState::Local { .. } => None,
            GitStoreState::Remote {
                upstream_client, ..
            } => Some(upstream_client.clone()),
        }
    }

    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            })
                            .ok();
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            _ => {}
        }
    }

    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }

    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }

    /// Updates our list of repositories and schedules git scans in response to a notification from a worktree.
    fn update_repositories_from_worktree(
        &mut self,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    removed_ids.push(*id);
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        repository_dir_abs_path.clone(),
                        common_dir_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        git_store,
                        cx,
                    );
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                cx.emit(GitStoreEvent::RepositoryAdded);
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }

    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }

            _ => {}
        }
    }

    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }

    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }

    fn local_worktree_git_repos_changed(
        &mut self,
        worktree: Entity<Worktree>,
        changed_repos: &UpdatedGitRepositoriesSet,
        cx: &mut Context<Self>,
    ) {
        log::debug!("local worktree repos changed");
        debug_assert!(worktree.read(cx).is_local());

        for repository in self.repositories.values() {
            repository.update(cx, |repository, cx| {
                let repo_abs_path = &repository.work_directory_abs_path;
                if changed_repos.iter().any(|update| {
                    update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
                        || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
                }) {
                    repository.reload_buffer_diff_bases(cx);
                }
            });
        }
    }

    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }

    pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
        let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
        let status = repo.read(cx).snapshot.status_for_path(&path)?;
        Some(status.status)
    }

    pub fn repository_and_path_for_buffer_id(
        &self,
        buffer_id: BufferId,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let buffer = self.buffer_store.read(cx).get(buffer_id)?;
        let project_path = buffer.read(cx).project_path(cx)?;
        self.repository_and_path_for_project_path(&project_path, cx)
    }

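    /// Resolves a [`ProjectPath`] to the repository containing it, along with
    /// the path relative to that repository's working directory. When
    /// repositories are nested, the innermost (most deeply nested) repository
    /// is chosen.
    ///
    /// An illustrative sketch (not a doctest; `git_store`, `project_path`, and
    /// `cx` are assumed to come from the caller):
    ///
    /// ```ignore
    /// if let Some((repo, repo_path)) = git_store
    ///     .read(cx)
    ///     .repository_and_path_for_project_path(&project_path, cx)
    /// {
    ///     // `repo_path` is relative to `repo.read(cx).work_directory_abs_path`.
    /// }
    /// ```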
1391 pub fn repository_and_path_for_project_path(
1392 &self,
1393 path: &ProjectPath,
1394 cx: &App,
1395 ) -> Option<(Entity<Repository>, RepoPath)> {
1396 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1397 self.repositories
1398 .values()
1399 .filter_map(|repo| {
1400 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1401 Some((repo.clone(), repo_path))
1402 })
1403 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1404 }
1405
1406 pub fn git_init(
1407 &self,
1408 path: Arc<Path>,
1409 fallback_branch_name: String,
1410 cx: &App,
1411 ) -> Task<Result<()>> {
1412 match &self.state {
1413 GitStoreState::Local { fs, .. } => {
1414 let fs = fs.clone();
1415 cx.background_executor()
1416 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1417 }
1418 GitStoreState::Remote {
1419 upstream_client,
1420 upstream_project_id: project_id,
1421 ..
1422 } => {
1423 let client = upstream_client.clone();
1424 let project_id = *project_id;
1425 cx.background_executor().spawn(async move {
1426 client
1427 .request(proto::GitInit {
1428 project_id: project_id,
1429 abs_path: path.to_string_lossy().into_owned(),
1430 fallback_branch_name,
1431 })
1432 .await?;
1433 Ok(())
1434 })
1435 }
1436 }
1437 }
1438
1439 pub fn git_clone(
1440 &self,
1441 repo: String,
1442 path: impl Into<Arc<std::path::Path>>,
1443 cx: &App,
1444 ) -> Task<Result<()>> {
1445 let path = path.into();
1446 match &self.state {
1447 GitStoreState::Local { fs, .. } => {
1448 let fs = fs.clone();
1449 cx.background_executor()
1450 .spawn(async move { fs.git_clone(&repo, &path).await })
1451 }
1452 GitStoreState::Remote {
1453 upstream_client,
1454 upstream_project_id,
1455 ..
1456 } => {
1457 if upstream_client.is_via_collab() {
1458 return Task::ready(Err(anyhow!(
1459 "Git Clone isn't supported for project guests"
1460 )));
1461 }
1462 let request = upstream_client.request(proto::GitClone {
1463 project_id: *upstream_project_id,
1464 abs_path: path.to_string_lossy().into_owned(),
1465 remote_repo: repo,
1466 });
1467
1468 cx.background_spawn(async move {
1469 let result = request.await?;
1470
1471 match result.success {
1472 true => Ok(()),
1473 false => Err(anyhow!("Git Clone failed")),
1474 }
1475 })
1476 }
1477 }
1478 }
1479
1480 async fn handle_update_repository(
1481 this: Entity<Self>,
1482 envelope: TypedEnvelope<proto::UpdateRepository>,
1483 mut cx: AsyncApp,
1484 ) -> Result<()> {
1485 this.update(&mut cx, |this, cx| {
1486 let path_style = this.worktree_store.read(cx).path_style();
1487 let mut update = envelope.payload;
1488
1489 let id = RepositoryId::from_proto(update.id);
1490 let client = this.upstream_client().context("no upstream client")?;
1491
1492 let mut repo_subscription = None;
1493 let repo = this.repositories.entry(id).or_insert_with(|| {
1494 let git_store = cx.weak_entity();
1495 let repo = cx.new(|cx| {
1496 Repository::remote(
1497 id,
1498 Path::new(&update.abs_path).into(),
1499 path_style,
1500 ProjectId(update.project_id),
1501 client,
1502 git_store,
1503 cx,
1504 )
1505 });
1506 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1507 cx.emit(GitStoreEvent::RepositoryAdded);
1508 repo
1509 });
1510 this._subscriptions.extend(repo_subscription);
1511
1512 repo.update(cx, {
1513 let update = update.clone();
1514 |repo, cx| repo.apply_remote_update(update, cx)
1515 })?;
1516
1517 this.active_repo_id.get_or_insert_with(|| {
1518 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1519 id
1520 });
1521
1522 if let Some((client, project_id)) = this.downstream_client() {
1523 update.project_id = project_id.to_proto();
1524 client.send(update).log_err();
1525 }
1526 Ok(())
1527 })?
1528 }
1529
1530 async fn handle_remove_repository(
1531 this: Entity<Self>,
1532 envelope: TypedEnvelope<proto::RemoveRepository>,
1533 mut cx: AsyncApp,
1534 ) -> Result<()> {
1535 this.update(&mut cx, |this, cx| {
1536 let mut update = envelope.payload;
1537 let id = RepositoryId::from_proto(update.id);
1538 this.repositories.remove(&id);
1539 if let Some((client, project_id)) = this.downstream_client() {
1540 update.project_id = project_id.to_proto();
1541 client.send(update).log_err();
1542 }
1543 if this.active_repo_id == Some(id) {
1544 this.active_repo_id = None;
1545 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1546 }
1547 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1548 })
1549 }
1550
1551 async fn handle_git_init(
1552 this: Entity<Self>,
1553 envelope: TypedEnvelope<proto::GitInit>,
1554 cx: AsyncApp,
1555 ) -> Result<proto::Ack> {
1556 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1557 let name = envelope.payload.fallback_branch_name;
1558 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1559 .await?;
1560
1561 Ok(proto::Ack {})
1562 }
1563
1564 async fn handle_git_clone(
1565 this: Entity<Self>,
1566 envelope: TypedEnvelope<proto::GitClone>,
1567 cx: AsyncApp,
1568 ) -> Result<proto::GitCloneResponse> {
1569 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1570 let repo_name = envelope.payload.remote_repo;
1571 let result = cx
1572 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1573 .await;
1574
1575 Ok(proto::GitCloneResponse {
1576 success: result.is_ok(),
1577 })
1578 }
1579
1580 async fn handle_fetch(
1581 this: Entity<Self>,
1582 envelope: TypedEnvelope<proto::Fetch>,
1583 mut cx: AsyncApp,
1584 ) -> Result<proto::RemoteMessageResponse> {
1585 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1586 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1587 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1588 let askpass_id = envelope.payload.askpass_id;
1589
1590 let askpass = make_remote_delegate(
1591 this,
1592 envelope.payload.project_id,
1593 repository_id,
1594 askpass_id,
1595 &mut cx,
1596 );
1597
1598 let remote_output = repository_handle
1599 .update(&mut cx, |repository_handle, cx| {
1600 repository_handle.fetch(fetch_options, askpass, cx)
1601 })?
1602 .await??;
1603
1604 Ok(proto::RemoteMessageResponse {
1605 stdout: remote_output.stdout,
1606 stderr: remote_output.stderr,
1607 })
1608 }
1609
1610 async fn handle_push(
1611 this: Entity<Self>,
1612 envelope: TypedEnvelope<proto::Push>,
1613 mut cx: AsyncApp,
1614 ) -> Result<proto::RemoteMessageResponse> {
1615 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1616 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1617
1618 let askpass_id = envelope.payload.askpass_id;
1619 let askpass = make_remote_delegate(
1620 this,
1621 envelope.payload.project_id,
1622 repository_id,
1623 askpass_id,
1624 &mut cx,
1625 );
1626
1627 let options = envelope
1628 .payload
1629 .options
1630 .as_ref()
1631 .map(|_| match envelope.payload.options() {
1632 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1633 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1634 });
1635
1636 let branch_name = envelope.payload.branch_name.into();
1637 let remote_name = envelope.payload.remote_name.into();
1638
1639 let remote_output = repository_handle
1640 .update(&mut cx, |repository_handle, cx| {
1641 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1642 })?
1643 .await??;
1644 Ok(proto::RemoteMessageResponse {
1645 stdout: remote_output.stdout,
1646 stderr: remote_output.stderr,
1647 })
1648 }
1649
1650 async fn handle_pull(
1651 this: Entity<Self>,
1652 envelope: TypedEnvelope<proto::Pull>,
1653 mut cx: AsyncApp,
1654 ) -> Result<proto::RemoteMessageResponse> {
1655 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1656 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1657 let askpass_id = envelope.payload.askpass_id;
1658 let askpass = make_remote_delegate(
1659 this,
1660 envelope.payload.project_id,
1661 repository_id,
1662 askpass_id,
1663 &mut cx,
1664 );
1665
1666 let branch_name = envelope.payload.branch_name.into();
1667 let remote_name = envelope.payload.remote_name.into();
1668
1669 let remote_message = repository_handle
1670 .update(&mut cx, |repository_handle, cx| {
1671 repository_handle.pull(branch_name, remote_name, askpass, cx)
1672 })?
1673 .await??;
1674
1675 Ok(proto::RemoteMessageResponse {
1676 stdout: remote_message.stdout,
1677 stderr: remote_message.stderr,
1678 })
1679 }
1680
1681 async fn handle_stage(
1682 this: Entity<Self>,
1683 envelope: TypedEnvelope<proto::Stage>,
1684 mut cx: AsyncApp,
1685 ) -> Result<proto::Ack> {
1686 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1687 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1688
1689 let entries = envelope
1690 .payload
1691 .paths
1692 .into_iter()
1693 .map(|path| RepoPath::new(&path))
1694 .collect::<Result<Vec<_>>>()?;
1695
1696 repository_handle
1697 .update(&mut cx, |repository_handle, cx| {
1698 repository_handle.stage_entries(entries, cx)
1699 })?
1700 .await?;
1701 Ok(proto::Ack {})
1702 }
1703
1704 async fn handle_unstage(
1705 this: Entity<Self>,
1706 envelope: TypedEnvelope<proto::Unstage>,
1707 mut cx: AsyncApp,
1708 ) -> Result<proto::Ack> {
1709 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1710 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1711
1712 let entries = envelope
1713 .payload
1714 .paths
1715 .into_iter()
1716 .map(|path| RepoPath::new(&path))
1717 .collect::<Result<Vec<_>>>()?;
1718
1719 repository_handle
1720 .update(&mut cx, |repository_handle, cx| {
1721 repository_handle.unstage_entries(entries, cx)
1722 })?
1723 .await?;
1724
1725 Ok(proto::Ack {})
1726 }
1727
1728 async fn handle_stash(
1729 this: Entity<Self>,
1730 envelope: TypedEnvelope<proto::Stash>,
1731 mut cx: AsyncApp,
1732 ) -> Result<proto::Ack> {
1733 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1734 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1735
1736 let entries = envelope
1737 .payload
1738 .paths
1739 .into_iter()
1740 .map(|path| RepoPath::new(&path))
1741 .collect::<Result<Vec<_>>>()?;
1742
1743 repository_handle
1744 .update(&mut cx, |repository_handle, cx| {
1745 repository_handle.stash_entries(entries, cx)
1746 })?
1747 .await?;
1748
1749 Ok(proto::Ack {})
1750 }
1751
1752 async fn handle_stash_pop(
1753 this: Entity<Self>,
1754 envelope: TypedEnvelope<proto::StashPop>,
1755 mut cx: AsyncApp,
1756 ) -> Result<proto::Ack> {
1757 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1758 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1759 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1760
1761 repository_handle
1762 .update(&mut cx, |repository_handle, cx| {
1763 repository_handle.stash_pop(stash_index, cx)
1764 })?
1765 .await?;
1766
1767 Ok(proto::Ack {})
1768 }
1769
1770 async fn handle_stash_apply(
1771 this: Entity<Self>,
1772 envelope: TypedEnvelope<proto::StashApply>,
1773 mut cx: AsyncApp,
1774 ) -> Result<proto::Ack> {
1775 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1776 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1777 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1778
1779 repository_handle
1780 .update(&mut cx, |repository_handle, cx| {
1781 repository_handle.stash_apply(stash_index, cx)
1782 })?
1783 .await?;
1784
1785 Ok(proto::Ack {})
1786 }
1787
1788 async fn handle_stash_drop(
1789 this: Entity<Self>,
1790 envelope: TypedEnvelope<proto::StashDrop>,
1791 mut cx: AsyncApp,
1792 ) -> Result<proto::Ack> {
1793 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1794 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1795 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1796
1797 repository_handle
1798 .update(&mut cx, |repository_handle, cx| {
1799 repository_handle.stash_drop(stash_index, cx)
1800 })?
1801 .await??;
1802
1803 Ok(proto::Ack {})
1804 }
1805
1806 async fn handle_set_index_text(
1807 this: Entity<Self>,
1808 envelope: TypedEnvelope<proto::SetIndexText>,
1809 mut cx: AsyncApp,
1810 ) -> Result<proto::Ack> {
1811 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1812 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1813 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1814
1815 repository_handle
1816 .update(&mut cx, |repository_handle, cx| {
1817 repository_handle.spawn_set_index_text_job(
1818 repo_path,
1819 envelope.payload.text,
1820 None,
1821 cx,
1822 )
1823 })?
1824 .await??;
1825 Ok(proto::Ack {})
1826 }
1827
1828 async fn handle_commit(
1829 this: Entity<Self>,
1830 envelope: TypedEnvelope<proto::Commit>,
1831 mut cx: AsyncApp,
1832 ) -> Result<proto::Ack> {
1833 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1834 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1835
1836 let message = SharedString::from(envelope.payload.message);
1837 let name = envelope.payload.name.map(SharedString::from);
1838 let email = envelope.payload.email.map(SharedString::from);
1839 let options = envelope.payload.options.unwrap_or_default();
1840
1841 repository_handle
1842 .update(&mut cx, |repository_handle, cx| {
1843 repository_handle.commit(
1844 message,
1845 name.zip(email),
1846 CommitOptions {
1847 amend: options.amend,
1848 signoff: options.signoff,
1849 },
1850 cx,
1851 )
1852 })?
1853 .await??;
1854 Ok(proto::Ack {})
1855 }
1856
1857 async fn handle_get_remotes(
1858 this: Entity<Self>,
1859 envelope: TypedEnvelope<proto::GetRemotes>,
1860 mut cx: AsyncApp,
1861 ) -> Result<proto::GetRemotesResponse> {
1862 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1863 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1864
1865 let branch_name = envelope.payload.branch_name;
1866
1867 let remotes = repository_handle
1868 .update(&mut cx, |repository_handle, _| {
1869 repository_handle.get_remotes(branch_name)
1870 })?
1871 .await??;
1872
1873 Ok(proto::GetRemotesResponse {
1874 remotes: remotes
1875 .into_iter()
1876 .map(|remotes| proto::get_remotes_response::Remote {
1877 name: remotes.name.to_string(),
1878 })
1879 .collect::<Vec<_>>(),
1880 })
1881 }
1882
1883 async fn handle_get_branches(
1884 this: Entity<Self>,
1885 envelope: TypedEnvelope<proto::GitGetBranches>,
1886 mut cx: AsyncApp,
1887 ) -> Result<proto::GitBranchesResponse> {
1888 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1889 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1890
1891 let branches = repository_handle
1892 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1893 .await??;
1894
1895 Ok(proto::GitBranchesResponse {
1896 branches: branches
1897 .into_iter()
1898 .map(|branch| branch_to_proto(&branch))
1899 .collect::<Vec<_>>(),
1900 })
1901 }

1902 async fn handle_get_default_branch(
1903 this: Entity<Self>,
1904 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1905 mut cx: AsyncApp,
1906 ) -> Result<proto::GetDefaultBranchResponse> {
1907 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1908 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1909
1910 let branch = repository_handle
1911 .update(&mut cx, |repository_handle, _| {
1912 repository_handle.default_branch()
1913 })?
1914 .await??
1915 .map(Into::into);
1916
1917 Ok(proto::GetDefaultBranchResponse { branch })
1918 }

1919 async fn handle_create_branch(
1920 this: Entity<Self>,
1921 envelope: TypedEnvelope<proto::GitCreateBranch>,
1922 mut cx: AsyncApp,
1923 ) -> Result<proto::Ack> {
1924 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1925 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1926 let branch_name = envelope.payload.branch_name;
1927
1928 repository_handle
1929 .update(&mut cx, |repository_handle, _| {
1930 repository_handle.create_branch(branch_name)
1931 })?
1932 .await??;
1933
1934 Ok(proto::Ack {})
1935 }
1936
1937 async fn handle_change_branch(
1938 this: Entity<Self>,
1939 envelope: TypedEnvelope<proto::GitChangeBranch>,
1940 mut cx: AsyncApp,
1941 ) -> Result<proto::Ack> {
1942 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1943 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1944 let branch_name = envelope.payload.branch_name;
1945
1946 repository_handle
1947 .update(&mut cx, |repository_handle, _| {
1948 repository_handle.change_branch(branch_name)
1949 })?
1950 .await??;
1951
1952 Ok(proto::Ack {})
1953 }
1954
1955 async fn handle_rename_branch(
1956 this: Entity<Self>,
1957 envelope: TypedEnvelope<proto::GitRenameBranch>,
1958 mut cx: AsyncApp,
1959 ) -> Result<proto::Ack> {
1960 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1961 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1962 let branch = envelope.payload.branch;
1963 let new_name = envelope.payload.new_name;
1964
1965 repository_handle
1966 .update(&mut cx, |repository_handle, _| {
1967 repository_handle.rename_branch(branch, new_name)
1968 })?
1969 .await??;
1970
1971 Ok(proto::Ack {})
1972 }
1973
1974 async fn handle_show(
1975 this: Entity<Self>,
1976 envelope: TypedEnvelope<proto::GitShow>,
1977 mut cx: AsyncApp,
1978 ) -> Result<proto::GitCommitDetails> {
1979 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1980 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1981
1982 let commit = repository_handle
1983 .update(&mut cx, |repository_handle, _| {
1984 repository_handle.show(envelope.payload.commit)
1985 })?
1986 .await??;
1987 Ok(proto::GitCommitDetails {
1988 sha: commit.sha.into(),
1989 message: commit.message.into(),
1990 commit_timestamp: commit.commit_timestamp,
1991 author_email: commit.author_email.into(),
1992 author_name: commit.author_name.into(),
1993 })
1994 }
1995
1996 async fn handle_load_commit_diff(
1997 this: Entity<Self>,
1998 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1999 mut cx: AsyncApp,
2000 ) -> Result<proto::LoadCommitDiffResponse> {
2001 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2002 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2003
2004 let commit_diff = repository_handle
2005 .update(&mut cx, |repository_handle, _| {
2006 repository_handle.load_commit_diff(envelope.payload.commit)
2007 })?
2008 .await??;
2009 Ok(proto::LoadCommitDiffResponse {
2010 files: commit_diff
2011 .files
2012 .into_iter()
2013 .map(|file| proto::CommitFile {
2014 path: file.path.to_proto(),
2015 old_text: file.old_text,
2016 new_text: file.new_text,
2017 })
2018 .collect(),
2019 })
2020 }
2021
2022 async fn handle_reset(
2023 this: Entity<Self>,
2024 envelope: TypedEnvelope<proto::GitReset>,
2025 mut cx: AsyncApp,
2026 ) -> Result<proto::Ack> {
2027 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2028 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2029
2030 let mode = match envelope.payload.mode() {
2031 git_reset::ResetMode::Soft => ResetMode::Soft,
2032 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2033 };
2034
2035 repository_handle
2036 .update(&mut cx, |repository_handle, cx| {
2037 repository_handle.reset(envelope.payload.commit, mode, cx)
2038 })?
2039 .await??;
2040 Ok(proto::Ack {})
2041 }
2042
2043 async fn handle_checkout_files(
2044 this: Entity<Self>,
2045 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2046 mut cx: AsyncApp,
2047 ) -> Result<proto::Ack> {
2048 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2049 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2050 let paths = envelope
2051 .payload
2052 .paths
2053 .iter()
2054 .map(|s| RepoPath::from_proto(s))
2055 .collect::<Result<Vec<_>>>()?;
2056
2057 repository_handle
2058 .update(&mut cx, |repository_handle, cx| {
2059 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2060 })?
2061 .await??;
2062 Ok(proto::Ack {})
2063 }
2064
2065 async fn handle_open_commit_message_buffer(
2066 this: Entity<Self>,
2067 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2068 mut cx: AsyncApp,
2069 ) -> Result<proto::OpenBufferResponse> {
2070 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2071 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2072 let buffer = repository
2073 .update(&mut cx, |repository, cx| {
2074 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2075 })?
2076 .await?;
2077
2078 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2079 this.update(&mut cx, |this, cx| {
2080 this.buffer_store.update(cx, |buffer_store, cx| {
2081 buffer_store
2082 .create_buffer_for_peer(
2083 &buffer,
2084 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2085 cx,
2086 )
2087 .detach_and_log_err(cx);
2088 })
2089 })?;
2090
2091 Ok(proto::OpenBufferResponse {
2092 buffer_id: buffer_id.to_proto(),
2093 })
2094 }
2095
2096 async fn handle_askpass(
2097 this: Entity<Self>,
2098 envelope: TypedEnvelope<proto::AskPassRequest>,
2099 mut cx: AsyncApp,
2100 ) -> Result<proto::AskPassResponse> {
2101 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2102 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2103
2104 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2105 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2106 debug_panic!("no askpass found");
2107 anyhow::bail!("no askpass found");
2108 };
2109
2110 let response = askpass
2111 .ask_password(envelope.payload.prompt)
2112 .await
2113 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2114
2115 delegates
2116 .lock()
2117 .insert(envelope.payload.askpass_id, askpass);
2118
2119 // Note: despite the marker type below, this decrypts the askpass response and sends it downstream unencrypted.
2120 Ok(proto::AskPassResponse {
2121 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2122 })
2123 }
2124
2125 async fn handle_check_for_pushed_commits(
2126 this: Entity<Self>,
2127 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2128 mut cx: AsyncApp,
2129 ) -> Result<proto::CheckForPushedCommitsResponse> {
2130 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2131 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2132
2133 let branches = repository_handle
2134 .update(&mut cx, |repository_handle, _| {
2135 repository_handle.check_for_pushed_commits()
2136 })?
2137 .await??;
2138 Ok(proto::CheckForPushedCommitsResponse {
2139 pushed_to: branches
2140 .into_iter()
2141 .map(|branch| branch.to_string())
2142 .collect(),
2143 })
2144 }
2145
2146 async fn handle_git_diff(
2147 this: Entity<Self>,
2148 envelope: TypedEnvelope<proto::GitDiff>,
2149 mut cx: AsyncApp,
2150 ) -> Result<proto::GitDiffResponse> {
2151 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2152 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2153 let diff_type = match envelope.payload.diff_type() {
2154 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2155 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2156 };
2157
2158 let mut diff = repository_handle
2159 .update(&mut cx, |repository_handle, cx| {
2160 repository_handle.diff(diff_type, cx)
2161 })?
2162 .await??;
2163 const ONE_MB: usize = 1_000_000;
2164 if diff.len() > ONE_MB {
2165 diff = diff.chars().take(ONE_MB).collect()
2166 }
2167
2168 Ok(proto::GitDiffResponse { diff })
2169 }
2170
2171 async fn handle_open_unstaged_diff(
2172 this: Entity<Self>,
2173 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2174 mut cx: AsyncApp,
2175 ) -> Result<proto::OpenUnstagedDiffResponse> {
2176 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2177 let diff = this
2178 .update(&mut cx, |this, cx| {
2179 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2180 Some(this.open_unstaged_diff(buffer, cx))
2181 })?
2182 .context("missing buffer")?
2183 .await?;
2184 this.update(&mut cx, |this, _| {
2185 let shared_diffs = this
2186 .shared_diffs
2187 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2188 .or_default();
2189 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2190 })?;
2191 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2192 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2193 }
2194
2195 async fn handle_open_uncommitted_diff(
2196 this: Entity<Self>,
2197 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2198 mut cx: AsyncApp,
2199 ) -> Result<proto::OpenUncommittedDiffResponse> {
2200 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2201 let diff = this
2202 .update(&mut cx, |this, cx| {
2203 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2204 Some(this.open_uncommitted_diff(buffer, cx))
2205 })?
2206 .context("missing buffer")?
2207 .await?;
2208 this.update(&mut cx, |this, _| {
2209 let shared_diffs = this
2210 .shared_diffs
2211 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2212 .or_default();
2213 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2214 })?;
2215 diff.read_with(&cx, |diff, cx| {
2216 use proto::open_uncommitted_diff_response::Mode;
2217
2218 let unstaged_diff = diff.secondary_diff();
2219 let index_snapshot = unstaged_diff.and_then(|diff| {
2220 let diff = diff.read(cx);
2221 diff.base_text_exists().then(|| diff.base_text())
2222 });
2223
2224 let mode;
2225 let staged_text;
2226 let committed_text;
2227 if diff.base_text_exists() {
2228 let committed_snapshot = diff.base_text();
2229 committed_text = Some(committed_snapshot.text());
2230 if let Some(index_text) = index_snapshot {
2231 if index_text.remote_id() == committed_snapshot.remote_id() {
2232 mode = Mode::IndexMatchesHead;
2233 staged_text = None;
2234 } else {
2235 mode = Mode::IndexAndHead;
2236 staged_text = Some(index_text.text());
2237 }
2238 } else {
2239 mode = Mode::IndexAndHead;
2240 staged_text = None;
2241 }
2242 } else {
2243 mode = Mode::IndexAndHead;
2244 committed_text = None;
2245 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2246 }
2247
2248 proto::OpenUncommittedDiffResponse {
2249 committed_text,
2250 staged_text,
2251 mode: mode.into(),
2252 }
2253 })
2254 }
2255
2256 async fn handle_update_diff_bases(
2257 this: Entity<Self>,
2258 request: TypedEnvelope<proto::UpdateDiffBases>,
2259 mut cx: AsyncApp,
2260 ) -> Result<()> {
2261 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2262 this.update(&mut cx, |this, cx| {
2263 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2264 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2265 {
2266 let buffer = buffer.read(cx).text_snapshot();
2267 diff_state.update(cx, |diff_state, cx| {
2268 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2269 })
2270 }
2271 })
2272 }
2273
2274 async fn handle_blame_buffer(
2275 this: Entity<Self>,
2276 envelope: TypedEnvelope<proto::BlameBuffer>,
2277 mut cx: AsyncApp,
2278 ) -> Result<proto::BlameBufferResponse> {
2279 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2280 let version = deserialize_version(&envelope.payload.version);
2281 let buffer = this.read_with(&cx, |this, cx| {
2282 this.buffer_store.read(cx).get_existing(buffer_id)
2283 })??;
2284 buffer
2285 .update(&mut cx, |buffer, _| {
2286 buffer.wait_for_version(version.clone())
2287 })?
2288 .await?;
2289 let blame = this
2290 .update(&mut cx, |this, cx| {
2291 this.blame_buffer(&buffer, Some(version), cx)
2292 })?
2293 .await?;
2294 Ok(serialize_blame_buffer_response(blame))
2295 }
2296
2297 async fn handle_get_permalink_to_line(
2298 this: Entity<Self>,
2299 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2300 mut cx: AsyncApp,
2301 ) -> Result<proto::GetPermalinkToLineResponse> {
2302 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2303 // Note: the payload's version field is not used for this request.
2304 let selection = {
2305 let proto_selection = envelope
2306 .payload
2307 .selection
2308 .context("no selection to get permalink for defined")?;
2309 proto_selection.start as u32..proto_selection.end as u32
2310 };
2311 let buffer = this.read_with(&cx, |this, cx| {
2312 this.buffer_store.read(cx).get_existing(buffer_id)
2313 })??;
2314 let permalink = this
2315 .update(&mut cx, |this, cx| {
2316 this.get_permalink_to_line(&buffer, selection, cx)
2317 })?
2318 .await?;
2319 Ok(proto::GetPermalinkToLineResponse {
2320 permalink: permalink.to_string(),
2321 })
2322 }
2323
2324 fn repository_for_request(
2325 this: &Entity<Self>,
2326 id: RepositoryId,
2327 cx: &mut AsyncApp,
2328 ) -> Result<Entity<Repository>> {
2329 this.read_with(cx, |this, _| {
2330 this.repositories
2331 .get(&id)
2332 .context("missing repository handle")
2333 .cloned()
2334 })?
2335 }
2336
2337 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2338 self.repositories
2339 .iter()
2340 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2341 .collect()
2342 }
2343
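    /// Maps a batch of updated worktree entries to the repositories that contain them.
    /// The work happens on the background executor: entry paths are sorted and absolutized,
    /// then each repository claims the contiguous run of paths under its work directory,
    /// with nested repositories taking precedence over their parents.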
2344 fn process_updated_entries(
2345 &self,
2346 worktree: &Entity<Worktree>,
2347 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2348 cx: &mut App,
2349 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2350 let path_style = worktree.read(cx).path_style();
2351 let mut repo_paths = self
2352 .repositories
2353 .values()
2354 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2355 .collect::<Vec<_>>();
2356 let mut entries: Vec<_> = updated_entries
2357 .iter()
2358 .map(|(path, _, _)| path.clone())
2359 .collect();
2360 entries.sort();
2361 let worktree = worktree.read(cx);
2362
2363 let entries = entries
2364 .into_iter()
2365 .map(|path| worktree.absolutize(&path))
2366 .collect::<Arc<[_]>>();
2367
2368 let executor = cx.background_executor().clone();
2369 cx.background_executor().spawn(async move {
2370 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2371 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2372 let mut tasks = FuturesOrdered::new();
2373 for (repo_path, repo) in repo_paths.into_iter().rev() {
2374 let entries = entries.clone();
2375 let task = executor.spawn(async move {
2376 // Find all repository paths that belong to this repo
2377 let mut ix = entries.partition_point(|path| path < &*repo_path);
2378 if ix == entries.len() {
2379 return None;
2380 };
2381
2382 let mut paths = Vec::new();
2383 // All paths prefixed by a given repo will constitute a contiguous range.
2384 while let Some(path) = entries.get(ix)
2385 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2386 &repo_path, path, path_style,
2387 )
2388 {
2389 paths.push((repo_path, ix));
2390 ix += 1;
2391 }
2392 if paths.is_empty() {
2393 None
2394 } else {
2395 Some((repo, paths))
2396 }
2397 });
2398 tasks.push_back(task);
2399 }
2400
2401 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2402 let mut path_was_used = vec![false; entries.len()];
2403 let tasks = tasks.collect::<Vec<_>>().await;
2404 // The tasks were queued for repositories in reverse path order (innermost first),
2405 // so more-specific paths are claimed first: we always want to assign a path to its innermost repository.
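            // For example (hypothetical layout): with repositories rooted at `/work` and
            // `/work/vendor/lib`, an updated path `/work/vendor/lib/src/a.rs` is claimed by
            // the inner `/work/vendor/lib` repository and marked as used, so the outer
            // `/work` repository never sees it.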
2406 for t in tasks {
2407 let Some((repo, paths)) = t else {
2408 continue;
2409 };
2410 let entry = paths_by_git_repo.entry(repo).or_default();
2411 for (repo_path, ix) in paths {
2412 if path_was_used[ix] {
2413 continue;
2414 }
2415 path_was_used[ix] = true;
2416 entry.push(repo_path);
2417 }
2418 }
2419
2420 paths_by_git_repo
2421 })
2422 }
2423}
2424
2425impl BufferGitState {
2426 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2427 Self {
2428 unstaged_diff: Default::default(),
2429 uncommitted_diff: Default::default(),
2430 recalculate_diff_task: Default::default(),
2431 language: Default::default(),
2432 language_registry: Default::default(),
2433 recalculating_tx: postage::watch::channel_with(false).0,
2434 hunk_staging_operation_count: 0,
2435 hunk_staging_operation_count_as_of_write: 0,
2436 head_text: Default::default(),
2437 index_text: Default::default(),
2438 head_changed: Default::default(),
2439 index_changed: Default::default(),
2440 language_changed: Default::default(),
2441 conflict_updated_futures: Default::default(),
2442 conflict_set: Default::default(),
2443 reparse_conflict_markers_task: Default::default(),
2444 }
2445 }
2446
2447 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2448 self.language = buffer.read(cx).language().cloned();
2449 self.language_changed = true;
2450 self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2451 }
2452
2453 fn reparse_conflict_markers(
2454 &mut self,
2455 buffer: text::BufferSnapshot,
2456 cx: &mut Context<Self>,
2457 ) -> oneshot::Receiver<()> {
2458 let (tx, rx) = oneshot::channel();
2459
2460 let Some(conflict_set) = self
2461 .conflict_set
2462 .as_ref()
2463 .and_then(|conflict_set| conflict_set.upgrade())
2464 else {
2465 return rx;
2466 };
2467
2468 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2469 if conflict_set.has_conflict {
2470 Some(conflict_set.snapshot())
2471 } else {
2472 None
2473 }
2474 });
2475
2476 if let Some(old_snapshot) = old_snapshot {
2477 self.conflict_updated_futures.push(tx);
2478 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2479 let (snapshot, changed_range) = cx
2480 .background_spawn(async move {
2481 let new_snapshot = ConflictSet::parse(&buffer);
2482 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2483 (new_snapshot, changed_range)
2484 })
2485 .await;
2486 this.update(cx, |this, cx| {
2487 if let Some(conflict_set) = &this.conflict_set {
2488 conflict_set
2489 .update(cx, |conflict_set, cx| {
2490 conflict_set.set_snapshot(snapshot, changed_range, cx);
2491 })
2492 .ok();
2493 }
2494 let futures = std::mem::take(&mut this.conflict_updated_futures);
2495 for tx in futures {
2496 tx.send(()).ok();
2497 }
2498 })
2499 }))
2500 }
2501
2502 rx
2503 }
2504
2505 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2506 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2507 }
2508
2509 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2510 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2511 }
2512
2513 fn handle_base_texts_updated(
2514 &mut self,
2515 buffer: text::BufferSnapshot,
2516 message: proto::UpdateDiffBases,
2517 cx: &mut Context<Self>,
2518 ) {
2519 use proto::update_diff_bases::Mode;
2520
2521 let Some(mode) = Mode::from_i32(message.mode) else {
2522 return;
2523 };
2524
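        // Interpret the wire mode: `HeadOnly`/`IndexOnly` replace a single base text,
        // `IndexMatchesHead` uses one text for both bases, and `IndexAndHead` carries
        // each base separately.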
2525 let diff_bases_change = match mode {
2526 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2527 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2528 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2529 Mode::IndexAndHead => DiffBasesChange::SetEach {
2530 index: message.staged_text,
2531 head: message.committed_text,
2532 },
2533 };
2534
2535 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2536 }
2537
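    /// Returns a future that resolves once the in-flight diff recalculation (if any)
    /// finishes, or `None` if no recalculation is currently running.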
2538 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2539 if *self.recalculating_tx.borrow() {
2540 let mut rx = self.recalculating_tx.subscribe();
2541 Some(async move {
2542 loop {
2543 let is_recalculating = rx.recv().await;
2544 if is_recalculating != Some(true) {
2545 break;
2546 }
2547 }
2548 })
2549 } else {
2550 None
2551 }
2552 }
2553
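    /// Applies a `DiffBasesChange`, normalizing line endings and recording which of the
    /// head/index base texts changed, then kicks off a diff recalculation.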
2554 fn diff_bases_changed(
2555 &mut self,
2556 buffer: text::BufferSnapshot,
2557 diff_bases_change: Option<DiffBasesChange>,
2558 cx: &mut Context<Self>,
2559 ) {
2560 match diff_bases_change {
2561 Some(DiffBasesChange::SetIndex(index)) => {
2562 self.index_text = index.map(|mut index| {
2563 text::LineEnding::normalize(&mut index);
2564 Arc::new(index)
2565 });
2566 self.index_changed = true;
2567 }
2568 Some(DiffBasesChange::SetHead(head)) => {
2569 self.head_text = head.map(|mut head| {
2570 text::LineEnding::normalize(&mut head);
2571 Arc::new(head)
2572 });
2573 self.head_changed = true;
2574 }
2575 Some(DiffBasesChange::SetBoth(text)) => {
2576 let text = text.map(|mut text| {
2577 text::LineEnding::normalize(&mut text);
2578 Arc::new(text)
2579 });
2580 self.head_text = text.clone();
2581 self.index_text = text;
2582 self.head_changed = true;
2583 self.index_changed = true;
2584 }
2585 Some(DiffBasesChange::SetEach { index, head }) => {
2586 self.index_text = index.map(|mut index| {
2587 text::LineEnding::normalize(&mut index);
2588 Arc::new(index)
2589 });
2590 self.index_changed = true;
2591 self.head_text = head.map(|mut head| {
2592 text::LineEnding::normalize(&mut head);
2593 Arc::new(head)
2594 });
2595 self.head_changed = true;
2596 }
2597 None => {}
2598 }
2599
2600 self.recalculate_diffs(buffer, cx)
2601 }
2602
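    /// Recomputes the unstaged and uncommitted diffs against the current base texts on a
    /// background task. When the index and head texts are the same shared text, the
    /// unstaged diff snapshot is reused for the uncommitted diff. The task bails out if
    /// new hunk staging operations arrived while it was running, leaving the next
    /// recalculation to pick up the settled index state.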
2603 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2604 *self.recalculating_tx.borrow_mut() = true;
2605
2606 let language = self.language.clone();
2607 let language_registry = self.language_registry.clone();
2608 let unstaged_diff = self.unstaged_diff();
2609 let uncommitted_diff = self.uncommitted_diff();
2610 let head = self.head_text.clone();
2611 let index = self.index_text.clone();
2612 let index_changed = self.index_changed;
2613 let head_changed = self.head_changed;
2614 let language_changed = self.language_changed;
2615 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2616 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2617 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2618 (None, None) => true,
2619 _ => false,
2620 };
2621 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2622 log::debug!(
2623 "start recalculating diffs for buffer {}",
2624 buffer.remote_id()
2625 );
2626
2627 let mut new_unstaged_diff = None;
2628 if let Some(unstaged_diff) = &unstaged_diff {
2629 new_unstaged_diff = Some(
2630 BufferDiff::update_diff(
2631 unstaged_diff.clone(),
2632 buffer.clone(),
2633 index,
2634 index_changed,
2635 language_changed,
2636 language.clone(),
2637 language_registry.clone(),
2638 cx,
2639 )
2640 .await?,
2641 );
2642 }
2643
2644 let mut new_uncommitted_diff = None;
2645 if let Some(uncommitted_diff) = &uncommitted_diff {
2646 new_uncommitted_diff = if index_matches_head {
2647 new_unstaged_diff.clone()
2648 } else {
2649 Some(
2650 BufferDiff::update_diff(
2651 uncommitted_diff.clone(),
2652 buffer.clone(),
2653 head,
2654 head_changed,
2655 language_changed,
2656 language.clone(),
2657 language_registry.clone(),
2658 cx,
2659 )
2660 .await?,
2661 )
2662 }
2663 }
2664
2665 let cancel = this.update(cx, |this, _| {
2666 // This checks whether all pending stage/unstage operations
2667 // have quiesced (i.e. both the corresponding write and the
2668 // read of that write have completed). If not, then we cancel
2669 // this recalculation attempt to avoid invalidating pending
2670 // state too quickly; another recalculation will come along
2671 // later and clear the pending state once the state of the index has settled.
2672 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2673 *this.recalculating_tx.borrow_mut() = false;
2674 true
2675 } else {
2676 false
2677 }
2678 })?;
2679 if cancel {
2680 log::debug!(
2681 concat!(
2682 "aborting recalculating diffs for buffer {}",
2683 "due to subsequent hunk operations",
2684 ),
2685 buffer.remote_id()
2686 );
2687 return Ok(());
2688 }
2689
2690 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2691 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2692 {
2693 unstaged_diff.update(cx, |diff, cx| {
2694 if language_changed {
2695 diff.language_changed(cx);
2696 }
2697 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2698 })?
2699 } else {
2700 None
2701 };
2702
2703 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2704 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2705 {
2706 uncommitted_diff.update(cx, |diff, cx| {
2707 if language_changed {
2708 diff.language_changed(cx);
2709 }
2710 diff.set_snapshot_with_secondary(
2711 new_uncommitted_diff,
2712 &buffer,
2713 unstaged_changed_range,
2714 true,
2715 cx,
2716 );
2717 })?;
2718 }
2719
2720 log::debug!(
2721 "finished recalculating diffs for buffer {}",
2722 buffer.remote_id()
2723 );
2724
2725 if let Some(this) = this.upgrade() {
2726 this.update(cx, |this, _| {
2727 this.index_changed = false;
2728 this.head_changed = false;
2729 this.language_changed = false;
2730 *this.recalculating_tx.borrow_mut() = false;
2731 })?;
2732 }
2733
2734 Ok(())
2735 }));
2736 }
2737}
2738
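/// Builds an `AskPassDelegate` that forwards credential prompts from a local git job to
/// the downstream client as `proto::AskPassRequest`s, wrapping the reply as an
/// `EncryptedPassword` and zeroizing the plaintext buffer afterwards.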
2739fn make_remote_delegate(
2740 this: Entity<GitStore>,
2741 project_id: u64,
2742 repository_id: RepositoryId,
2743 askpass_id: u64,
2744 cx: &mut AsyncApp,
2745) -> AskPassDelegate {
2746 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2747 this.update(cx, |this, cx| {
2748 let Some((client, _)) = this.downstream_client() else {
2749 return;
2750 };
2751 let response = client.request(proto::AskPassRequest {
2752 project_id,
2753 repository_id: repository_id.to_proto(),
2754 askpass_id,
2755 prompt,
2756 });
2757 cx.spawn(async move |_, _| {
2758 let mut response = response.await?.response;
2759 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2760 .ok();
2761 response.zeroize();
2762 anyhow::Ok(())
2763 })
2764 .detach_and_log_err(cx);
2765 })
2766 .log_err();
2767 })
2768}
2769
2770impl RepositoryId {
2771 pub fn to_proto(self) -> u64 {
2772 self.0
2773 }
2774
2775 pub fn from_proto(id: u64) -> Self {
2776 RepositoryId(id)
2777 }
2778}
2779
2780impl RepositorySnapshot {
2781 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2782 Self {
2783 id,
2784 statuses_by_path: Default::default(),
2785 work_directory_abs_path,
2786 branch: None,
2787 head_commit: None,
2788 scan_id: 0,
2789 merge: Default::default(),
2790 remote_origin_url: None,
2791 remote_upstream_url: None,
2792 stash_entries: Default::default(),
2793 path_style,
2794 }
2795 }
2796
2797 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2798 proto::UpdateRepository {
2799 branch_summary: self.branch.as_ref().map(branch_to_proto),
2800 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2801 updated_statuses: self
2802 .statuses_by_path
2803 .iter()
2804 .map(|entry| entry.to_proto())
2805 .collect(),
2806 removed_statuses: Default::default(),
2807 current_merge_conflicts: self
2808 .merge
2809 .conflicted_paths
2810 .iter()
2811 .map(|repo_path| repo_path.to_proto())
2812 .collect(),
2813 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2814 project_id,
2815 id: self.id.to_proto(),
2816 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2817 entry_ids: vec![self.id.to_proto()],
2818 scan_id: self.scan_id,
2819 is_last_update: true,
2820 stash_entries: self
2821 .stash_entries
2822 .entries
2823 .iter()
2824 .map(stash_to_proto)
2825 .collect(),
2826 }
2827 }
2828
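    /// Computes an incremental `UpdateRepository` message by merge-joining the old and new
    /// status entries (both ordered by repo path): entries only present in `self`, or whose
    /// status changed, become `updated_statuses`; entries only present in `old` become
    /// `removed_statuses`.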
2829 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2830 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2831 let mut removed_statuses: Vec<String> = Vec::new();
2832
2833 let mut new_statuses = self.statuses_by_path.iter().peekable();
2834 let mut old_statuses = old.statuses_by_path.iter().peekable();
2835
2836 let mut current_new_entry = new_statuses.next();
2837 let mut current_old_entry = old_statuses.next();
2838 loop {
2839 match (current_new_entry, current_old_entry) {
2840 (Some(new_entry), Some(old_entry)) => {
2841 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2842 Ordering::Less => {
2843 updated_statuses.push(new_entry.to_proto());
2844 current_new_entry = new_statuses.next();
2845 }
2846 Ordering::Equal => {
2847 if new_entry.status != old_entry.status {
2848 updated_statuses.push(new_entry.to_proto());
2849 }
2850 current_old_entry = old_statuses.next();
2851 current_new_entry = new_statuses.next();
2852 }
2853 Ordering::Greater => {
2854 removed_statuses.push(old_entry.repo_path.to_proto());
2855 current_old_entry = old_statuses.next();
2856 }
2857 }
2858 }
2859 (None, Some(old_entry)) => {
2860 removed_statuses.push(old_entry.repo_path.to_proto());
2861 current_old_entry = old_statuses.next();
2862 }
2863 (Some(new_entry), None) => {
2864 updated_statuses.push(new_entry.to_proto());
2865 current_new_entry = new_statuses.next();
2866 }
2867 (None, None) => break,
2868 }
2869 }
2870
2871 proto::UpdateRepository {
2872 branch_summary: self.branch.as_ref().map(branch_to_proto),
2873 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2874 updated_statuses,
2875 removed_statuses,
2876 current_merge_conflicts: self
2877 .merge
2878 .conflicted_paths
2879 .iter()
2880 .map(|path| path.to_proto())
2881 .collect(),
2882 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2883 project_id,
2884 id: self.id.to_proto(),
2885 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2886 entry_ids: vec![],
2887 scan_id: self.scan_id,
2888 is_last_update: true,
2889 stash_entries: self
2890 .stash_entries
2891 .entries
2892 .iter()
2893 .map(stash_to_proto)
2894 .collect(),
2895 }
2896 }
2897
2898 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2899 self.statuses_by_path.iter().cloned()
2900 }
2901
2902 pub fn status_summary(&self) -> GitSummary {
2903 self.statuses_by_path.summary().item_summary
2904 }
2905
2906 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2907 self.statuses_by_path
2908 .get(&PathKey(path.0.clone()), ())
2909 .cloned()
2910 }
2911
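    /// For example (hypothetical paths): with a work directory of `/projects/app`, the
    /// absolute path `/projects/app/src/main.rs` maps to the repo path `src/main.rs`,
    /// while a path outside the work directory yields `None`.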
2912 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2913 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
2914 }
2915
2916 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
2917 self.path_style
2918 .join(&self.work_directory_abs_path, repo_path.as_std_path())
2919 .unwrap()
2920 .into()
2921 }
2922
2923 #[inline]
2924 fn abs_path_to_repo_path_inner(
2925 work_directory_abs_path: &Path,
2926 abs_path: &Path,
2927 path_style: PathStyle,
2928 ) -> Option<RepoPath> {
2929 abs_path
2930 .strip_prefix(&work_directory_abs_path)
2931 .ok()
2932 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
2933 }
2934
2935 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2936 self.merge.conflicted_paths.contains(repo_path)
2937 }
2938
2939 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2940 let had_conflict_on_last_merge_head_change =
2941 self.merge.conflicted_paths.contains(repo_path);
2942 let has_conflict_currently = self
2943 .status_for_path(repo_path)
2944 .is_some_and(|entry| entry.status.is_conflicted());
2945 had_conflict_on_last_merge_head_change || has_conflict_currently
2946 }
2947
2948 /// This is the name that will be displayed in the repository selector for this repository.
2949 pub fn display_name(&self) -> SharedString {
2950 self.work_directory_abs_path
2951 .file_name()
2952 .unwrap_or_default()
2953 .to_string_lossy()
2954 .to_string()
2955 .into()
2956 }
2957}
2958
2959pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
2960 proto::StashEntry {
2961 oid: entry.oid.as_bytes().to_vec(),
2962 message: entry.message.clone(),
2963 branch: entry.branch.clone(),
2964 index: entry.index as u64,
2965 timestamp: entry.timestamp,
2966 }
2967}
2968
2969pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
2970 Ok(StashEntry {
2971 oid: Oid::from_bytes(&entry.oid)?,
2972 message: entry.message.clone(),
2973 index: entry.index as usize,
2974 branch: entry.branch.clone(),
2975 timestamp: entry.timestamp,
2976 })
2977}
2978
2979impl MergeDetails {
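    /// Reads the merge-related refs (`MERGE_HEAD`, `CHERRY_PICK_HEAD`, `REBASE_HEAD`,
    /// `REVERT_HEAD`, `APPLY_HEAD`) and the merge message, then recomputes the set of
    /// conflicted paths only when those heads have changed since the previous snapshot.
    /// Returns the new details along with whether the merge-heads state was updated.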
2980 async fn load(
2981 backend: &Arc<dyn GitRepository>,
2982 status: &SumTree<StatusEntry>,
2983 prev_snapshot: &RepositorySnapshot,
2984 ) -> Result<(MergeDetails, bool)> {
2985 log::debug!("load merge details");
2986 let message = backend.merge_message().await;
2987 let heads = backend
2988 .revparse_batch(vec![
2989 "MERGE_HEAD".into(),
2990 "CHERRY_PICK_HEAD".into(),
2991 "REBASE_HEAD".into(),
2992 "REVERT_HEAD".into(),
2993 "APPLY_HEAD".into(),
2994 ])
2995 .await
2996 .log_err()
2997 .unwrap_or_default()
2998 .into_iter()
2999 .map(|opt| opt.map(SharedString::from))
3000 .collect::<Vec<_>>();
3001 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3002 let conflicted_paths = if merge_heads_changed {
3003 let current_conflicted_paths = TreeSet::from_ordered_entries(
3004 status
3005 .iter()
3006 .filter(|entry| entry.status.is_conflicted())
3007 .map(|entry| entry.repo_path.clone()),
3008 );
3009
3010 // It can happen that we run a scan while a lengthy merge is in progress
3011 // that will eventually result in conflicts, but before those conflicts
3012 // are reported by `git status`. Since for the moment we only care about
3013 // the merge heads state for the purposes of tracking conflicts, don't update
3014 // this state until we see some conflicts.
3015 if heads.iter().any(Option::is_some)
3016 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3017 && current_conflicted_paths.is_empty()
3018 {
3019 log::debug!("not updating merge heads because no conflicts found");
3020 return Ok((
3021 MergeDetails {
3022 message: message.map(SharedString::from),
3023 ..prev_snapshot.merge.clone()
3024 },
3025 false,
3026 ));
3027 }
3028
3029 current_conflicted_paths
3030 } else {
3031 prev_snapshot.merge.conflicted_paths.clone()
3032 };
3033 let details = MergeDetails {
3034 conflicted_paths,
3035 message: message.map(SharedString::from),
3036 heads,
3037 };
3038 Ok((details, merge_heads_changed))
3039 }
3040}
3041
3042impl Repository {
3043 pub fn snapshot(&self) -> RepositorySnapshot {
3044 self.snapshot.clone()
3045 }
3046
3047 fn local(
3048 id: RepositoryId,
3049 work_directory_abs_path: Arc<Path>,
3050 dot_git_abs_path: Arc<Path>,
3051 repository_dir_abs_path: Arc<Path>,
3052 common_dir_abs_path: Arc<Path>,
3053 project_environment: WeakEntity<ProjectEnvironment>,
3054 fs: Arc<dyn Fs>,
3055 git_store: WeakEntity<GitStore>,
3056 cx: &mut Context<Self>,
3057 ) -> Self {
3058 let snapshot =
3059 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3060 Repository {
3061 this: cx.weak_entity(),
3062 git_store,
3063 snapshot,
3064 commit_message_buffer: None,
3065 askpass_delegates: Default::default(),
3066 paths_needing_status_update: Default::default(),
3067 latest_askpass_id: 0,
3068 job_sender: Repository::spawn_local_git_worker(
3069 work_directory_abs_path,
3070 dot_git_abs_path,
3071 repository_dir_abs_path,
3072 common_dir_abs_path,
3073 project_environment,
3074 fs,
3075 cx,
3076 ),
3077 job_id: 0,
3078 active_jobs: Default::default(),
3079 }
3080 }
3081
3082 fn remote(
3083 id: RepositoryId,
3084 work_directory_abs_path: Arc<Path>,
3085 path_style: PathStyle,
3086 project_id: ProjectId,
3087 client: AnyProtoClient,
3088 git_store: WeakEntity<GitStore>,
3089 cx: &mut Context<Self>,
3090 ) -> Self {
3091 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3092 Self {
3093 this: cx.weak_entity(),
3094 snapshot,
3095 commit_message_buffer: None,
3096 git_store,
3097 paths_needing_status_update: Default::default(),
3098 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3099 askpass_delegates: Default::default(),
3100 latest_askpass_id: 0,
3101 active_jobs: Default::default(),
3102 job_id: 0,
3103 }
3104 }
3105
3106 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3107 self.git_store.upgrade()
3108 }
3109
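    /// For every open buffer that belongs to this repository, reloads the index and HEAD
    /// texts that are actually in use from the local backend on a background task, computes
    /// the minimal `DiffBasesChange`, forwards it to any downstream client, and applies it
    /// to the buffer's git state.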
3110 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3111 let this = cx.weak_entity();
3112 let git_store = self.git_store.clone();
3113 let _ = self.send_keyed_job(
3114 Some(GitJobKey::ReloadBufferDiffBases),
3115 None,
3116 |state, mut cx| async move {
3117 let RepositoryState::Local { backend, .. } = state else {
3118 log::error!("tried to recompute diffs for a non-local repository");
3119 return Ok(());
3120 };
3121
3122 let Some(this) = this.upgrade() else {
3123 return Ok(());
3124 };
3125
3126 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3127 git_store.update(cx, |git_store, cx| {
3128 git_store
3129 .diffs
3130 .iter()
3131 .filter_map(|(buffer_id, diff_state)| {
3132 let buffer_store = git_store.buffer_store.read(cx);
3133 let buffer = buffer_store.get(*buffer_id)?;
3134 let file = File::from_dyn(buffer.read(cx).file())?;
3135 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3136 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3137 log::debug!(
3138 "start reload diff bases for repo path {}",
3139 repo_path.as_unix_str()
3140 );
3141 diff_state.update(cx, |diff_state, _| {
3142 let has_unstaged_diff = diff_state
3143 .unstaged_diff
3144 .as_ref()
3145 .is_some_and(|diff| diff.is_upgradable());
3146 let has_uncommitted_diff = diff_state
3147 .uncommitted_diff
3148 .as_ref()
3149 .is_some_and(|set| set.is_upgradable());
3150
3151 Some((
3152 buffer,
3153 repo_path,
3154 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3155 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3156 ))
3157 })
3158 })
3159 .collect::<Vec<_>>()
3160 })
3161 })??;
3162
3163 let buffer_diff_base_changes = cx
3164 .background_spawn(async move {
3165 let mut changes = Vec::new();
3166 for (buffer, repo_path, current_index_text, current_head_text) in
3167 &repo_diff_state_updates
3168 {
3169 let index_text = if current_index_text.is_some() {
3170 backend.load_index_text(repo_path.clone()).await
3171 } else {
3172 None
3173 };
3174 let head_text = if current_head_text.is_some() {
3175 backend.load_committed_text(repo_path.clone()).await
3176 } else {
3177 None
3178 };
3179
3180 let change =
3181 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3182 (Some(current_index), Some(current_head)) => {
3183 let index_changed =
3184 index_text.as_ref() != current_index.as_deref();
3185 let head_changed =
3186 head_text.as_ref() != current_head.as_deref();
3187 if index_changed && head_changed {
3188 if index_text == head_text {
3189 Some(DiffBasesChange::SetBoth(head_text))
3190 } else {
3191 Some(DiffBasesChange::SetEach {
3192 index: index_text,
3193 head: head_text,
3194 })
3195 }
3196 } else if index_changed {
3197 Some(DiffBasesChange::SetIndex(index_text))
3198 } else if head_changed {
3199 Some(DiffBasesChange::SetHead(head_text))
3200 } else {
3201 None
3202 }
3203 }
3204 (Some(current_index), None) => {
3205 let index_changed =
3206 index_text.as_ref() != current_index.as_deref();
3207 index_changed
3208 .then_some(DiffBasesChange::SetIndex(index_text))
3209 }
3210 (None, Some(current_head)) => {
3211 let head_changed =
3212 head_text.as_ref() != current_head.as_deref();
3213 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3214 }
3215 (None, None) => None,
3216 };
3217
3218 changes.push((buffer.clone(), change))
3219 }
3220 changes
3221 })
3222 .await;
3223
3224 git_store.update(&mut cx, |git_store, cx| {
3225 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3226 let buffer_snapshot = buffer.read(cx).text_snapshot();
3227 let buffer_id = buffer_snapshot.remote_id();
3228 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3229 continue;
3230 };
3231
3232 let downstream_client = git_store.downstream_client();
3233 diff_state.update(cx, |diff_state, cx| {
3234 use proto::update_diff_bases::Mode;
3235
3236 if let Some((diff_bases_change, (client, project_id))) =
3237 diff_bases_change.clone().zip(downstream_client)
3238 {
3239 let (staged_text, committed_text, mode) = match diff_bases_change {
3240 DiffBasesChange::SetIndex(index) => {
3241 (index, None, Mode::IndexOnly)
3242 }
3243 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3244 DiffBasesChange::SetEach { index, head } => {
3245 (index, head, Mode::IndexAndHead)
3246 }
3247 DiffBasesChange::SetBoth(text) => {
3248 (None, text, Mode::IndexMatchesHead)
3249 }
3250 };
3251 client
3252 .send(proto::UpdateDiffBases {
3253 project_id: project_id.to_proto(),
3254 buffer_id: buffer_id.to_proto(),
3255 staged_text,
3256 committed_text,
3257 mode: mode as i32,
3258 })
3259 .log_err();
3260 }
3261
3262 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3263 });
3264 }
3265 })
3266 },
3267 );
3268 }
3269
3270 pub fn send_job<F, Fut, R>(
3271 &mut self,
3272 status: Option<SharedString>,
3273 job: F,
3274 ) -> oneshot::Receiver<R>
3275 where
3276 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3277 Fut: Future<Output = R> + 'static,
3278 R: Send + 'static,
3279 {
3280 self.send_keyed_job(None, status, job)
3281 }
3282
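    /// Enqueues a job on the repository's worker and returns a receiver for its result.
    /// When a status message is provided, an entry is tracked in `active_jobs` (with
    /// `cx.notify()`) while the job runs so the UI can surface progress. The optional key
    /// is presumably used by the worker to coalesce or serialize related jobs, such as
    /// diff-base reloads.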
3283 fn send_keyed_job<F, Fut, R>(
3284 &mut self,
3285 key: Option<GitJobKey>,
3286 status: Option<SharedString>,
3287 job: F,
3288 ) -> oneshot::Receiver<R>
3289 where
3290 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3291 Fut: Future<Output = R> + 'static,
3292 R: Send + 'static,
3293 {
3294 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3295 let job_id = post_inc(&mut self.job_id);
3296 let this = self.this.clone();
3297 self.job_sender
3298 .unbounded_send(GitJob {
3299 key,
3300 job: Box::new(move |state, cx: &mut AsyncApp| {
3301 let job = job(state, cx.clone());
3302 cx.spawn(async move |cx| {
3303 if let Some(s) = status.clone() {
3304 this.update(cx, |this, cx| {
3305 this.active_jobs.insert(
3306 job_id,
3307 JobInfo {
3308 start: Instant::now(),
3309 message: s.clone(),
3310 },
3311 );
3312
3313 cx.notify();
3314 })
3315 .ok();
3316 }
3317 let result = job.await;
3318
3319 this.update(cx, |this, cx| {
3320 this.active_jobs.remove(&job_id);
3321 cx.notify();
3322 })
3323 .ok();
3324
3325 result_tx.send(result).ok();
3326 })
3327 }),
3328 })
3329 .ok();
3330 result_rx
3331 }
3332
3333 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3334 let Some(git_store) = self.git_store.upgrade() else {
3335 return;
3336 };
3337 let entity = cx.entity();
3338 git_store.update(cx, |git_store, cx| {
3339 let Some((&id, _)) = git_store
3340 .repositories
3341 .iter()
3342 .find(|(_, handle)| *handle == &entity)
3343 else {
3344 return;
3345 };
3346 git_store.active_repo_id = Some(id);
3347 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3348 });
3349 }
3350
3351 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3352 self.snapshot.status()
3353 }
3354
3355 pub fn cached_stash(&self) -> GitStash {
3356 self.snapshot.stash_entries.clone()
3357 }
3358
3359 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3360 let git_store = self.git_store.upgrade()?;
3361 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3362 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3363 let abs_path = SanitizedPath::new(&abs_path);
3364 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3365 Some(ProjectPath {
3366 worktree_id: worktree.read(cx).id(),
3367 path: relative_path,
3368 })
3369 }
3370
3371 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3372 let git_store = self.git_store.upgrade()?;
3373 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3374 let abs_path = worktree_store.absolutize(path, cx)?;
3375 self.snapshot.abs_path_to_repo_path(&abs_path)
3376 }
3377
3378 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3379 other
3380 .read(cx)
3381 .snapshot
3382 .work_directory_abs_path
3383 .starts_with(&self.snapshot.work_directory_abs_path)
3384 }
3385
3386 pub fn open_commit_buffer(
3387 &mut self,
3388 languages: Option<Arc<LanguageRegistry>>,
3389 buffer_store: Entity<BufferStore>,
3390 cx: &mut Context<Self>,
3391 ) -> Task<Result<Entity<Buffer>>> {
3392 let id = self.id;
3393 if let Some(buffer) = self.commit_message_buffer.clone() {
3394 return Task::ready(Ok(buffer));
3395 }
3396 let this = cx.weak_entity();
3397
3398 let rx = self.send_job(None, move |state, mut cx| async move {
3399 let Some(this) = this.upgrade() else {
3400 bail!("git store was dropped");
3401 };
3402 match state {
3403 RepositoryState::Local { .. } => {
3404 this.update(&mut cx, |_, cx| {
3405 Self::open_local_commit_buffer(languages, buffer_store, cx)
3406 })?
3407 .await
3408 }
3409 RepositoryState::Remote { project_id, client } => {
3410 let request = client.request(proto::OpenCommitMessageBuffer {
3411 project_id: project_id.0,
3412 repository_id: id.to_proto(),
3413 });
3414 let response = request.await.context("requesting to open commit buffer")?;
3415 let buffer_id = BufferId::new(response.buffer_id)?;
3416 let buffer = buffer_store
3417 .update(&mut cx, |buffer_store, cx| {
3418 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3419 })?
3420 .await?;
3421 if let Some(language_registry) = languages {
3422 let git_commit_language =
3423 language_registry.language_for_name("Git Commit").await?;
3424 buffer.update(&mut cx, |buffer, cx| {
3425 buffer.set_language(Some(git_commit_language), cx);
3426 })?;
3427 }
3428 this.update(&mut cx, |this, _| {
3429 this.commit_message_buffer = Some(buffer.clone());
3430 })?;
3431 Ok(buffer)
3432 }
3433 }
3434 });
3435
3436 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3437 }
3438
3439 fn open_local_commit_buffer(
3440 language_registry: Option<Arc<LanguageRegistry>>,
3441 buffer_store: Entity<BufferStore>,
3442 cx: &mut Context<Self>,
3443 ) -> Task<Result<Entity<Buffer>>> {
3444 cx.spawn(async move |repository, cx| {
3445 let buffer = buffer_store
3446 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3447 .await?;
3448
3449 if let Some(language_registry) = language_registry {
3450 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3451 buffer.update(cx, |buffer, cx| {
3452 buffer.set_language(Some(git_commit_language), cx);
3453 })?;
3454 }
3455
3456 repository.update(cx, |repository, _| {
3457 repository.commit_message_buffer = Some(buffer.clone());
3458 })?;
3459 Ok(buffer)
3460 })
3461 }
3462
3463 pub fn checkout_files(
3464 &mut self,
3465 commit: &str,
3466 paths: Vec<RepoPath>,
3467 _cx: &mut App,
3468 ) -> oneshot::Receiver<Result<()>> {
3469 let commit = commit.to_string();
3470 let id = self.id;
3471
3472 self.send_job(
3473 Some(format!("git checkout {}", commit).into()),
3474 move |git_repo, _| async move {
3475 match git_repo {
3476 RepositoryState::Local {
3477 backend,
3478 environment,
3479 ..
3480 } => {
3481 backend
3482 .checkout_files(commit, paths, environment.clone())
3483 .await
3484 }
3485 RepositoryState::Remote { project_id, client } => {
3486 client
3487 .request(proto::GitCheckoutFiles {
3488 project_id: project_id.0,
3489 repository_id: id.to_proto(),
3490 commit,
3491 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3492 })
3493 .await?;
3494
3495 Ok(())
3496 }
3497 }
3498 },
3499 )
3500 }
3501
3502 pub fn reset(
3503 &mut self,
3504 commit: String,
3505 reset_mode: ResetMode,
3506 _cx: &mut App,
3507 ) -> oneshot::Receiver<Result<()>> {
3508 let id = self.id;
3509
3510 self.send_job(None, move |git_repo, _| async move {
3511 match git_repo {
3512 RepositoryState::Local {
3513 backend,
3514 environment,
3515 ..
3516 } => backend.reset(commit, reset_mode, environment).await,
3517 RepositoryState::Remote { project_id, client } => {
3518 client
3519 .request(proto::GitReset {
3520 project_id: project_id.0,
3521 repository_id: id.to_proto(),
3522 commit,
3523 mode: match reset_mode {
3524 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3525 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3526 },
3527 })
3528 .await?;
3529
3530 Ok(())
3531 }
3532 }
3533 })
3534 }
3535
3536 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3537 let id = self.id;
3538 self.send_job(None, move |git_repo, _cx| async move {
3539 match git_repo {
3540 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3541 RepositoryState::Remote { project_id, client } => {
3542 let resp = client
3543 .request(proto::GitShow {
3544 project_id: project_id.0,
3545 repository_id: id.to_proto(),
3546 commit,
3547 })
3548 .await?;
3549
3550 Ok(CommitDetails {
3551 sha: resp.sha.into(),
3552 message: resp.message.into(),
3553 commit_timestamp: resp.commit_timestamp,
3554 author_email: resp.author_email.into(),
3555 author_name: resp.author_name.into(),
3556 })
3557 }
3558 }
3559 })
3560 }
3561
3562 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3563 let id = self.id;
3564 self.send_job(None, move |git_repo, cx| async move {
3565 match git_repo {
3566 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3567 RepositoryState::Remote {
3568 client, project_id, ..
3569 } => {
3570 let response = client
3571 .request(proto::LoadCommitDiff {
3572 project_id: project_id.0,
3573 repository_id: id.to_proto(),
3574 commit,
3575 })
3576 .await?;
3577 Ok(CommitDiff {
3578 files: response
3579 .files
3580 .into_iter()
3581 .map(|file| {
3582 Ok(CommitFile {
3583 path: RepoPath::from_proto(&file.path)?,
3584 old_text: file.old_text,
3585 new_text: file.new_text,
3586 })
3587 })
3588 .collect::<Result<Vec<_>>>()?,
3589 })
3590 }
3591 }
3592 })
3593 }
3594
3595 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3596 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3597 }
3598
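    /// Stages the given paths. Any open buffers for those paths that still exist on disk
    /// are saved first, so the content staged by git matches what the user sees, then the
    /// stage job is dispatched locally or forwarded over RPC.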
3599 pub fn stage_entries(
3600 &self,
3601 entries: Vec<RepoPath>,
3602 cx: &mut Context<Self>,
3603 ) -> Task<anyhow::Result<()>> {
3604 if entries.is_empty() {
3605 return Task::ready(Ok(()));
3606 }
3607 let id = self.id;
3608
3609 let mut save_futures = Vec::new();
3610 if let Some(buffer_store) = self.buffer_store(cx) {
3611 buffer_store.update(cx, |buffer_store, cx| {
3612 for path in &entries {
3613 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3614 continue;
3615 };
3616 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3617 && buffer
3618 .read(cx)
3619 .file()
3620 .is_some_and(|file| file.disk_state().exists())
3621 {
3622 save_futures.push(buffer_store.save_buffer(buffer, cx));
3623 }
3624 }
3625 })
3626 }
3627
3628 cx.spawn(async move |this, cx| {
3629 for save_future in save_futures {
3630 save_future.await?;
3631 }
3632
3633 this.update(cx, |this, _| {
3634 this.send_job(None, move |git_repo, _cx| async move {
3635 match git_repo {
3636 RepositoryState::Local {
3637 backend,
3638 environment,
3639 ..
3640 } => backend.stage_paths(entries, environment.clone()).await,
3641 RepositoryState::Remote { project_id, client } => {
3642 client
3643 .request(proto::Stage {
3644 project_id: project_id.0,
3645 repository_id: id.to_proto(),
3646 paths: entries
3647 .into_iter()
3648 .map(|repo_path| repo_path.to_proto())
3649 .collect(),
3650 })
3651 .await
3652 .context("sending stage request")?;
3653
3654 Ok(())
3655 }
3656 }
3657 })
3658 })?
3659 .await??;
3660
3661 Ok(())
3662 })
3663 }
3664
3665 pub fn unstage_entries(
3666 &self,
3667 entries: Vec<RepoPath>,
3668 cx: &mut Context<Self>,
3669 ) -> Task<anyhow::Result<()>> {
3670 if entries.is_empty() {
3671 return Task::ready(Ok(()));
3672 }
3673 let id = self.id;
3674
3675 let mut save_futures = Vec::new();
3676 if let Some(buffer_store) = self.buffer_store(cx) {
3677 buffer_store.update(cx, |buffer_store, cx| {
3678 for path in &entries {
3679 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3680 continue;
3681 };
3682 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3683 && buffer
3684 .read(cx)
3685 .file()
3686 .is_some_and(|file| file.disk_state().exists())
3687 {
3688 save_futures.push(buffer_store.save_buffer(buffer, cx));
3689 }
3690 }
3691 })
3692 }
3693
3694 cx.spawn(async move |this, cx| {
3695 for save_future in save_futures {
3696 save_future.await?;
3697 }
3698
3699 this.update(cx, |this, _| {
3700 this.send_job(None, move |git_repo, _cx| async move {
3701 match git_repo {
3702 RepositoryState::Local {
3703 backend,
3704 environment,
3705 ..
3706 } => backend.unstage_paths(entries, environment).await,
3707 RepositoryState::Remote { project_id, client } => {
3708 client
3709 .request(proto::Unstage {
3710 project_id: project_id.0,
3711 repository_id: id.to_proto(),
3712 paths: entries
3713 .into_iter()
3714 .map(|repo_path| repo_path.to_proto())
3715 .collect(),
3716 })
3717 .await
3718 .context("sending unstage request")?;
3719
3720 Ok(())
3721 }
3722 }
3723 })
3724 })?
3725 .await??;
3726
3727 Ok(())
3728 })
3729 }
3730
3731 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3732 let to_stage = self
3733 .cached_status()
3734 .filter(|entry| !entry.status.staging().is_fully_staged())
3735 .map(|entry| entry.repo_path)
3736 .collect();
3737 self.stage_entries(to_stage, cx)
3738 }
3739
3740 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3741 let to_unstage = self
3742 .cached_status()
3743 .filter(|entry| entry.status.staging().has_staged())
3744 .map(|entry| entry.repo_path)
3745 .collect();
3746 self.unstage_entries(to_unstage, cx)
3747 }
3748
3749 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3750 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3751
3752 self.stash_entries(to_stash, cx)
3753 }
3754
3755 pub fn stash_entries(
3756 &mut self,
3757 entries: Vec<RepoPath>,
3758 cx: &mut Context<Self>,
3759 ) -> Task<anyhow::Result<()>> {
3760 let id = self.id;
3761
3762 cx.spawn(async move |this, cx| {
3763 this.update(cx, |this, _| {
3764 this.send_job(None, move |git_repo, _cx| async move {
3765 match git_repo {
3766 RepositoryState::Local {
3767 backend,
3768 environment,
3769 ..
3770 } => backend.stash_paths(entries, environment).await,
3771 RepositoryState::Remote { project_id, client } => {
3772 client
3773 .request(proto::Stash {
3774 project_id: project_id.0,
3775 repository_id: id.to_proto(),
3776 paths: entries
3777 .into_iter()
3778 .map(|repo_path| repo_path.to_proto())
3779 .collect(),
3780 })
3781 .await
3782 .context("sending stash request")?;
3783 Ok(())
3784 }
3785 }
3786 })
3787 })?
3788 .await??;
3789 Ok(())
3790 })
3791 }
3792
3793 pub fn stash_pop(
3794 &mut self,
3795 index: Option<usize>,
3796 cx: &mut Context<Self>,
3797 ) -> Task<anyhow::Result<()>> {
3798 let id = self.id;
3799 cx.spawn(async move |this, cx| {
3800 this.update(cx, |this, _| {
3801 this.send_job(None, move |git_repo, _cx| async move {
3802 match git_repo {
3803 RepositoryState::Local {
3804 backend,
3805 environment,
3806 ..
3807 } => backend.stash_pop(index, environment).await,
3808 RepositoryState::Remote { project_id, client } => {
3809 client
3810 .request(proto::StashPop {
3811 project_id: project_id.0,
3812 repository_id: id.to_proto(),
3813 stash_index: index.map(|i| i as u64),
3814 })
3815 .await
3816 .context("sending stash pop request")?;
3817 Ok(())
3818 }
3819 }
3820 })
3821 })?
3822 .await??;
3823 Ok(())
3824 })
3825 }
3826
3827 pub fn stash_apply(
3828 &mut self,
3829 index: Option<usize>,
3830 cx: &mut Context<Self>,
3831 ) -> Task<anyhow::Result<()>> {
3832 let id = self.id;
3833 cx.spawn(async move |this, cx| {
3834 this.update(cx, |this, _| {
3835 this.send_job(None, move |git_repo, _cx| async move {
3836 match git_repo {
3837 RepositoryState::Local {
3838 backend,
3839 environment,
3840 ..
3841 } => backend.stash_apply(index, environment).await,
3842 RepositoryState::Remote { project_id, client } => {
3843 client
3844 .request(proto::StashApply {
3845 project_id: project_id.0,
3846 repository_id: id.to_proto(),
3847 stash_index: index.map(|i| i as u64),
3848 })
3849 .await
3850 .context("sending stash apply request")?;
3851 Ok(())
3852 }
3853 }
3854 })
3855 })?
3856 .await??;
3857 Ok(())
3858 })
3859 }
3860
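    /// Drops the stash entry at `index`; `None` defers to the backend's default.
    ///
    /// For local repositories the stash list is reloaded after a successful
    /// drop, a `StashEntriesChanged` event is emitted, and the updated snapshot
    /// is forwarded to any downstream clients.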
3861 pub fn stash_drop(
3862 &mut self,
3863 index: Option<usize>,
3864 cx: &mut Context<Self>,
3865 ) -> oneshot::Receiver<anyhow::Result<()>> {
3866 let id = self.id;
3867 let updates_tx = self
3868 .git_store()
3869 .and_then(|git_store| match &git_store.read(cx).state {
3870 GitStoreState::Local { downstream, .. } => downstream
3871 .as_ref()
3872 .map(|downstream| downstream.updates_tx.clone()),
3873 _ => None,
3874 });
3875 let this = cx.weak_entity();
3876 self.send_job(None, move |git_repo, mut cx| async move {
3877 match git_repo {
3878 RepositoryState::Local {
3879 backend,
3880 environment,
3881 ..
3882 } => {
                    // TODO: it would be nice not to have to refresh the stash entries manually here.
3884 let result = backend.stash_drop(index, environment).await;
3885 if result.is_ok()
3886 && let Ok(stash_entries) = backend.stash_entries().await
3887 {
3888 let snapshot = this.update(&mut cx, |this, cx| {
3889 this.snapshot.stash_entries = stash_entries;
3890 cx.emit(RepositoryEvent::StashEntriesChanged);
3891 this.snapshot.clone()
3892 })?;
3893 if let Some(updates_tx) = updates_tx {
3894 updates_tx
3895 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3896 .ok();
3897 }
3898 }
3899
3900 result
3901 }
3902 RepositoryState::Remote { project_id, client } => {
3903 client
3904 .request(proto::StashDrop {
3905 project_id: project_id.0,
3906 repository_id: id.to_proto(),
3907 stash_index: index.map(|i| i as u64),
3908 })
3909 .await
                        .context("sending stash drop request")?;
3911 Ok(())
3912 }
3913 }
3914 })
3915 }
3916
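    /// Creates a commit with the given message and options; `name_and_email`,
    /// when provided, is passed through as the author identity.
    ///
    /// A minimal usage sketch (illustrative only; assumes `repo` is an
    /// `Entity<Repository>`, `cx` a `&mut App`, and that `message` and
    /// `options` are already in scope):
    ///
    /// ```ignore
    /// let rx = repo.update(cx, |repo, cx| repo.commit(message, None, options, cx));
    /// // `rx` resolves once the commit job has run on the git worker.
    /// ```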
3917 pub fn commit(
3918 &mut self,
3919 message: SharedString,
3920 name_and_email: Option<(SharedString, SharedString)>,
3921 options: CommitOptions,
3922 _cx: &mut App,
3923 ) -> oneshot::Receiver<Result<()>> {
3924 let id = self.id;
3925
3926 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3927 match git_repo {
3928 RepositoryState::Local {
3929 backend,
3930 environment,
3931 ..
3932 } => {
3933 backend
3934 .commit(message, name_and_email, options, environment)
3935 .await
3936 }
3937 RepositoryState::Remote { project_id, client } => {
3938 let (name, email) = name_and_email.unzip();
3939 client
3940 .request(proto::Commit {
3941 project_id: project_id.0,
3942 repository_id: id.to_proto(),
3943 message: String::from(message),
3944 name: name.map(String::from),
3945 email: email.map(String::from),
3946 options: Some(proto::commit::CommitOptions {
3947 amend: options.amend,
3948 signoff: options.signoff,
3949 }),
3950 })
3951 .await
3952 .context("sending commit request")?;
3953
3954 Ok(())
3955 }
3956 }
3957 })
3958 }
3959
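    /// Runs `git fetch` with the given options.
    ///
    /// For remote projects, the askpass delegate is registered under a fresh id
    /// for the duration of the request so credential prompts can be answered,
    /// and is removed again once the request completes.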
3960 pub fn fetch(
3961 &mut self,
3962 fetch_options: FetchOptions,
3963 askpass: AskPassDelegate,
3964 _cx: &mut App,
3965 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3966 let askpass_delegates = self.askpass_delegates.clone();
3967 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3968 let id = self.id;
3969
3970 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3971 match git_repo {
3972 RepositoryState::Local {
3973 backend,
3974 environment,
3975 ..
3976 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3977 RepositoryState::Remote { project_id, client } => {
3978 askpass_delegates.lock().insert(askpass_id, askpass);
3979 let _defer = util::defer(|| {
3980 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3981 debug_assert!(askpass_delegate.is_some());
3982 });
3983
3984 let response = client
3985 .request(proto::Fetch {
3986 project_id: project_id.0,
3987 repository_id: id.to_proto(),
3988 askpass_id,
3989 remote: fetch_options.to_proto(),
3990 })
3991 .await
3992 .context("sending fetch request")?;
3993
3994 Ok(RemoteCommandOutput {
3995 stdout: response.stdout,
3996 stderr: response.stderr,
3997 })
3998 }
3999 }
4000 })
4001 }
4002
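    /// Pushes `branch` to `remote`.
    ///
    /// After a successful local push, the branch list is re-read so the cached
    /// head branch (including upstream tracking info) reflects the push, and
    /// the updated snapshot is forwarded to any downstream clients.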
4003 pub fn push(
4004 &mut self,
4005 branch: SharedString,
4006 remote: SharedString,
4007 options: Option<PushOptions>,
4008 askpass: AskPassDelegate,
4009 cx: &mut Context<Self>,
4010 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4011 let askpass_delegates = self.askpass_delegates.clone();
4012 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4013 let id = self.id;
4014
4015 let args = options
4016 .map(|option| match option {
4017 PushOptions::SetUpstream => " --set-upstream",
4018 PushOptions::Force => " --force-with-lease",
4019 })
4020 .unwrap_or("");
4021
4022 let updates_tx = self
4023 .git_store()
4024 .and_then(|git_store| match &git_store.read(cx).state {
4025 GitStoreState::Local { downstream, .. } => downstream
4026 .as_ref()
4027 .map(|downstream| downstream.updates_tx.clone()),
4028 _ => None,
4029 });
4030
4031 let this = cx.weak_entity();
4032 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
4034 move |git_repo, mut cx| async move {
4035 match git_repo {
4036 RepositoryState::Local {
4037 backend,
4038 environment,
4039 ..
4040 } => {
4041 let result = backend
4042 .push(
4043 branch.to_string(),
4044 remote.to_string(),
4045 options,
4046 askpass,
4047 environment.clone(),
4048 cx.clone(),
4049 )
4050 .await;
                        // TODO: it would be nice not to have to refresh the head branch manually here.
4052 if result.is_ok() {
4053 let branches = backend.branches().await?;
4054 let branch = branches.into_iter().find(|branch| branch.is_head);
4055 log::info!("head branch after scan is {branch:?}");
4056 let snapshot = this.update(&mut cx, |this, cx| {
4057 this.snapshot.branch = branch;
4058 cx.emit(RepositoryEvent::BranchChanged);
4059 this.snapshot.clone()
4060 })?;
4061 if let Some(updates_tx) = updates_tx {
4062 updates_tx
4063 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4064 .ok();
4065 }
4066 }
4067 result
4068 }
4069 RepositoryState::Remote { project_id, client } => {
4070 askpass_delegates.lock().insert(askpass_id, askpass);
4071 let _defer = util::defer(|| {
4072 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4073 debug_assert!(askpass_delegate.is_some());
4074 });
4075 let response = client
4076 .request(proto::Push {
4077 project_id: project_id.0,
4078 repository_id: id.to_proto(),
4079 askpass_id,
4080 branch_name: branch.to_string(),
4081 remote_name: remote.to_string(),
4082 options: options.map(|options| match options {
4083 PushOptions::Force => proto::push::PushOptions::Force,
4084 PushOptions::SetUpstream => {
4085 proto::push::PushOptions::SetUpstream
4086 }
4087 }
4088 as i32),
4089 })
4090 .await
4091 .context("sending push request")?;
4092
4093 Ok(RemoteCommandOutput {
4094 stdout: response.stdout,
4095 stderr: response.stderr,
4096 })
4097 }
4098 }
4099 },
4100 )
4101 }
4102
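    /// Pulls `branch` from `remote`, answering credential prompts through
    /// `askpass`.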
4103 pub fn pull(
4104 &mut self,
4105 branch: SharedString,
4106 remote: SharedString,
4107 askpass: AskPassDelegate,
4108 _cx: &mut App,
4109 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4110 let askpass_delegates = self.askpass_delegates.clone();
4111 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4112 let id = self.id;
4113
4114 self.send_job(
4115 Some(format!("git pull {} {}", remote, branch).into()),
4116 move |git_repo, cx| async move {
4117 match git_repo {
4118 RepositoryState::Local {
4119 backend,
4120 environment,
4121 ..
4122 } => {
4123 backend
4124 .pull(
4125 branch.to_string(),
4126 remote.to_string(),
4127 askpass,
4128 environment.clone(),
4129 cx,
4130 )
4131 .await
4132 }
4133 RepositoryState::Remote { project_id, client } => {
4134 askpass_delegates.lock().insert(askpass_id, askpass);
4135 let _defer = util::defer(|| {
4136 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4137 debug_assert!(askpass_delegate.is_some());
4138 });
4139 let response = client
4140 .request(proto::Pull {
4141 project_id: project_id.0,
4142 repository_id: id.to_proto(),
4143 askpass_id,
4144 branch_name: branch.to_string(),
4145 remote_name: remote.to_string(),
4146 })
4147 .await
4148 .context("sending pull request")?;
4149
4150 Ok(RemoteCommandOutput {
4151 stdout: response.stdout,
4152 stderr: response.stderr,
4153 })
4154 }
4155 }
4156 },
4157 )
4158 }
4159
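    /// Queues a job that writes `content` into the index entry for `path`
    /// (`None` is passed through to the backend unchanged).
    ///
    /// Jobs are keyed by `GitJobKey::WriteIndex(path)`, so when several writes
    /// for the same path are queued, only the most recently queued one is
    /// executed. After the write, the buffer's diff state records the hunk
    /// staging operation count that this write corresponds to.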
4160 fn spawn_set_index_text_job(
4161 &mut self,
4162 path: RepoPath,
4163 content: Option<String>,
4164 hunk_staging_operation_count: Option<usize>,
4165 cx: &mut Context<Self>,
4166 ) -> oneshot::Receiver<anyhow::Result<()>> {
4167 let id = self.id;
4168 let this = cx.weak_entity();
4169 let git_store = self.git_store.clone();
4170 self.send_keyed_job(
4171 Some(GitJobKey::WriteIndex(path.clone())),
4172 None,
4173 move |git_repo, mut cx| async move {
4174 log::debug!(
4175 "start updating index text for buffer {}",
4176 path.as_unix_str()
4177 );
4178 match git_repo {
4179 RepositoryState::Local {
4180 backend,
4181 environment,
4182 ..
4183 } => {
4184 backend
4185 .set_index_text(path.clone(), content, environment.clone())
4186 .await?;
4187 }
4188 RepositoryState::Remote { project_id, client } => {
4189 client
4190 .request(proto::SetIndexText {
4191 project_id: project_id.0,
4192 repository_id: id.to_proto(),
4193 path: path.to_proto(),
4194 text: content,
4195 })
4196 .await?;
4197 }
4198 }
4199 log::debug!(
4200 "finish updating index text for buffer {}",
4201 path.as_unix_str()
4202 );
4203
4204 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4205 let project_path = this
4206 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4207 .ok()
4208 .flatten();
4209 git_store.update(&mut cx, |git_store, cx| {
4210 let buffer_id = git_store
4211 .buffer_store
4212 .read(cx)
4213 .get_by_path(&project_path?)?
4214 .read(cx)
4215 .remote_id();
4216 let diff_state = git_store.diffs.get(&buffer_id)?;
4217 diff_state.update(cx, |diff_state, _| {
4218 diff_state.hunk_staging_operation_count_as_of_write =
4219 hunk_staging_operation_count;
4220 });
4221 Some(())
4222 })?;
4223 }
4224 Ok(())
4225 },
4226 )
4227 }
4228
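    /// Lists the repository's git remotes; `branch_name` is forwarded to the
    /// backend (or over RPC) unchanged.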
4229 pub fn get_remotes(
4230 &mut self,
4231 branch_name: Option<String>,
4232 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4233 let id = self.id;
4234 self.send_job(None, move |repo, _cx| async move {
4235 match repo {
4236 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4237 RepositoryState::Remote { project_id, client } => {
4238 let response = client
4239 .request(proto::GetRemotes {
4240 project_id: project_id.0,
4241 repository_id: id.to_proto(),
4242 branch_name,
4243 })
4244 .await?;
4245
4246 let remotes = response
4247 .remotes
4248 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
4252 .collect();
4253
4254 Ok(remotes)
4255 }
4256 }
4257 })
4258 }
4259
4260 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4261 let id = self.id;
4262 self.send_job(None, move |repo, _| async move {
4263 match repo {
4264 RepositoryState::Local { backend, .. } => backend.branches().await,
4265 RepositoryState::Remote { project_id, client } => {
4266 let response = client
4267 .request(proto::GitGetBranches {
4268 project_id: project_id.0,
4269 repository_id: id.to_proto(),
4270 })
4271 .await?;
4272
4273 let branches = response
4274 .branches
4275 .into_iter()
4276 .map(|branch| proto_to_branch(&branch))
4277 .collect();
4278
4279 Ok(branches)
4280 }
4281 }
4282 })
4283 }
4284
4285 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4286 let id = self.id;
4287 self.send_job(None, move |repo, _| async move {
4288 match repo {
4289 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4290 RepositoryState::Remote { project_id, client } => {
4291 let response = client
4292 .request(proto::GetDefaultBranch {
4293 project_id: project_id.0,
4294 repository_id: id.to_proto(),
4295 })
4296 .await?;
4297
4298 anyhow::Ok(response.branch.map(SharedString::from))
4299 }
4300 }
4301 })
4302 }
4303
4304 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4305 let id = self.id;
4306 self.send_job(None, move |repo, _cx| async move {
4307 match repo {
4308 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4309 RepositoryState::Remote { project_id, client } => {
4310 let response = client
4311 .request(proto::GitDiff {
4312 project_id: project_id.0,
4313 repository_id: id.to_proto(),
4314 diff_type: match diff_type {
4315 DiffType::HeadToIndex => {
4316 proto::git_diff::DiffType::HeadToIndex.into()
4317 }
4318 DiffType::HeadToWorktree => {
4319 proto::git_diff::DiffType::HeadToWorktree.into()
4320 }
4321 },
4322 })
4323 .await?;
4324
4325 Ok(response.diff)
4326 }
4327 }
4328 })
4329 }
4330
4331 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4332 let id = self.id;
4333 self.send_job(
4334 Some(format!("git switch -c {branch_name}").into()),
4335 move |repo, _cx| async move {
4336 match repo {
4337 RepositoryState::Local { backend, .. } => {
4338 backend.create_branch(branch_name).await
4339 }
4340 RepositoryState::Remote { project_id, client } => {
4341 client
4342 .request(proto::GitCreateBranch {
4343 project_id: project_id.0,
4344 repository_id: id.to_proto(),
4345 branch_name,
4346 })
4347 .await?;
4348
4349 Ok(())
4350 }
4351 }
4352 },
4353 )
4354 }
4355
4356 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4357 let id = self.id;
4358 self.send_job(
4359 Some(format!("git switch {branch_name}").into()),
4360 move |repo, _cx| async move {
4361 match repo {
4362 RepositoryState::Local { backend, .. } => {
4363 backend.change_branch(branch_name).await
4364 }
4365 RepositoryState::Remote { project_id, client } => {
4366 client
4367 .request(proto::GitChangeBranch {
4368 project_id: project_id.0,
4369 repository_id: id.to_proto(),
4370 branch_name,
4371 })
4372 .await?;
4373
4374 Ok(())
4375 }
4376 }
4377 },
4378 )
4379 }
4380
4381 pub fn rename_branch(
4382 &mut self,
4383 branch: String,
4384 new_name: String,
4385 ) -> oneshot::Receiver<Result<()>> {
4386 let id = self.id;
4387 self.send_job(
4388 Some(format!("git branch -m {branch} {new_name}").into()),
4389 move |repo, _cx| async move {
4390 match repo {
4391 RepositoryState::Local { backend, .. } => {
4392 backend.rename_branch(branch, new_name).await
4393 }
4394 RepositoryState::Remote { project_id, client } => {
4395 client
4396 .request(proto::GitRenameBranch {
4397 project_id: project_id.0,
4398 repository_id: id.to_proto(),
4399 branch,
4400 new_name,
4401 })
4402 .await?;
4403
4404 Ok(())
4405 }
4406 }
4407 },
4408 )
4409 }
4410
4411 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4412 let id = self.id;
4413 self.send_job(None, move |repo, _cx| async move {
4414 match repo {
4415 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4416 RepositoryState::Remote { project_id, client } => {
4417 let response = client
4418 .request(proto::CheckForPushedCommits {
4419 project_id: project_id.0,
4420 repository_id: id.to_proto(),
4421 })
4422 .await?;
4423
4424 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4425
4426 Ok(branches)
4427 }
4428 }
4429 })
4430 }
4431
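    /// Captures a checkpoint of the repository's current state. Only supported
    /// for local repositories; remote repositories return an error.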
4432 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4433 self.send_job(None, |repo, _cx| async move {
4434 match repo {
4435 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4436 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4437 }
4438 })
4439 }
4440
4441 pub fn restore_checkpoint(
4442 &mut self,
4443 checkpoint: GitRepositoryCheckpoint,
4444 ) -> oneshot::Receiver<Result<()>> {
4445 self.send_job(None, move |repo, _cx| async move {
4446 match repo {
4447 RepositoryState::Local { backend, .. } => {
4448 backend.restore_checkpoint(checkpoint).await
4449 }
4450 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4451 }
4452 })
4453 }
4454
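    /// Applies an `UpdateRepository` message received from the host, replacing
    /// the snapshot's branch, head commit, merge state, stash entries, and
    /// statuses, and emitting the corresponding repository events when any of
    /// them change.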
4455 pub(crate) fn apply_remote_update(
4456 &mut self,
4457 update: proto::UpdateRepository,
4458 cx: &mut Context<Self>,
4459 ) -> Result<()> {
4460 let conflicted_paths = TreeSet::from_ordered_entries(
4461 update
4462 .current_merge_conflicts
4463 .into_iter()
4464 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4465 );
4466 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4467 let new_head_commit = update
4468 .head_commit_details
4469 .as_ref()
4470 .map(proto_to_commit_details);
4471 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4472 cx.emit(RepositoryEvent::BranchChanged)
4473 }
4474 self.snapshot.branch = new_branch;
4475 self.snapshot.head_commit = new_head_commit;
4476
4477 self.snapshot.merge.conflicted_paths = conflicted_paths;
4478 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4479 let new_stash_entries = GitStash {
4480 entries: update
4481 .stash_entries
4482 .iter()
4483 .filter_map(|entry| proto_to_stash(entry).ok())
4484 .collect(),
4485 };
4486 if self.snapshot.stash_entries != new_stash_entries {
4487 cx.emit(RepositoryEvent::StashEntriesChanged)
4488 }
4489 self.snapshot.stash_entries = new_stash_entries;
4490
4491 let edits = update
4492 .removed_statuses
4493 .into_iter()
4494 .filter_map(|path| {
4495 Some(sum_tree::Edit::Remove(PathKey(
4496 RelPath::from_proto(&path).log_err()?,
4497 )))
4498 })
4499 .chain(
4500 update
4501 .updated_statuses
4502 .into_iter()
4503 .filter_map(|updated_status| {
4504 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4505 }),
4506 )
4507 .collect::<Vec<_>>();
4508 if !edits.is_empty() {
4509 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4510 }
4511 self.snapshot.statuses_by_path.edit(edits, ());
4512 if update.is_last_update {
4513 self.snapshot.scan_id = update.scan_id;
4514 }
4515 Ok(())
4516 }
4517
4518 pub fn compare_checkpoints(
4519 &mut self,
4520 left: GitRepositoryCheckpoint,
4521 right: GitRepositoryCheckpoint,
4522 ) -> oneshot::Receiver<Result<bool>> {
4523 self.send_job(None, move |repo, _cx| async move {
4524 match repo {
4525 RepositoryState::Local { backend, .. } => {
4526 backend.compare_checkpoints(left, right).await
4527 }
4528 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4529 }
4530 })
4531 }
4532
4533 pub fn diff_checkpoints(
4534 &mut self,
4535 base_checkpoint: GitRepositoryCheckpoint,
4536 target_checkpoint: GitRepositoryCheckpoint,
4537 ) -> oneshot::Receiver<Result<String>> {
4538 self.send_job(None, move |repo, _cx| async move {
4539 match repo {
4540 RepositoryState::Local { backend, .. } => {
4541 backend
4542 .diff_checkpoints(base_checkpoint, target_checkpoint)
4543 .await
4544 }
4545 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4546 }
4547 })
4548 }
4549
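    /// Schedules a full git status scan on the worker.
    ///
    /// The job is keyed as `ReloadGitState`, so repeated calls while a scan is
    /// still queued collapse into a single run of the most recently scheduled
    /// job.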
4550 fn schedule_scan(
4551 &mut self,
4552 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4553 cx: &mut Context<Self>,
4554 ) {
4555 let this = cx.weak_entity();
4556 let _ = self.send_keyed_job(
4557 Some(GitJobKey::ReloadGitState),
4558 None,
4559 |state, mut cx| async move {
4560 log::debug!("run scheduled git status scan");
4561
4562 let Some(this) = this.upgrade() else {
4563 return Ok(());
4564 };
4565 let RepositoryState::Local { backend, .. } = state else {
4566 bail!("not a local repository")
4567 };
4568 let (snapshot, events) = this
4569 .update(&mut cx, |this, _| {
4570 this.paths_needing_status_update.clear();
4571 compute_snapshot(
4572 this.id,
4573 this.work_directory_abs_path.clone(),
4574 this.snapshot.clone(),
4575 backend.clone(),
4576 )
4577 })?
4578 .await?;
4579 this.update(&mut cx, |this, cx| {
4580 this.snapshot = snapshot.clone();
4581 for event in events {
4582 cx.emit(event);
4583 }
4584 })?;
4585 if let Some(updates_tx) = updates_tx {
4586 updates_tx
4587 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4588 .ok();
4589 }
4590 Ok(())
4591 },
4592 );
4593 }
4594
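    /// Spawns the background worker that executes git jobs for a local
    /// repository.
    ///
    /// The worker resolves the project's shell environment for the working
    /// directory (so git can be found on `PATH`), locates a system git binary,
    /// opens the repository through the `Fs` backend, registers additional git
    /// hosting providers when a global registry is available, and then drains
    /// the job queue. Keyed jobs are coalesced: a job is skipped if a later job
    /// with the same key is already queued.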
4595 fn spawn_local_git_worker(
4596 work_directory_abs_path: Arc<Path>,
4597 dot_git_abs_path: Arc<Path>,
4598 _repository_dir_abs_path: Arc<Path>,
4599 _common_dir_abs_path: Arc<Path>,
4600 project_environment: WeakEntity<ProjectEnvironment>,
4601 fs: Arc<dyn Fs>,
4602 cx: &mut Context<Self>,
4603 ) -> mpsc::UnboundedSender<GitJob> {
4604 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4605
4606 cx.spawn(async move |_, cx| {
4607 let environment = project_environment
4608 .upgrade()
4609 .context("missing project environment")?
4610 .update(cx, |project_environment, cx| {
                    project_environment.get_local_directory_environment(
                        &Shell::System,
                        work_directory_abs_path.clone(),
                        cx,
                    )
4612 })?
4613 .await
4614 .unwrap_or_else(|| {
4615 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4616 HashMap::default()
4617 });
4618 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4619 let backend = cx
4620 .background_spawn(async move {
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
4623 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4624 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4625 })
4626 .await?;
4627
4628 if let Some(git_hosting_provider_registry) =
4629 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4630 {
4631 git_hosting_providers::register_additional_providers(
4632 git_hosting_provider_registry,
4633 backend.clone(),
4634 );
4635 }
4636
4637 let state = RepositoryState::Local {
4638 backend,
4639 environment: Arc::new(environment),
4640 };
4641 let mut jobs = VecDeque::new();
4642 loop {
4643 while let Ok(Some(next_job)) = job_rx.try_next() {
4644 jobs.push_back(next_job);
4645 }
4646
4647 if let Some(job) = jobs.pop_front() {
4648 if let Some(current_key) = &job.key
4649 && jobs
4650 .iter()
4651 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4652 {
4653 continue;
4654 }
4655 (job.job)(state.clone(), cx).await;
4656 } else if let Some(job) = job_rx.next().await {
4657 jobs.push_back(job);
4658 } else {
4659 break;
4660 }
4661 }
4662 anyhow::Ok(())
4663 })
4664 .detach_and_log_err(cx);
4665
4666 job_tx
4667 }
4668
4669 fn spawn_remote_git_worker(
4670 project_id: ProjectId,
4671 client: AnyProtoClient,
4672 cx: &mut Context<Self>,
4673 ) -> mpsc::UnboundedSender<GitJob> {
4674 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4675
4676 cx.spawn(async move |_, cx| {
4677 let state = RepositoryState::Remote { project_id, client };
4678 let mut jobs = VecDeque::new();
4679 loop {
4680 while let Ok(Some(next_job)) = job_rx.try_next() {
4681 jobs.push_back(next_job);
4682 }
4683
4684 if let Some(job) = jobs.pop_front() {
4685 if let Some(current_key) = &job.key
4686 && jobs
4687 .iter()
4688 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4689 {
4690 continue;
4691 }
4692 (job.job)(state.clone(), cx).await;
4693 } else if let Some(job) = job_rx.next().await {
4694 jobs.push_back(job);
4695 } else {
4696 break;
4697 }
4698 }
4699 anyhow::Ok(())
4700 })
4701 .detach_and_log_err(cx);
4702
4703 job_tx
4704 }
4705
4706 fn load_staged_text(
4707 &mut self,
4708 buffer_id: BufferId,
4709 repo_path: RepoPath,
4710 cx: &App,
4711 ) -> Task<Result<Option<String>>> {
4712 let rx = self.send_job(None, move |state, _| async move {
4713 match state {
4714 RepositoryState::Local { backend, .. } => {
4715 anyhow::Ok(backend.load_index_text(repo_path).await)
4716 }
4717 RepositoryState::Remote { project_id, client } => {
4718 let response = client
4719 .request(proto::OpenUnstagedDiff {
4720 project_id: project_id.to_proto(),
4721 buffer_id: buffer_id.to_proto(),
4722 })
4723 .await?;
4724 Ok(response.staged_text)
4725 }
4726 }
4727 });
4728 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4729 }
4730
4731 fn load_committed_text(
4732 &mut self,
4733 buffer_id: BufferId,
4734 repo_path: RepoPath,
4735 cx: &App,
4736 ) -> Task<Result<DiffBasesChange>> {
4737 let rx = self.send_job(None, move |state, _| async move {
4738 match state {
4739 RepositoryState::Local { backend, .. } => {
4740 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4741 let staged_text = backend.load_index_text(repo_path).await;
4742 let diff_bases_change = if committed_text == staged_text {
4743 DiffBasesChange::SetBoth(committed_text)
4744 } else {
4745 DiffBasesChange::SetEach {
4746 index: staged_text,
4747 head: committed_text,
4748 }
4749 };
4750 anyhow::Ok(diff_bases_change)
4751 }
4752 RepositoryState::Remote { project_id, client } => {
4753 use proto::open_uncommitted_diff_response::Mode;
4754
4755 let response = client
4756 .request(proto::OpenUncommittedDiff {
4757 project_id: project_id.to_proto(),
4758 buffer_id: buffer_id.to_proto(),
4759 })
4760 .await?;
4761 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4762 let bases = match mode {
4763 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4764 Mode::IndexAndHead => DiffBasesChange::SetEach {
4765 head: response.committed_text,
4766 index: response.staged_text,
4767 },
4768 };
4769 Ok(bases)
4770 }
4771 }
4772 });
4773
4774 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4775 }
4776
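    /// Refreshes the cached statuses for the given paths.
    ///
    /// Paths accumulate in `paths_needing_status_update` and are processed by a
    /// keyed `RefreshStatuses` job, so rapid successive changes are batched
    /// into a single status query against the backend.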
4777 fn paths_changed(
4778 &mut self,
4779 paths: Vec<RepoPath>,
4780 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4781 cx: &mut Context<Self>,
4782 ) {
4783 self.paths_needing_status_update.extend(paths);
4784
4785 let this = cx.weak_entity();
4786 let _ = self.send_keyed_job(
4787 Some(GitJobKey::RefreshStatuses),
4788 None,
4789 |state, mut cx| async move {
4790 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4791 (
4792 this.snapshot.clone(),
4793 mem::take(&mut this.paths_needing_status_update),
4794 )
4795 })?;
4796 let RepositoryState::Local { backend, .. } = state else {
4797 bail!("not a local repository")
4798 };
4799
4800 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4801 if paths.is_empty() {
4802 return Ok(());
4803 }
4804 let statuses = backend.status(&paths).await?;
4805 let stash_entries = backend.stash_entries().await?;
4806
4807 let changed_path_statuses = cx
4808 .background_spawn(async move {
4809 let mut changed_path_statuses = Vec::new();
4810 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4811 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4812
4813 for (repo_path, status) in &*statuses.entries {
4814 changed_paths.remove(repo_path);
4815 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4816 && cursor.item().is_some_and(|entry| entry.status == *status)
4817 {
4818 continue;
4819 }
4820
4821 changed_path_statuses.push(Edit::Insert(StatusEntry {
4822 repo_path: repo_path.clone(),
4823 status: *status,
4824 }));
4825 }
4826 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4827 for path in changed_paths.into_iter() {
4828 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4829 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4830 }
4831 }
4832 changed_path_statuses
4833 })
4834 .await;
4835
4836 this.update(&mut cx, |this, cx| {
4837 if this.snapshot.stash_entries != stash_entries {
4838 cx.emit(RepositoryEvent::StashEntriesChanged);
4839 this.snapshot.stash_entries = stash_entries;
4840 }
4841
4842 if !changed_path_statuses.is_empty() {
4843 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
4844 this.snapshot
4845 .statuses_by_path
4846 .edit(changed_path_statuses, ());
4847 this.snapshot.scan_id += 1;
4848 }
4849
4850 if let Some(updates_tx) = updates_tx {
4851 updates_tx
4852 .unbounded_send(DownstreamUpdate::UpdateRepository(
4853 this.snapshot.clone(),
4854 ))
4855 .ok();
4856 }
4857 })
4858 },
4859 );
4860 }
4861
    /// The currently running git command and when it started, if any.
4863 pub fn current_job(&self) -> Option<JobInfo> {
4864 self.active_jobs.values().next().cloned()
4865 }
4866
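    /// Enqueues a no-op job; the returned receiver resolves once every job
    /// queued before it has been processed.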
4867 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4868 self.send_job(None, |_, _| async {})
4869 }
4870}
4871
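/// Builds a permalink for a file inside a Cargo registry source checkout by
/// reading the crate's `.cargo_vcs_info.json` and `Cargo.toml` to recover the
/// upstream repository URL, the commit SHA, and the file's path within that
/// repository.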
4872fn get_permalink_in_rust_registry_src(
4873 provider_registry: Arc<GitHostingProviderRegistry>,
4874 path: PathBuf,
4875 selection: Range<u32>,
4876) -> Result<url::Url> {
4877 #[derive(Deserialize)]
4878 struct CargoVcsGit {
4879 sha1: String,
4880 }
4881
4882 #[derive(Deserialize)]
4883 struct CargoVcsInfo {
4884 git: CargoVcsGit,
4885 path_in_vcs: String,
4886 }
4887
4888 #[derive(Deserialize)]
4889 struct CargoPackage {
4890 repository: String,
4891 }
4892
4893 #[derive(Deserialize)]
4894 struct CargoToml {
4895 package: CargoPackage,
4896 }
4897
4898 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4899 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4900 Some((dir, json))
4901 }) else {
4902 bail!("No .cargo_vcs_info.json found in parent directories")
4903 };
4904 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4905 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4906 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4907 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4908 .context("parsing package.repository field of manifest")?;
4909 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4910 let permalink = provider.build_permalink(
4911 remote,
4912 BuildPermalinkParams::new(
4913 &cargo_vcs_info.git.sha1,
4914 &RepoPath(
4915 RelPath::new(&path, PathStyle::local())
4916 .context("invalid path")?
4917 .into_arc(),
4918 ),
4919 Some(selection),
4920 ),
4921 );
4922 Ok(permalink)
4923}
4924
4925fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4926 let Some(blame) = blame else {
4927 return proto::BlameBufferResponse {
4928 blame_response: None,
4929 };
4930 };
4931
4932 let entries = blame
4933 .entries
4934 .into_iter()
4935 .map(|entry| proto::BlameEntry {
4936 sha: entry.sha.as_bytes().into(),
4937 start_line: entry.range.start,
4938 end_line: entry.range.end,
4939 original_line_number: entry.original_line_number,
4940 author: entry.author,
4941 author_mail: entry.author_mail,
4942 author_time: entry.author_time,
4943 author_tz: entry.author_tz,
4944 committer: entry.committer_name,
4945 committer_mail: entry.committer_email,
4946 committer_time: entry.committer_time,
4947 committer_tz: entry.committer_tz,
4948 summary: entry.summary,
4949 previous: entry.previous,
4950 filename: entry.filename,
4951 })
4952 .collect::<Vec<_>>();
4953
4954 let messages = blame
4955 .messages
4956 .into_iter()
4957 .map(|(oid, message)| proto::CommitMessage {
4958 oid: oid.as_bytes().into(),
4959 message,
4960 })
4961 .collect::<Vec<_>>();
4962
4963 proto::BlameBufferResponse {
4964 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4965 entries,
4966 messages,
4967 remote_url: blame.remote_url,
4968 }),
4969 }
4970}
4971
4972fn deserialize_blame_buffer_response(
4973 response: proto::BlameBufferResponse,
4974) -> Option<git::blame::Blame> {
4975 let response = response.blame_response?;
4976 let entries = response
4977 .entries
4978 .into_iter()
4979 .filter_map(|entry| {
4980 Some(git::blame::BlameEntry {
4981 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4982 range: entry.start_line..entry.end_line,
4983 original_line_number: entry.original_line_number,
4984 committer_name: entry.committer,
4985 committer_time: entry.committer_time,
4986 committer_tz: entry.committer_tz,
4987 committer_email: entry.committer_mail,
4988 author: entry.author,
4989 author_mail: entry.author_mail,
4990 author_time: entry.author_time,
4991 author_tz: entry.author_tz,
4992 summary: entry.summary,
4993 previous: entry.previous,
4994 filename: entry.filename,
4995 })
4996 })
4997 .collect::<Vec<_>>();
4998
4999 let messages = response
5000 .messages
5001 .into_iter()
5002 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5003 .collect::<HashMap<_, _>>();
5004
5005 Some(Blame {
5006 entries,
5007 messages,
5008 remote_url: response.remote_url,
5009 })
5010}
5011
5012fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5013 proto::Branch {
5014 is_head: branch.is_head,
5015 ref_name: branch.ref_name.to_string(),
5016 unix_timestamp: branch
5017 .most_recent_commit
5018 .as_ref()
5019 .map(|commit| commit.commit_timestamp as u64),
5020 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5021 ref_name: upstream.ref_name.to_string(),
5022 tracking: upstream
5023 .tracking
5024 .status()
5025 .map(|upstream| proto::UpstreamTracking {
5026 ahead: upstream.ahead as u64,
5027 behind: upstream.behind as u64,
5028 }),
5029 }),
5030 most_recent_commit: branch
5031 .most_recent_commit
5032 .as_ref()
5033 .map(|commit| proto::CommitSummary {
5034 sha: commit.sha.to_string(),
5035 subject: commit.subject.to_string(),
5036 commit_timestamp: commit.commit_timestamp,
5037 author_name: commit.author_name.to_string(),
5038 }),
5039 }
5040}
5041
5042fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5043 git::repository::Branch {
5044 is_head: proto.is_head,
5045 ref_name: proto.ref_name.clone().into(),
5046 upstream: proto
5047 .upstream
5048 .as_ref()
5049 .map(|upstream| git::repository::Upstream {
5050 ref_name: upstream.ref_name.to_string().into(),
5051 tracking: upstream
5052 .tracking
5053 .as_ref()
5054 .map(|tracking| {
5055 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5056 ahead: tracking.ahead as u32,
5057 behind: tracking.behind as u32,
5058 })
5059 })
5060 .unwrap_or(git::repository::UpstreamTracking::Gone),
5061 }),
5062 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5063 git::repository::CommitSummary {
5064 sha: commit.sha.to_string().into(),
5065 subject: commit.subject.to_string().into(),
5066 commit_timestamp: commit.commit_timestamp,
5067 author_name: commit.author_name.to_string().into(),
5068 has_parent: true,
5069 }
5070 }),
5071 }
5072}
5073
5074fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5075 proto::GitCommitDetails {
5076 sha: commit.sha.to_string(),
5077 message: commit.message.to_string(),
5078 commit_timestamp: commit.commit_timestamp,
5079 author_email: commit.author_email.to_string(),
5080 author_name: commit.author_name.to_string(),
5081 }
5082}
5083
5084fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5085 CommitDetails {
5086 sha: proto.sha.clone().into(),
5087 message: proto.message.clone().into(),
5088 commit_timestamp: proto.commit_timestamp,
5089 author_email: proto.author_email.clone().into(),
5090 author_name: proto.author_name.clone().into(),
5091 }
5092}
5093
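/// Builds a fresh `RepositorySnapshot` by querying the backend for branches,
/// statuses, stash entries, merge details, and the current head commit, and
/// returns it together with the events describing how it differs from
/// `prev_snapshot`.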
5094async fn compute_snapshot(
5095 id: RepositoryId,
5096 work_directory_abs_path: Arc<Path>,
5097 prev_snapshot: RepositorySnapshot,
5098 backend: Arc<dyn GitRepository>,
5099) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5100 let mut events = Vec::new();
5101 let branches = backend.branches().await?;
5102 let branch = branches.into_iter().find(|branch| branch.is_head);
5103 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5104 let stash_entries = backend.stash_entries().await?;
5105 let statuses_by_path = SumTree::from_iter(
5106 statuses
5107 .entries
5108 .iter()
5109 .map(|(repo_path, status)| StatusEntry {
5110 repo_path: repo_path.clone(),
5111 status: *status,
5112 }),
5113 (),
5114 );
5115 let (merge_details, merge_heads_changed) =
5116 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5117 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5118
5119 if merge_heads_changed {
5120 events.push(RepositoryEvent::MergeHeadsChanged);
5121 }
5122
5123 if statuses_by_path != prev_snapshot.statuses_by_path {
5124 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5125 }
5126
    // Useful when `branch` is `None`, e.g. in a detached HEAD state.
5128 let head_commit = match backend.head_sha().await {
5129 Some(head_sha) => backend.show(head_sha).await.log_err(),
5130 None => None,
5131 };
5132
5133 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5134 events.push(RepositoryEvent::BranchChanged);
5135 }
5136
5137 // Used by edit prediction data collection
5138 let remote_origin_url = backend.remote_url("origin");
5139 let remote_upstream_url = backend.remote_url("upstream");
5140
5141 let snapshot = RepositorySnapshot {
5142 id,
5143 statuses_by_path,
5144 work_directory_abs_path,
5145 path_style: prev_snapshot.path_style,
5146 scan_id: prev_snapshot.scan_id + 1,
5147 branch,
5148 head_commit,
5149 merge: merge_details,
5150 remote_origin_url,
5151 remote_upstream_url,
5152 stash_entries,
5153 };
5154
5155 Ok((snapshot, events))
5156}
5157
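/// Converts a protobuf file status into a `FileStatus`, falling back to the
/// legacy `simple_status` code when no structured variant is present.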
5158fn status_from_proto(
5159 simple_status: i32,
5160 status: Option<proto::GitFileStatus>,
5161) -> anyhow::Result<FileStatus> {
5162 use proto::git_file_status::Variant;
5163
5164 let Some(variant) = status.and_then(|status| status.variant) else {
5165 let code = proto::GitStatus::from_i32(simple_status)
5166 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5167 let result = match code {
5168 proto::GitStatus::Added => TrackedStatus {
5169 worktree_status: StatusCode::Added,
5170 index_status: StatusCode::Unmodified,
5171 }
5172 .into(),
5173 proto::GitStatus::Modified => TrackedStatus {
5174 worktree_status: StatusCode::Modified,
5175 index_status: StatusCode::Unmodified,
5176 }
5177 .into(),
5178 proto::GitStatus::Conflict => UnmergedStatus {
5179 first_head: UnmergedStatusCode::Updated,
5180 second_head: UnmergedStatusCode::Updated,
5181 }
5182 .into(),
5183 proto::GitStatus::Deleted => TrackedStatus {
5184 worktree_status: StatusCode::Deleted,
5185 index_status: StatusCode::Unmodified,
5186 }
5187 .into(),
5188 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5189 };
5190 return Ok(result);
5191 };
5192
5193 let result = match variant {
5194 Variant::Untracked(_) => FileStatus::Untracked,
5195 Variant::Ignored(_) => FileStatus::Ignored,
5196 Variant::Unmerged(unmerged) => {
5197 let [first_head, second_head] =
5198 [unmerged.first_head, unmerged.second_head].map(|head| {
5199 let code = proto::GitStatus::from_i32(head)
5200 .with_context(|| format!("Invalid git status code: {head}"))?;
5201 let result = match code {
5202 proto::GitStatus::Added => UnmergedStatusCode::Added,
5203 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5204 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5205 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5206 };
5207 Ok(result)
5208 });
5209 let [first_head, second_head] = [first_head?, second_head?];
5210 UnmergedStatus {
5211 first_head,
5212 second_head,
5213 }
5214 .into()
5215 }
5216 Variant::Tracked(tracked) => {
5217 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5218 .map(|status| {
5219 let code = proto::GitStatus::from_i32(status)
5220 .with_context(|| format!("Invalid git status code: {status}"))?;
5221 let result = match code {
5222 proto::GitStatus::Modified => StatusCode::Modified,
5223 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5224 proto::GitStatus::Added => StatusCode::Added,
5225 proto::GitStatus::Deleted => StatusCode::Deleted,
5226 proto::GitStatus::Renamed => StatusCode::Renamed,
5227 proto::GitStatus::Copied => StatusCode::Copied,
5228 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5229 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5230 };
5231 Ok(result)
5232 });
5233 let [index_status, worktree_status] = [index_status?, worktree_status?];
5234 TrackedStatus {
5235 index_status,
5236 worktree_status,
5237 }
5238 .into()
5239 }
5240 };
5241 Ok(result)
5242}
5243
5244fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5245 use proto::git_file_status::{Tracked, Unmerged, Variant};
5246
5247 let variant = match status {
5248 FileStatus::Untracked => Variant::Untracked(Default::default()),
5249 FileStatus::Ignored => Variant::Ignored(Default::default()),
5250 FileStatus::Unmerged(UnmergedStatus {
5251 first_head,
5252 second_head,
5253 }) => Variant::Unmerged(Unmerged {
5254 first_head: unmerged_status_to_proto(first_head),
5255 second_head: unmerged_status_to_proto(second_head),
5256 }),
5257 FileStatus::Tracked(TrackedStatus {
5258 index_status,
5259 worktree_status,
5260 }) => Variant::Tracked(Tracked {
5261 index_status: tracked_status_to_proto(index_status),
5262 worktree_status: tracked_status_to_proto(worktree_status),
5263 }),
5264 };
5265 proto::GitFileStatus {
5266 variant: Some(variant),
5267 }
5268}
5269
5270fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5271 match code {
5272 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5273 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5274 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5275 }
5276}
5277
5278fn tracked_status_to_proto(code: StatusCode) -> i32 {
5279 match code {
5280 StatusCode::Added => proto::GitStatus::Added as _,
5281 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5282 StatusCode::Modified => proto::GitStatus::Modified as _,
5283 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5284 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5285 StatusCode::Copied => proto::GitStatus::Copied as _,
5286 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5287 }
5288}