1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 stash::{GitStash, StashEntry},
32 status::{
33 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
34 },
35};
36use gpui::{
37 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
38 WeakEntity,
39};
40use language::{
41 Buffer, BufferEvent, Language, LanguageRegistry,
42 proto::{deserialize_version, serialize_version},
43};
44use parking_lot::Mutex;
45use postage::stream::Stream as _;
46use rpc::{
47 AnyProtoClient, TypedEnvelope,
48 proto::{self, git_reset, split_repository_update},
49};
50use serde::Deserialize;
51use std::{
52 cmp::Ordering,
53 collections::{BTreeSet, VecDeque},
54 future::Future,
55 mem,
56 ops::Range,
57 path::{Path, PathBuf},
58 sync::{
59 Arc,
60 atomic::{self, AtomicU64},
61 },
62 time::Instant,
63};
64use sum_tree::{Edit, SumTree, TreeSet};
65use task::Shell;
66use text::{Bias, BufferId};
67use util::{
68 ResultExt, debug_panic,
69 paths::{PathStyle, SanitizedPath},
70 post_inc,
71 rel_path::RelPath,
72};
73use worktree::{
74 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
75 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
76};
77use zeroize::Zeroize;
78
79pub struct GitStore {
80 state: GitStoreState,
81 buffer_store: Entity<BufferStore>,
82 worktree_store: Entity<WorktreeStore>,
83 repositories: HashMap<RepositoryId, Entity<Repository>>,
84 active_repo_id: Option<RepositoryId>,
85 #[allow(clippy::type_complexity)]
86 loading_diffs:
87 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
88 diffs: HashMap<BufferId, Entity<BufferGitState>>,
89 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
90 _subscriptions: Vec<Subscription>,
91}
92
93#[derive(Default)]
94struct SharedDiffs {
95 unstaged: Option<Entity<BufferDiff>>,
96 uncommitted: Option<Entity<BufferDiff>>,
97}
98
99struct BufferGitState {
100 unstaged_diff: Option<WeakEntity<BufferDiff>>,
101 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
102 conflict_set: Option<WeakEntity<ConflictSet>>,
103 recalculate_diff_task: Option<Task<Result<()>>>,
104 reparse_conflict_markers_task: Option<Task<Result<()>>>,
105 language: Option<Arc<Language>>,
106 language_registry: Option<Arc<LanguageRegistry>>,
107 conflict_updated_futures: Vec<oneshot::Sender<()>>,
108 recalculating_tx: postage::watch::Sender<bool>,
109
110 /// These operation counts are used to ensure that head and index text
111 /// values read from the git repository are up-to-date with any hunk staging
112 /// operations that have been performed on the BufferDiff.
113 ///
114 /// The operation count is incremented immediately when the user initiates a
115 /// hunk stage/unstage operation. Then, upon finishing writing the new index
116 /// text do disk, the `operation count as of write` is updated to reflect
117 /// the operation count that prompted the write.
118 hunk_staging_operation_count: usize,
119 hunk_staging_operation_count_as_of_write: usize,
120
121 head_text: Option<Arc<String>>,
122 index_text: Option<Arc<String>>,
123 head_changed: bool,
124 index_changed: bool,
125 language_changed: bool,
126}
127
128#[derive(Clone, Debug)]
129enum DiffBasesChange {
130 SetIndex(Option<String>),
131 SetHead(Option<String>),
132 SetEach {
133 index: Option<String>,
134 head: Option<String>,
135 },
136 SetBoth(Option<String>),
137}
138
139#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
140enum DiffKind {
141 Unstaged,
142 Uncommitted,
143}
144
145enum GitStoreState {
146 Local {
147 next_repository_id: Arc<AtomicU64>,
148 downstream: Option<LocalDownstreamState>,
149 project_environment: Entity<ProjectEnvironment>,
150 fs: Arc<dyn Fs>,
151 },
152 Remote {
153 upstream_client: AnyProtoClient,
154 upstream_project_id: u64,
155 downstream: Option<(AnyProtoClient, ProjectId)>,
156 },
157}
158
159enum DownstreamUpdate {
160 UpdateRepository(RepositorySnapshot),
161 RemoveRepository(RepositoryId),
162}
163
164struct LocalDownstreamState {
165 client: AnyProtoClient,
166 project_id: ProjectId,
167 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
168 _task: Task<Result<()>>,
169}
170
171#[derive(Clone, Debug)]
172pub struct GitStoreCheckpoint {
173 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
174}
175
176#[derive(Clone, Debug, PartialEq, Eq)]
177pub struct StatusEntry {
178 pub repo_path: RepoPath,
179 pub status: FileStatus,
180}
181
182impl StatusEntry {
183 fn to_proto(&self) -> proto::StatusEntry {
184 let simple_status = match self.status {
185 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
186 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
187 FileStatus::Tracked(TrackedStatus {
188 index_status,
189 worktree_status,
190 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
191 worktree_status
192 } else {
193 index_status
194 }),
195 };
196
197 proto::StatusEntry {
198 repo_path: self.repo_path.to_proto(),
199 simple_status,
200 status: Some(status_to_proto(self.status)),
201 }
202 }
203}
204
205impl TryFrom<proto::StatusEntry> for StatusEntry {
206 type Error = anyhow::Error;
207
208 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
209 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
210 let status = status_from_proto(value.simple_status, value.status)?;
211 Ok(Self { repo_path, status })
212 }
213}
214
215impl sum_tree::Item for StatusEntry {
216 type Summary = PathSummary<GitSummary>;
217
218 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
219 PathSummary {
220 max_path: self.repo_path.0.clone(),
221 item_summary: self.status.summary(),
222 }
223 }
224}
225
226impl sum_tree::KeyedItem for StatusEntry {
227 type Key = PathKey;
228
229 fn key(&self) -> Self::Key {
230 PathKey(self.repo_path.0.clone())
231 }
232}
233
234#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
235pub struct RepositoryId(pub u64);
236
237#[derive(Clone, Debug, Default, PartialEq, Eq)]
238pub struct MergeDetails {
239 pub conflicted_paths: TreeSet<RepoPath>,
240 pub message: Option<SharedString>,
241 pub heads: Vec<Option<SharedString>>,
242}
243
244#[derive(Clone, Debug, PartialEq, Eq)]
245pub struct RepositorySnapshot {
246 pub id: RepositoryId,
247 pub statuses_by_path: SumTree<StatusEntry>,
248 pub work_directory_abs_path: Arc<Path>,
249 pub path_style: PathStyle,
250 pub branch: Option<Branch>,
251 pub head_commit: Option<CommitDetails>,
252 pub scan_id: u64,
253 pub merge: MergeDetails,
254 pub remote_origin_url: Option<String>,
255 pub remote_upstream_url: Option<String>,
256 pub stash_entries: GitStash,
257}
258
259type JobId = u64;
260
261#[derive(Clone, Debug, PartialEq, Eq)]
262pub struct JobInfo {
263 pub start: Instant,
264 pub message: SharedString,
265}
266
267pub struct Repository {
268 this: WeakEntity<Self>,
269 snapshot: RepositorySnapshot,
270 commit_message_buffer: Option<Entity<Buffer>>,
271 git_store: WeakEntity<GitStore>,
272 // For a local repository, holds paths that have had worktree events since the last status scan completed,
273 // and that should be examined during the next status scan.
274 paths_needing_status_update: BTreeSet<RepoPath>,
275 job_sender: mpsc::UnboundedSender<GitJob>,
276 active_jobs: HashMap<JobId, JobInfo>,
277 job_id: JobId,
278 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
279 latest_askpass_id: u64,
280}
281
282impl std::ops::Deref for Repository {
283 type Target = RepositorySnapshot;
284
285 fn deref(&self) -> &Self::Target {
286 &self.snapshot
287 }
288}
289
290#[derive(Clone)]
291pub enum RepositoryState {
292 Local {
293 backend: Arc<dyn GitRepository>,
294 environment: Arc<HashMap<String, String>>,
295 },
296 Remote {
297 project_id: ProjectId,
298 client: AnyProtoClient,
299 },
300}
301
302#[derive(Clone, Debug, PartialEq, Eq)]
303pub enum RepositoryEvent {
304 Updated { full_scan: bool, new_instance: bool },
305 MergeHeadsChanged,
306 PathsChanged,
307}
308
309#[derive(Clone, Debug)]
310pub struct JobsUpdated;
311
312#[derive(Debug)]
313pub enum GitStoreEvent {
314 ActiveRepositoryChanged(Option<RepositoryId>),
315 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
316 RepositoryAdded(RepositoryId),
317 RepositoryRemoved(RepositoryId),
318 IndexWriteError(anyhow::Error),
319 JobsUpdated,
320 ConflictsUpdated,
321}
322
323impl EventEmitter<RepositoryEvent> for Repository {}
324impl EventEmitter<JobsUpdated> for Repository {}
325impl EventEmitter<GitStoreEvent> for GitStore {}
326
327pub struct GitJob {
328 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
329 key: Option<GitJobKey>,
330}
331
332#[derive(PartialEq, Eq)]
333enum GitJobKey {
334 WriteIndex(RepoPath),
335 ReloadBufferDiffBases,
336 RefreshStatuses,
337 ReloadGitState,
338}
339
340impl GitStore {
341 pub fn local(
342 worktree_store: &Entity<WorktreeStore>,
343 buffer_store: Entity<BufferStore>,
344 environment: Entity<ProjectEnvironment>,
345 fs: Arc<dyn Fs>,
346 cx: &mut Context<Self>,
347 ) -> Self {
348 Self::new(
349 worktree_store.clone(),
350 buffer_store,
351 GitStoreState::Local {
352 next_repository_id: Arc::new(AtomicU64::new(1)),
353 downstream: None,
354 project_environment: environment,
355 fs,
356 },
357 cx,
358 )
359 }
360
361 pub fn remote(
362 worktree_store: &Entity<WorktreeStore>,
363 buffer_store: Entity<BufferStore>,
364 upstream_client: AnyProtoClient,
365 project_id: u64,
366 cx: &mut Context<Self>,
367 ) -> Self {
368 Self::new(
369 worktree_store.clone(),
370 buffer_store,
371 GitStoreState::Remote {
372 upstream_client,
373 upstream_project_id: project_id,
374 downstream: None,
375 },
376 cx,
377 )
378 }
379
380 fn new(
381 worktree_store: Entity<WorktreeStore>,
382 buffer_store: Entity<BufferStore>,
383 state: GitStoreState,
384 cx: &mut Context<Self>,
385 ) -> Self {
386 let _subscriptions = vec![
387 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
388 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
389 ];
390
391 GitStore {
392 state,
393 buffer_store,
394 worktree_store,
395 repositories: HashMap::default(),
396 active_repo_id: None,
397 _subscriptions,
398 loading_diffs: HashMap::default(),
399 shared_diffs: HashMap::default(),
400 diffs: HashMap::default(),
401 }
402 }
403
404 pub fn init(client: &AnyProtoClient) {
405 client.add_entity_request_handler(Self::handle_get_remotes);
406 client.add_entity_request_handler(Self::handle_get_branches);
407 client.add_entity_request_handler(Self::handle_get_default_branch);
408 client.add_entity_request_handler(Self::handle_change_branch);
409 client.add_entity_request_handler(Self::handle_create_branch);
410 client.add_entity_request_handler(Self::handle_rename_branch);
411 client.add_entity_request_handler(Self::handle_git_init);
412 client.add_entity_request_handler(Self::handle_push);
413 client.add_entity_request_handler(Self::handle_pull);
414 client.add_entity_request_handler(Self::handle_fetch);
415 client.add_entity_request_handler(Self::handle_stage);
416 client.add_entity_request_handler(Self::handle_unstage);
417 client.add_entity_request_handler(Self::handle_stash);
418 client.add_entity_request_handler(Self::handle_stash_pop);
419 client.add_entity_request_handler(Self::handle_stash_apply);
420 client.add_entity_request_handler(Self::handle_stash_drop);
421 client.add_entity_request_handler(Self::handle_commit);
422 client.add_entity_request_handler(Self::handle_reset);
423 client.add_entity_request_handler(Self::handle_show);
424 client.add_entity_request_handler(Self::handle_load_commit_diff);
425 client.add_entity_request_handler(Self::handle_checkout_files);
426 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
427 client.add_entity_request_handler(Self::handle_set_index_text);
428 client.add_entity_request_handler(Self::handle_askpass);
429 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
430 client.add_entity_request_handler(Self::handle_git_diff);
431 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
432 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
433 client.add_entity_message_handler(Self::handle_update_diff_bases);
434 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
435 client.add_entity_request_handler(Self::handle_blame_buffer);
436 client.add_entity_message_handler(Self::handle_update_repository);
437 client.add_entity_message_handler(Self::handle_remove_repository);
438 client.add_entity_request_handler(Self::handle_git_clone);
439 }
440
441 pub fn is_local(&self) -> bool {
442 matches!(self.state, GitStoreState::Local { .. })
443 }
444 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
445 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
446 let id = repo.read(cx).id;
447 if self.active_repo_id != Some(id) {
448 self.active_repo_id = Some(id);
449 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
450 }
451 }
452 }
453
454 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
455 match &mut self.state {
456 GitStoreState::Remote {
457 downstream: downstream_client,
458 ..
459 } => {
460 for repo in self.repositories.values() {
461 let update = repo.read(cx).snapshot.initial_update(project_id);
462 for update in split_repository_update(update) {
463 client.send(update).log_err();
464 }
465 }
466 *downstream_client = Some((client, ProjectId(project_id)));
467 }
468 GitStoreState::Local {
469 downstream: downstream_client,
470 ..
471 } => {
472 let mut snapshots = HashMap::default();
473 let (updates_tx, mut updates_rx) = mpsc::unbounded();
474 for repo in self.repositories.values() {
475 updates_tx
476 .unbounded_send(DownstreamUpdate::UpdateRepository(
477 repo.read(cx).snapshot.clone(),
478 ))
479 .ok();
480 }
481 *downstream_client = Some(LocalDownstreamState {
482 client: client.clone(),
483 project_id: ProjectId(project_id),
484 updates_tx,
485 _task: cx.spawn(async move |this, cx| {
486 cx.background_spawn(async move {
487 while let Some(update) = updates_rx.next().await {
488 match update {
489 DownstreamUpdate::UpdateRepository(snapshot) => {
490 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
491 {
492 let update =
493 snapshot.build_update(old_snapshot, project_id);
494 *old_snapshot = snapshot;
495 for update in split_repository_update(update) {
496 client.send(update)?;
497 }
498 } else {
499 let update = snapshot.initial_update(project_id);
500 for update in split_repository_update(update) {
501 client.send(update)?;
502 }
503 snapshots.insert(snapshot.id, snapshot);
504 }
505 }
506 DownstreamUpdate::RemoveRepository(id) => {
507 client.send(proto::RemoveRepository {
508 project_id,
509 id: id.to_proto(),
510 })?;
511 }
512 }
513 }
514 anyhow::Ok(())
515 })
516 .await
517 .ok();
518 this.update(cx, |this, _| {
519 if let GitStoreState::Local {
520 downstream: downstream_client,
521 ..
522 } = &mut this.state
523 {
524 downstream_client.take();
525 } else {
526 unreachable!("unshared called on remote store");
527 }
528 })
529 }),
530 });
531 }
532 }
533 }
534
535 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
536 match &mut self.state {
537 GitStoreState::Local {
538 downstream: downstream_client,
539 ..
540 } => {
541 downstream_client.take();
542 }
543 GitStoreState::Remote {
544 downstream: downstream_client,
545 ..
546 } => {
547 downstream_client.take();
548 }
549 }
550 self.shared_diffs.clear();
551 }
552
553 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
554 self.shared_diffs.remove(peer_id);
555 }
556
557 pub fn active_repository(&self) -> Option<Entity<Repository>> {
558 self.active_repo_id
559 .as_ref()
560 .map(|id| self.repositories[id].clone())
561 }
562
563 pub fn open_unstaged_diff(
564 &mut self,
565 buffer: Entity<Buffer>,
566 cx: &mut Context<Self>,
567 ) -> Task<Result<Entity<BufferDiff>>> {
568 let buffer_id = buffer.read(cx).remote_id();
569 if let Some(diff_state) = self.diffs.get(&buffer_id)
570 && let Some(unstaged_diff) = diff_state
571 .read(cx)
572 .unstaged_diff
573 .as_ref()
574 .and_then(|weak| weak.upgrade())
575 {
576 if let Some(task) =
577 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
578 {
579 return cx.background_executor().spawn(async move {
580 task.await;
581 Ok(unstaged_diff)
582 });
583 }
584 return Task::ready(Ok(unstaged_diff));
585 }
586
587 let Some((repo, repo_path)) =
588 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
589 else {
590 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
591 };
592
593 let task = self
594 .loading_diffs
595 .entry((buffer_id, DiffKind::Unstaged))
596 .or_insert_with(|| {
597 let staged_text = repo.update(cx, |repo, cx| {
598 repo.load_staged_text(buffer_id, repo_path, cx)
599 });
600 cx.spawn(async move |this, cx| {
601 Self::open_diff_internal(
602 this,
603 DiffKind::Unstaged,
604 staged_text.await.map(DiffBasesChange::SetIndex),
605 buffer,
606 cx,
607 )
608 .await
609 .map_err(Arc::new)
610 })
611 .shared()
612 })
613 .clone();
614
615 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
616 }
617
618 pub fn open_uncommitted_diff(
619 &mut self,
620 buffer: Entity<Buffer>,
621 cx: &mut Context<Self>,
622 ) -> Task<Result<Entity<BufferDiff>>> {
623 let buffer_id = buffer.read(cx).remote_id();
624
625 if let Some(diff_state) = self.diffs.get(&buffer_id)
626 && let Some(uncommitted_diff) = diff_state
627 .read(cx)
628 .uncommitted_diff
629 .as_ref()
630 .and_then(|weak| weak.upgrade())
631 {
632 if let Some(task) =
633 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
634 {
635 return cx.background_executor().spawn(async move {
636 task.await;
637 Ok(uncommitted_diff)
638 });
639 }
640 return Task::ready(Ok(uncommitted_diff));
641 }
642
643 let Some((repo, repo_path)) =
644 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
645 else {
646 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
647 };
648
649 let task = self
650 .loading_diffs
651 .entry((buffer_id, DiffKind::Uncommitted))
652 .or_insert_with(|| {
653 let changes = repo.update(cx, |repo, cx| {
654 repo.load_committed_text(buffer_id, repo_path, cx)
655 });
656
657 cx.spawn(async move |this, cx| {
658 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
659 .await
660 .map_err(Arc::new)
661 })
662 .shared()
663 })
664 .clone();
665
666 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
667 }
668
669 async fn open_diff_internal(
670 this: WeakEntity<Self>,
671 kind: DiffKind,
672 texts: Result<DiffBasesChange>,
673 buffer_entity: Entity<Buffer>,
674 cx: &mut AsyncApp,
675 ) -> Result<Entity<BufferDiff>> {
676 let diff_bases_change = match texts {
677 Err(e) => {
678 this.update(cx, |this, cx| {
679 let buffer = buffer_entity.read(cx);
680 let buffer_id = buffer.remote_id();
681 this.loading_diffs.remove(&(buffer_id, kind));
682 })?;
683 return Err(e);
684 }
685 Ok(change) => change,
686 };
687
688 this.update(cx, |this, cx| {
689 let buffer = buffer_entity.read(cx);
690 let buffer_id = buffer.remote_id();
691 let language = buffer.language().cloned();
692 let language_registry = buffer.language_registry();
693 let text_snapshot = buffer.text_snapshot();
694 this.loading_diffs.remove(&(buffer_id, kind));
695
696 let git_store = cx.weak_entity();
697 let diff_state = this
698 .diffs
699 .entry(buffer_id)
700 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
701
702 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
703
704 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
705 diff_state.update(cx, |diff_state, cx| {
706 diff_state.language = language;
707 diff_state.language_registry = language_registry;
708
709 match kind {
710 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
711 DiffKind::Uncommitted => {
712 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
713 diff
714 } else {
715 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
716 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
717 unstaged_diff
718 };
719
720 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
721 diff_state.uncommitted_diff = Some(diff.downgrade())
722 }
723 }
724
725 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
726 let rx = diff_state.wait_for_recalculation();
727
728 anyhow::Ok(async move {
729 if let Some(rx) = rx {
730 rx.await;
731 }
732 Ok(diff)
733 })
734 })
735 })??
736 .await
737 }
738
739 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
740 let diff_state = self.diffs.get(&buffer_id)?;
741 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
742 }
743
744 pub fn get_uncommitted_diff(
745 &self,
746 buffer_id: BufferId,
747 cx: &App,
748 ) -> Option<Entity<BufferDiff>> {
749 let diff_state = self.diffs.get(&buffer_id)?;
750 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
751 }
752
753 pub fn open_conflict_set(
754 &mut self,
755 buffer: Entity<Buffer>,
756 cx: &mut Context<Self>,
757 ) -> Entity<ConflictSet> {
758 log::debug!("open conflict set");
759 let buffer_id = buffer.read(cx).remote_id();
760
761 if let Some(git_state) = self.diffs.get(&buffer_id)
762 && let Some(conflict_set) = git_state
763 .read(cx)
764 .conflict_set
765 .as_ref()
766 .and_then(|weak| weak.upgrade())
767 {
768 let conflict_set = conflict_set;
769 let buffer_snapshot = buffer.read(cx).text_snapshot();
770
771 git_state.update(cx, |state, cx| {
772 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
773 });
774
775 return conflict_set;
776 }
777
778 let is_unmerged = self
779 .repository_and_path_for_buffer_id(buffer_id, cx)
780 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
781 let git_store = cx.weak_entity();
782 let buffer_git_state = self
783 .diffs
784 .entry(buffer_id)
785 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
786 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
787
788 self._subscriptions
789 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
790 cx.emit(GitStoreEvent::ConflictsUpdated);
791 }));
792
793 buffer_git_state.update(cx, |state, cx| {
794 state.conflict_set = Some(conflict_set.downgrade());
795 let buffer_snapshot = buffer.read(cx).text_snapshot();
796 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
797 });
798
799 conflict_set
800 }
801
802 pub fn project_path_git_status(
803 &self,
804 project_path: &ProjectPath,
805 cx: &App,
806 ) -> Option<FileStatus> {
807 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
808 Some(repo.read(cx).status_for_path(&repo_path)?.status)
809 }
810
811 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
812 let mut work_directory_abs_paths = Vec::new();
813 let mut checkpoints = Vec::new();
814 for repository in self.repositories.values() {
815 repository.update(cx, |repository, _| {
816 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
817 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
818 });
819 }
820
821 cx.background_executor().spawn(async move {
822 let checkpoints = future::try_join_all(checkpoints).await?;
823 Ok(GitStoreCheckpoint {
824 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
825 .into_iter()
826 .zip(checkpoints)
827 .collect(),
828 })
829 })
830 }
831
832 pub fn restore_checkpoint(
833 &self,
834 checkpoint: GitStoreCheckpoint,
835 cx: &mut App,
836 ) -> Task<Result<()>> {
837 let repositories_by_work_dir_abs_path = self
838 .repositories
839 .values()
840 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
841 .collect::<HashMap<_, _>>();
842
843 let mut tasks = Vec::new();
844 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
845 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
846 let restore = repository.update(cx, |repository, _| {
847 repository.restore_checkpoint(checkpoint)
848 });
849 tasks.push(async move { restore.await? });
850 }
851 }
852 cx.background_spawn(async move {
853 future::try_join_all(tasks).await?;
854 Ok(())
855 })
856 }
857
858 /// Compares two checkpoints, returning true if they are equal.
859 pub fn compare_checkpoints(
860 &self,
861 left: GitStoreCheckpoint,
862 mut right: GitStoreCheckpoint,
863 cx: &mut App,
864 ) -> Task<Result<bool>> {
865 let repositories_by_work_dir_abs_path = self
866 .repositories
867 .values()
868 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
869 .collect::<HashMap<_, _>>();
870
871 let mut tasks = Vec::new();
872 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
873 if let Some(right_checkpoint) = right
874 .checkpoints_by_work_dir_abs_path
875 .remove(&work_dir_abs_path)
876 {
877 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
878 {
879 let compare = repository.update(cx, |repository, _| {
880 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
881 });
882
883 tasks.push(async move { compare.await? });
884 }
885 } else {
886 return Task::ready(Ok(false));
887 }
888 }
889 cx.background_spawn(async move {
890 Ok(future::try_join_all(tasks)
891 .await?
892 .into_iter()
893 .all(|result| result))
894 })
895 }
896
897 /// Blames a buffer.
898 pub fn blame_buffer(
899 &self,
900 buffer: &Entity<Buffer>,
901 version: Option<clock::Global>,
902 cx: &mut App,
903 ) -> Task<Result<Option<Blame>>> {
904 let buffer = buffer.read(cx);
905 let Some((repo, repo_path)) =
906 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
907 else {
908 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
909 };
910 let content = match &version {
911 Some(version) => buffer.rope_for_version(version),
912 None => buffer.as_rope().clone(),
913 };
914 let version = version.unwrap_or(buffer.version());
915 let buffer_id = buffer.remote_id();
916
917 let rx = repo.update(cx, |repo, _| {
918 repo.send_job(None, move |state, _| async move {
919 match state {
920 RepositoryState::Local { backend, .. } => backend
921 .blame(repo_path.clone(), content)
922 .await
923 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
924 .map(Some),
925 RepositoryState::Remote { project_id, client } => {
926 let response = client
927 .request(proto::BlameBuffer {
928 project_id: project_id.to_proto(),
929 buffer_id: buffer_id.into(),
930 version: serialize_version(&version),
931 })
932 .await?;
933 Ok(deserialize_blame_buffer_response(response))
934 }
935 }
936 })
937 });
938
939 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
940 }
941
942 pub fn get_permalink_to_line(
943 &self,
944 buffer: &Entity<Buffer>,
945 selection: Range<u32>,
946 cx: &mut App,
947 ) -> Task<Result<url::Url>> {
948 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
949 return Task::ready(Err(anyhow!("buffer has no file")));
950 };
951
952 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
953 &(file.worktree.read(cx).id(), file.path.clone()).into(),
954 cx,
955 ) else {
956 // If we're not in a Git repo, check whether this is a Rust source
957 // file in the Cargo registry (presumably opened with go-to-definition
958 // from a normal Rust file). If so, we can put together a permalink
959 // using crate metadata.
960 if buffer
961 .read(cx)
962 .language()
963 .is_none_or(|lang| lang.name() != "Rust".into())
964 {
965 return Task::ready(Err(anyhow!("no permalink available")));
966 }
967 let file_path = file.worktree.read(cx).absolutize(&file.path);
968 return cx.spawn(async move |cx| {
969 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
970 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
971 .context("no permalink available")
972 });
973
974 // TODO remote case
975 };
976
977 let buffer_id = buffer.read(cx).remote_id();
978 let branch = repo.read(cx).branch.clone();
979 let remote = branch
980 .as_ref()
981 .and_then(|b| b.upstream.as_ref())
982 .and_then(|b| b.remote_name())
983 .unwrap_or("origin")
984 .to_string();
985
986 let rx = repo.update(cx, |repo, _| {
987 repo.send_job(None, move |state, cx| async move {
988 match state {
989 RepositoryState::Local { backend, .. } => {
990 let origin_url = backend
991 .remote_url(&remote)
992 .with_context(|| format!("remote \"{remote}\" not found"))?;
993
994 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
995
996 let provider_registry =
997 cx.update(GitHostingProviderRegistry::default_global)?;
998
999 let (provider, remote) =
1000 parse_git_remote_url(provider_registry, &origin_url)
1001 .context("parsing Git remote URL")?;
1002
1003 let path = repo_path.as_unix_str();
1004
1005 Ok(provider.build_permalink(
1006 remote,
1007 BuildPermalinkParams {
1008 sha: &sha,
1009 path,
1010 selection: Some(selection),
1011 },
1012 ))
1013 }
1014 RepositoryState::Remote { project_id, client } => {
1015 let response = client
1016 .request(proto::GetPermalinkToLine {
1017 project_id: project_id.to_proto(),
1018 buffer_id: buffer_id.into(),
1019 selection: Some(proto::Range {
1020 start: selection.start as u64,
1021 end: selection.end as u64,
1022 }),
1023 })
1024 .await?;
1025
1026 url::Url::parse(&response.permalink).context("failed to parse permalink")
1027 }
1028 }
1029 })
1030 });
1031 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1032 }
1033
1034 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1035 match &self.state {
1036 GitStoreState::Local {
1037 downstream: downstream_client,
1038 ..
1039 } => downstream_client
1040 .as_ref()
1041 .map(|state| (state.client.clone(), state.project_id)),
1042 GitStoreState::Remote {
1043 downstream: downstream_client,
1044 ..
1045 } => downstream_client.clone(),
1046 }
1047 }
1048
1049 fn upstream_client(&self) -> Option<AnyProtoClient> {
1050 match &self.state {
1051 GitStoreState::Local { .. } => None,
1052 GitStoreState::Remote {
1053 upstream_client, ..
1054 } => Some(upstream_client.clone()),
1055 }
1056 }
1057
1058 fn on_worktree_store_event(
1059 &mut self,
1060 worktree_store: Entity<WorktreeStore>,
1061 event: &WorktreeStoreEvent,
1062 cx: &mut Context<Self>,
1063 ) {
1064 let GitStoreState::Local {
1065 project_environment,
1066 downstream,
1067 next_repository_id,
1068 fs,
1069 } = &self.state
1070 else {
1071 return;
1072 };
1073
1074 match event {
1075 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1076 if let Some(worktree) = self
1077 .worktree_store
1078 .read(cx)
1079 .worktree_for_id(*worktree_id, cx)
1080 {
1081 let paths_by_git_repo =
1082 self.process_updated_entries(&worktree, updated_entries, cx);
1083 let downstream = downstream
1084 .as_ref()
1085 .map(|downstream| downstream.updates_tx.clone());
1086 cx.spawn(async move |_, cx| {
1087 let paths_by_git_repo = paths_by_git_repo.await;
1088 for (repo, paths) in paths_by_git_repo {
1089 repo.update(cx, |repo, cx| {
1090 repo.paths_changed(paths, downstream.clone(), cx);
1091 })
1092 .ok();
1093 }
1094 })
1095 .detach();
1096 }
1097 }
1098 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1099 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1100 else {
1101 return;
1102 };
1103 if !worktree.read(cx).is_visible() {
1104 log::debug!(
1105 "not adding repositories for local worktree {:?} because it's not visible",
1106 worktree.read(cx).abs_path()
1107 );
1108 return;
1109 }
1110 self.update_repositories_from_worktree(
1111 project_environment.clone(),
1112 next_repository_id.clone(),
1113 downstream
1114 .as_ref()
1115 .map(|downstream| downstream.updates_tx.clone()),
1116 changed_repos.clone(),
1117 fs.clone(),
1118 cx,
1119 );
1120 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1121 }
1122 _ => {}
1123 }
1124 }
1125 fn on_repository_event(
1126 &mut self,
1127 repo: Entity<Repository>,
1128 event: &RepositoryEvent,
1129 cx: &mut Context<Self>,
1130 ) {
1131 let id = repo.read(cx).id;
1132 let repo_snapshot = repo.read(cx).snapshot.clone();
1133 for (buffer_id, diff) in self.diffs.iter() {
1134 if let Some((buffer_repo, repo_path)) =
1135 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1136 && buffer_repo == repo
1137 {
1138 diff.update(cx, |diff, cx| {
1139 if let Some(conflict_set) = &diff.conflict_set {
1140 let conflict_status_changed =
1141 conflict_set.update(cx, |conflict_set, cx| {
1142 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1143 conflict_set.set_has_conflict(has_conflict, cx)
1144 })?;
1145 if conflict_status_changed {
1146 let buffer_store = self.buffer_store.read(cx);
1147 if let Some(buffer) = buffer_store.get(*buffer_id) {
1148 let _ = diff
1149 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1150 }
1151 }
1152 }
1153 anyhow::Ok(())
1154 })
1155 .ok();
1156 }
1157 }
1158 cx.emit(GitStoreEvent::RepositoryUpdated(
1159 id,
1160 event.clone(),
1161 self.active_repo_id == Some(id),
1162 ))
1163 }
1164
1165 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1166 cx.emit(GitStoreEvent::JobsUpdated)
1167 }
1168
1169 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1170 fn update_repositories_from_worktree(
1171 &mut self,
1172 project_environment: Entity<ProjectEnvironment>,
1173 next_repository_id: Arc<AtomicU64>,
1174 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1175 updated_git_repositories: UpdatedGitRepositoriesSet,
1176 fs: Arc<dyn Fs>,
1177 cx: &mut Context<Self>,
1178 ) {
1179 let mut removed_ids = Vec::new();
1180 for update in updated_git_repositories.iter() {
1181 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1182 let existing_work_directory_abs_path =
1183 repo.read(cx).work_directory_abs_path.clone();
1184 Some(&existing_work_directory_abs_path)
1185 == update.old_work_directory_abs_path.as_ref()
1186 || Some(&existing_work_directory_abs_path)
1187 == update.new_work_directory_abs_path.as_ref()
1188 }) {
1189 if let Some(new_work_directory_abs_path) =
1190 update.new_work_directory_abs_path.clone()
1191 {
1192 existing.update(cx, |existing, cx| {
1193 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1194 existing.schedule_scan(updates_tx.clone(), cx);
1195 });
1196 } else {
1197 removed_ids.push(*id);
1198 }
1199 } else if let UpdatedGitRepository {
1200 new_work_directory_abs_path: Some(work_directory_abs_path),
1201 dot_git_abs_path: Some(dot_git_abs_path),
1202 repository_dir_abs_path: Some(repository_dir_abs_path),
1203 common_dir_abs_path: Some(common_dir_abs_path),
1204 ..
1205 } = update
1206 {
1207 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1208 let git_store = cx.weak_entity();
1209 let repo = cx.new(|cx| {
1210 let mut repo = Repository::local(
1211 id,
1212 work_directory_abs_path.clone(),
1213 dot_git_abs_path.clone(),
1214 repository_dir_abs_path.clone(),
1215 common_dir_abs_path.clone(),
1216 project_environment.downgrade(),
1217 fs.clone(),
1218 git_store,
1219 cx,
1220 );
1221 repo.schedule_scan(updates_tx.clone(), cx);
1222 repo
1223 });
1224 self._subscriptions
1225 .push(cx.subscribe(&repo, Self::on_repository_event));
1226 self._subscriptions
1227 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1228 self.repositories.insert(id, repo);
1229 cx.emit(GitStoreEvent::RepositoryAdded(id));
1230 self.active_repo_id.get_or_insert_with(|| {
1231 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1232 id
1233 });
1234 }
1235 }
1236
1237 for id in removed_ids {
1238 if self.active_repo_id == Some(id) {
1239 self.active_repo_id = None;
1240 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1241 }
1242 self.repositories.remove(&id);
1243 if let Some(updates_tx) = updates_tx.as_ref() {
1244 updates_tx
1245 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1246 .ok();
1247 }
1248 }
1249 }
1250
1251 fn on_buffer_store_event(
1252 &mut self,
1253 _: Entity<BufferStore>,
1254 event: &BufferStoreEvent,
1255 cx: &mut Context<Self>,
1256 ) {
1257 match event {
1258 BufferStoreEvent::BufferAdded(buffer) => {
1259 cx.subscribe(buffer, |this, buffer, event, cx| {
1260 if let BufferEvent::LanguageChanged = event {
1261 let buffer_id = buffer.read(cx).remote_id();
1262 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1263 diff_state.update(cx, |diff_state, cx| {
1264 diff_state.buffer_language_changed(buffer, cx);
1265 });
1266 }
1267 }
1268 })
1269 .detach();
1270 }
1271 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1272 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1273 diffs.remove(buffer_id);
1274 }
1275 }
1276 BufferStoreEvent::BufferDropped(buffer_id) => {
1277 self.diffs.remove(buffer_id);
1278 for diffs in self.shared_diffs.values_mut() {
1279 diffs.remove(buffer_id);
1280 }
1281 }
1282
1283 _ => {}
1284 }
1285 }
1286
1287 pub fn recalculate_buffer_diffs(
1288 &mut self,
1289 buffers: Vec<Entity<Buffer>>,
1290 cx: &mut Context<Self>,
1291 ) -> impl Future<Output = ()> + use<> {
1292 let mut futures = Vec::new();
1293 for buffer in buffers {
1294 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1295 let buffer = buffer.read(cx).text_snapshot();
1296 diff_state.update(cx, |diff_state, cx| {
1297 diff_state.recalculate_diffs(buffer.clone(), cx);
1298 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1299 });
1300 futures.push(diff_state.update(cx, |diff_state, cx| {
1301 diff_state
1302 .reparse_conflict_markers(buffer, cx)
1303 .map(|_| {})
1304 .boxed()
1305 }));
1306 }
1307 }
1308 async move {
1309 futures::future::join_all(futures).await;
1310 }
1311 }
1312
1313 fn on_buffer_diff_event(
1314 &mut self,
1315 diff: Entity<buffer_diff::BufferDiff>,
1316 event: &BufferDiffEvent,
1317 cx: &mut Context<Self>,
1318 ) {
1319 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1320 let buffer_id = diff.read(cx).buffer_id;
1321 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1322 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1323 diff_state.hunk_staging_operation_count += 1;
1324 diff_state.hunk_staging_operation_count
1325 });
1326 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1327 let recv = repo.update(cx, |repo, cx| {
1328 log::debug!("hunks changed for {}", path.as_unix_str());
1329 repo.spawn_set_index_text_job(
1330 path,
1331 new_index_text.as_ref().map(|rope| rope.to_string()),
1332 Some(hunk_staging_operation_count),
1333 cx,
1334 )
1335 });
1336 let diff = diff.downgrade();
1337 cx.spawn(async move |this, cx| {
1338 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1339 diff.update(cx, |diff, cx| {
1340 diff.clear_pending_hunks(cx);
1341 })
1342 .ok();
1343 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1344 .ok();
1345 }
1346 })
1347 .detach();
1348 }
1349 }
1350 }
1351 }
1352
1353 fn local_worktree_git_repos_changed(
1354 &mut self,
1355 worktree: Entity<Worktree>,
1356 changed_repos: &UpdatedGitRepositoriesSet,
1357 cx: &mut Context<Self>,
1358 ) {
1359 log::debug!("local worktree repos changed");
1360 debug_assert!(worktree.read(cx).is_local());
1361
1362 for repository in self.repositories.values() {
1363 repository.update(cx, |repository, cx| {
1364 let repo_abs_path = &repository.work_directory_abs_path;
1365 if changed_repos.iter().any(|update| {
1366 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1367 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1368 }) {
1369 repository.reload_buffer_diff_bases(cx);
1370 }
1371 });
1372 }
1373 }
1374
1375 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1376 &self.repositories
1377 }
1378
1379 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1380 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1381 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1382 Some(status.status)
1383 }
1384
1385 pub fn repository_and_path_for_buffer_id(
1386 &self,
1387 buffer_id: BufferId,
1388 cx: &App,
1389 ) -> Option<(Entity<Repository>, RepoPath)> {
1390 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1391 let project_path = buffer.read(cx).project_path(cx)?;
1392 self.repository_and_path_for_project_path(&project_path, cx)
1393 }
1394
1395 pub fn repository_and_path_for_project_path(
1396 &self,
1397 path: &ProjectPath,
1398 cx: &App,
1399 ) -> Option<(Entity<Repository>, RepoPath)> {
1400 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1401 self.repositories
1402 .values()
1403 .filter_map(|repo| {
1404 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1405 Some((repo.clone(), repo_path))
1406 })
1407 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1408 }
1409
1410 pub fn git_init(
1411 &self,
1412 path: Arc<Path>,
1413 fallback_branch_name: String,
1414 cx: &App,
1415 ) -> Task<Result<()>> {
1416 match &self.state {
1417 GitStoreState::Local { fs, .. } => {
1418 let fs = fs.clone();
1419 cx.background_executor()
1420 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1421 }
1422 GitStoreState::Remote {
1423 upstream_client,
1424 upstream_project_id: project_id,
1425 ..
1426 } => {
1427 let client = upstream_client.clone();
1428 let project_id = *project_id;
1429 cx.background_executor().spawn(async move {
1430 client
1431 .request(proto::GitInit {
1432 project_id: project_id,
1433 abs_path: path.to_string_lossy().into_owned(),
1434 fallback_branch_name,
1435 })
1436 .await?;
1437 Ok(())
1438 })
1439 }
1440 }
1441 }
1442
1443 pub fn git_clone(
1444 &self,
1445 repo: String,
1446 path: impl Into<Arc<std::path::Path>>,
1447 cx: &App,
1448 ) -> Task<Result<()>> {
1449 let path = path.into();
1450 match &self.state {
1451 GitStoreState::Local { fs, .. } => {
1452 let fs = fs.clone();
1453 cx.background_executor()
1454 .spawn(async move { fs.git_clone(&repo, &path).await })
1455 }
1456 GitStoreState::Remote {
1457 upstream_client,
1458 upstream_project_id,
1459 ..
1460 } => {
1461 if upstream_client.is_via_collab() {
1462 return Task::ready(Err(anyhow!(
1463 "Git Clone isn't supported for project guests"
1464 )));
1465 }
1466 let request = upstream_client.request(proto::GitClone {
1467 project_id: *upstream_project_id,
1468 abs_path: path.to_string_lossy().into_owned(),
1469 remote_repo: repo,
1470 });
1471
1472 cx.background_spawn(async move {
1473 let result = request.await?;
1474
1475 match result.success {
1476 true => Ok(()),
1477 false => Err(anyhow!("Git Clone failed")),
1478 }
1479 })
1480 }
1481 }
1482 }
1483
1484 async fn handle_update_repository(
1485 this: Entity<Self>,
1486 envelope: TypedEnvelope<proto::UpdateRepository>,
1487 mut cx: AsyncApp,
1488 ) -> Result<()> {
1489 this.update(&mut cx, |this, cx| {
1490 let path_style = this.worktree_store.read(cx).path_style();
1491 let mut update = envelope.payload;
1492
1493 let id = RepositoryId::from_proto(update.id);
1494 let client = this.upstream_client().context("no upstream client")?;
1495
1496 let mut is_new = false;
1497 let repo = this.repositories.entry(id).or_insert_with(|| {
1498 is_new = true;
1499 let git_store = cx.weak_entity();
1500 cx.new(|cx| {
1501 Repository::remote(
1502 id,
1503 Path::new(&update.abs_path).into(),
1504 path_style,
1505 ProjectId(update.project_id),
1506 client,
1507 git_store,
1508 cx,
1509 )
1510 })
1511 });
1512 if is_new {
1513 this._subscriptions
1514 .push(cx.subscribe(repo, Self::on_repository_event))
1515 }
1516
1517 repo.update(cx, {
1518 let update = update.clone();
1519 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1520 })?;
1521
1522 this.active_repo_id.get_or_insert_with(|| {
1523 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1524 id
1525 });
1526
1527 if let Some((client, project_id)) = this.downstream_client() {
1528 update.project_id = project_id.to_proto();
1529 client.send(update).log_err();
1530 }
1531 Ok(())
1532 })?
1533 }
1534
1535 async fn handle_remove_repository(
1536 this: Entity<Self>,
1537 envelope: TypedEnvelope<proto::RemoveRepository>,
1538 mut cx: AsyncApp,
1539 ) -> Result<()> {
1540 this.update(&mut cx, |this, cx| {
1541 let mut update = envelope.payload;
1542 let id = RepositoryId::from_proto(update.id);
1543 this.repositories.remove(&id);
1544 if let Some((client, project_id)) = this.downstream_client() {
1545 update.project_id = project_id.to_proto();
1546 client.send(update).log_err();
1547 }
1548 if this.active_repo_id == Some(id) {
1549 this.active_repo_id = None;
1550 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1551 }
1552 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1553 })
1554 }
1555
1556 async fn handle_git_init(
1557 this: Entity<Self>,
1558 envelope: TypedEnvelope<proto::GitInit>,
1559 cx: AsyncApp,
1560 ) -> Result<proto::Ack> {
1561 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1562 let name = envelope.payload.fallback_branch_name;
1563 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1564 .await?;
1565
1566 Ok(proto::Ack {})
1567 }
1568
1569 async fn handle_git_clone(
1570 this: Entity<Self>,
1571 envelope: TypedEnvelope<proto::GitClone>,
1572 cx: AsyncApp,
1573 ) -> Result<proto::GitCloneResponse> {
1574 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1575 let repo_name = envelope.payload.remote_repo;
1576 let result = cx
1577 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1578 .await;
1579
1580 Ok(proto::GitCloneResponse {
1581 success: result.is_ok(),
1582 })
1583 }
1584
1585 async fn handle_fetch(
1586 this: Entity<Self>,
1587 envelope: TypedEnvelope<proto::Fetch>,
1588 mut cx: AsyncApp,
1589 ) -> Result<proto::RemoteMessageResponse> {
1590 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1591 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1592 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1593 let askpass_id = envelope.payload.askpass_id;
1594
1595 let askpass = make_remote_delegate(
1596 this,
1597 envelope.payload.project_id,
1598 repository_id,
1599 askpass_id,
1600 &mut cx,
1601 );
1602
1603 let remote_output = repository_handle
1604 .update(&mut cx, |repository_handle, cx| {
1605 repository_handle.fetch(fetch_options, askpass, cx)
1606 })?
1607 .await??;
1608
1609 Ok(proto::RemoteMessageResponse {
1610 stdout: remote_output.stdout,
1611 stderr: remote_output.stderr,
1612 })
1613 }
1614
1615 async fn handle_push(
1616 this: Entity<Self>,
1617 envelope: TypedEnvelope<proto::Push>,
1618 mut cx: AsyncApp,
1619 ) -> Result<proto::RemoteMessageResponse> {
1620 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1621 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1622
1623 let askpass_id = envelope.payload.askpass_id;
1624 let askpass = make_remote_delegate(
1625 this,
1626 envelope.payload.project_id,
1627 repository_id,
1628 askpass_id,
1629 &mut cx,
1630 );
1631
1632 let options = envelope
1633 .payload
1634 .options
1635 .as_ref()
1636 .map(|_| match envelope.payload.options() {
1637 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1638 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1639 });
1640
1641 let branch_name = envelope.payload.branch_name.into();
1642 let remote_name = envelope.payload.remote_name.into();
1643
1644 let remote_output = repository_handle
1645 .update(&mut cx, |repository_handle, cx| {
1646 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1647 })?
1648 .await??;
1649 Ok(proto::RemoteMessageResponse {
1650 stdout: remote_output.stdout,
1651 stderr: remote_output.stderr,
1652 })
1653 }
1654
1655 async fn handle_pull(
1656 this: Entity<Self>,
1657 envelope: TypedEnvelope<proto::Pull>,
1658 mut cx: AsyncApp,
1659 ) -> Result<proto::RemoteMessageResponse> {
1660 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1661 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1662 let askpass_id = envelope.payload.askpass_id;
1663 let askpass = make_remote_delegate(
1664 this,
1665 envelope.payload.project_id,
1666 repository_id,
1667 askpass_id,
1668 &mut cx,
1669 );
1670
1671 let branch_name = envelope.payload.branch_name.into();
1672 let remote_name = envelope.payload.remote_name.into();
1673
1674 let remote_message = repository_handle
1675 .update(&mut cx, |repository_handle, cx| {
1676 repository_handle.pull(branch_name, remote_name, askpass, cx)
1677 })?
1678 .await??;
1679
1680 Ok(proto::RemoteMessageResponse {
1681 stdout: remote_message.stdout,
1682 stderr: remote_message.stderr,
1683 })
1684 }
1685
1686 async fn handle_stage(
1687 this: Entity<Self>,
1688 envelope: TypedEnvelope<proto::Stage>,
1689 mut cx: AsyncApp,
1690 ) -> Result<proto::Ack> {
1691 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1692 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1693
1694 let entries = envelope
1695 .payload
1696 .paths
1697 .into_iter()
1698 .map(|path| RepoPath::new(&path))
1699 .collect::<Result<Vec<_>>>()?;
1700
1701 repository_handle
1702 .update(&mut cx, |repository_handle, cx| {
1703 repository_handle.stage_entries(entries, cx)
1704 })?
1705 .await?;
1706 Ok(proto::Ack {})
1707 }
1708
1709 async fn handle_unstage(
1710 this: Entity<Self>,
1711 envelope: TypedEnvelope<proto::Unstage>,
1712 mut cx: AsyncApp,
1713 ) -> Result<proto::Ack> {
1714 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1715 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1716
1717 let entries = envelope
1718 .payload
1719 .paths
1720 .into_iter()
1721 .map(|path| RepoPath::new(&path))
1722 .collect::<Result<Vec<_>>>()?;
1723
1724 repository_handle
1725 .update(&mut cx, |repository_handle, cx| {
1726 repository_handle.unstage_entries(entries, cx)
1727 })?
1728 .await?;
1729
1730 Ok(proto::Ack {})
1731 }
1732
1733 async fn handle_stash(
1734 this: Entity<Self>,
1735 envelope: TypedEnvelope<proto::Stash>,
1736 mut cx: AsyncApp,
1737 ) -> Result<proto::Ack> {
1738 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1739 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1740
1741 let entries = envelope
1742 .payload
1743 .paths
1744 .into_iter()
1745 .map(|path| RepoPath::new(&path))
1746 .collect::<Result<Vec<_>>>()?;
1747
1748 repository_handle
1749 .update(&mut cx, |repository_handle, cx| {
1750 repository_handle.stash_entries(entries, cx)
1751 })?
1752 .await?;
1753
1754 Ok(proto::Ack {})
1755 }
1756
1757 async fn handle_stash_pop(
1758 this: Entity<Self>,
1759 envelope: TypedEnvelope<proto::StashPop>,
1760 mut cx: AsyncApp,
1761 ) -> Result<proto::Ack> {
1762 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1763 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1764 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1765
1766 repository_handle
1767 .update(&mut cx, |repository_handle, cx| {
1768 repository_handle.stash_pop(stash_index, cx)
1769 })?
1770 .await?;
1771
1772 Ok(proto::Ack {})
1773 }
1774
1775 async fn handle_stash_apply(
1776 this: Entity<Self>,
1777 envelope: TypedEnvelope<proto::StashApply>,
1778 mut cx: AsyncApp,
1779 ) -> Result<proto::Ack> {
1780 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1781 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1782 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1783
1784 repository_handle
1785 .update(&mut cx, |repository_handle, cx| {
1786 repository_handle.stash_apply(stash_index, cx)
1787 })?
1788 .await?;
1789
1790 Ok(proto::Ack {})
1791 }
1792
1793 async fn handle_stash_drop(
1794 this: Entity<Self>,
1795 envelope: TypedEnvelope<proto::StashDrop>,
1796 mut cx: AsyncApp,
1797 ) -> Result<proto::Ack> {
1798 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1799 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1800 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1801
1802 repository_handle
1803 .update(&mut cx, |repository_handle, cx| {
1804 repository_handle.stash_drop(stash_index, cx)
1805 })?
1806 .await??;
1807
1808 Ok(proto::Ack {})
1809 }
1810
1811 async fn handle_set_index_text(
1812 this: Entity<Self>,
1813 envelope: TypedEnvelope<proto::SetIndexText>,
1814 mut cx: AsyncApp,
1815 ) -> Result<proto::Ack> {
1816 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1817 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1818 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1819
1820 repository_handle
1821 .update(&mut cx, |repository_handle, cx| {
1822 repository_handle.spawn_set_index_text_job(
1823 repo_path,
1824 envelope.payload.text,
1825 None,
1826 cx,
1827 )
1828 })?
1829 .await??;
1830 Ok(proto::Ack {})
1831 }
1832
1833 async fn handle_commit(
1834 this: Entity<Self>,
1835 envelope: TypedEnvelope<proto::Commit>,
1836 mut cx: AsyncApp,
1837 ) -> Result<proto::Ack> {
1838 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1839 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1840
1841 let message = SharedString::from(envelope.payload.message);
1842 let name = envelope.payload.name.map(SharedString::from);
1843 let email = envelope.payload.email.map(SharedString::from);
1844 let options = envelope.payload.options.unwrap_or_default();
1845
1846 repository_handle
1847 .update(&mut cx, |repository_handle, cx| {
1848 repository_handle.commit(
1849 message,
1850 name.zip(email),
1851 CommitOptions {
1852 amend: options.amend,
1853 signoff: options.signoff,
1854 },
1855 cx,
1856 )
1857 })?
1858 .await??;
1859 Ok(proto::Ack {})
1860 }
1861
1862 async fn handle_get_remotes(
1863 this: Entity<Self>,
1864 envelope: TypedEnvelope<proto::GetRemotes>,
1865 mut cx: AsyncApp,
1866 ) -> Result<proto::GetRemotesResponse> {
1867 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1868 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1869
1870 let branch_name = envelope.payload.branch_name;
1871
1872 let remotes = repository_handle
1873 .update(&mut cx, |repository_handle, _| {
1874 repository_handle.get_remotes(branch_name)
1875 })?
1876 .await??;
1877
1878 Ok(proto::GetRemotesResponse {
1879 remotes: remotes
1880 .into_iter()
1881 .map(|remotes| proto::get_remotes_response::Remote {
1882 name: remotes.name.to_string(),
1883 })
1884 .collect::<Vec<_>>(),
1885 })
1886 }
1887
1888 async fn handle_get_branches(
1889 this: Entity<Self>,
1890 envelope: TypedEnvelope<proto::GitGetBranches>,
1891 mut cx: AsyncApp,
1892 ) -> Result<proto::GitBranchesResponse> {
1893 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1894 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1895
1896 let branches = repository_handle
1897 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1898 .await??;
1899
1900 Ok(proto::GitBranchesResponse {
1901 branches: branches
1902 .into_iter()
1903 .map(|branch| branch_to_proto(&branch))
1904 .collect::<Vec<_>>(),
1905 })
1906 }
1907 async fn handle_get_default_branch(
1908 this: Entity<Self>,
1909 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1910 mut cx: AsyncApp,
1911 ) -> Result<proto::GetDefaultBranchResponse> {
1912 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1913 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1914
1915 let branch = repository_handle
1916 .update(&mut cx, |repository_handle, _| {
1917 repository_handle.default_branch()
1918 })?
1919 .await??
1920 .map(Into::into);
1921
1922 Ok(proto::GetDefaultBranchResponse { branch })
1923 }
1924 async fn handle_create_branch(
1925 this: Entity<Self>,
1926 envelope: TypedEnvelope<proto::GitCreateBranch>,
1927 mut cx: AsyncApp,
1928 ) -> Result<proto::Ack> {
1929 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1930 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1931 let branch_name = envelope.payload.branch_name;
1932
1933 repository_handle
1934 .update(&mut cx, |repository_handle, _| {
1935 repository_handle.create_branch(branch_name)
1936 })?
1937 .await??;
1938
1939 Ok(proto::Ack {})
1940 }
1941
1942 async fn handle_change_branch(
1943 this: Entity<Self>,
1944 envelope: TypedEnvelope<proto::GitChangeBranch>,
1945 mut cx: AsyncApp,
1946 ) -> Result<proto::Ack> {
1947 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1948 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1949 let branch_name = envelope.payload.branch_name;
1950
1951 repository_handle
1952 .update(&mut cx, |repository_handle, _| {
1953 repository_handle.change_branch(branch_name)
1954 })?
1955 .await??;
1956
1957 Ok(proto::Ack {})
1958 }
1959
1960 async fn handle_rename_branch(
1961 this: Entity<Self>,
1962 envelope: TypedEnvelope<proto::GitRenameBranch>,
1963 mut cx: AsyncApp,
1964 ) -> Result<proto::Ack> {
1965 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1966 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1967 let branch = envelope.payload.branch;
1968 let new_name = envelope.payload.new_name;
1969
1970 repository_handle
1971 .update(&mut cx, |repository_handle, _| {
1972 repository_handle.rename_branch(branch, new_name)
1973 })?
1974 .await??;
1975
1976 Ok(proto::Ack {})
1977 }
1978
1979 async fn handle_show(
1980 this: Entity<Self>,
1981 envelope: TypedEnvelope<proto::GitShow>,
1982 mut cx: AsyncApp,
1983 ) -> Result<proto::GitCommitDetails> {
1984 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1985 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1986
1987 let commit = repository_handle
1988 .update(&mut cx, |repository_handle, _| {
1989 repository_handle.show(envelope.payload.commit)
1990 })?
1991 .await??;
1992 Ok(proto::GitCommitDetails {
1993 sha: commit.sha.into(),
1994 message: commit.message.into(),
1995 commit_timestamp: commit.commit_timestamp,
1996 author_email: commit.author_email.into(),
1997 author_name: commit.author_name.into(),
1998 })
1999 }
2000
2001 async fn handle_load_commit_diff(
2002 this: Entity<Self>,
2003 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2004 mut cx: AsyncApp,
2005 ) -> Result<proto::LoadCommitDiffResponse> {
2006 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2007 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2008
2009 let commit_diff = repository_handle
2010 .update(&mut cx, |repository_handle, _| {
2011 repository_handle.load_commit_diff(envelope.payload.commit)
2012 })?
2013 .await??;
2014 Ok(proto::LoadCommitDiffResponse {
2015 files: commit_diff
2016 .files
2017 .into_iter()
2018 .map(|file| proto::CommitFile {
2019 path: file.path.to_proto(),
2020 old_text: file.old_text,
2021 new_text: file.new_text,
2022 })
2023 .collect(),
2024 })
2025 }
2026
2027 async fn handle_reset(
2028 this: Entity<Self>,
2029 envelope: TypedEnvelope<proto::GitReset>,
2030 mut cx: AsyncApp,
2031 ) -> Result<proto::Ack> {
2032 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2033 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2034
2035 let mode = match envelope.payload.mode() {
2036 git_reset::ResetMode::Soft => ResetMode::Soft,
2037 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2038 };
2039
2040 repository_handle
2041 .update(&mut cx, |repository_handle, cx| {
2042 repository_handle.reset(envelope.payload.commit, mode, cx)
2043 })?
2044 .await??;
2045 Ok(proto::Ack {})
2046 }
2047
2048 async fn handle_checkout_files(
2049 this: Entity<Self>,
2050 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2051 mut cx: AsyncApp,
2052 ) -> Result<proto::Ack> {
2053 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2054 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2055 let paths = envelope
2056 .payload
2057 .paths
2058 .iter()
2059 .map(|s| RepoPath::from_proto(s))
2060 .collect::<Result<Vec<_>>>()?;
2061
2062 repository_handle
2063 .update(&mut cx, |repository_handle, cx| {
2064 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2065 })?
2066 .await??;
2067 Ok(proto::Ack {})
2068 }
2069
2070 async fn handle_open_commit_message_buffer(
2071 this: Entity<Self>,
2072 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2073 mut cx: AsyncApp,
2074 ) -> Result<proto::OpenBufferResponse> {
2075 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2076 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2077 let buffer = repository
2078 .update(&mut cx, |repository, cx| {
2079 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2080 })?
2081 .await?;
2082
2083 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2084 this.update(&mut cx, |this, cx| {
2085 this.buffer_store.update(cx, |buffer_store, cx| {
2086 buffer_store
2087 .create_buffer_for_peer(
2088 &buffer,
2089 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2090 cx,
2091 )
2092 .detach_and_log_err(cx);
2093 })
2094 })?;
2095
2096 Ok(proto::OpenBufferResponse {
2097 buffer_id: buffer_id.to_proto(),
2098 })
2099 }
2100
2101 async fn handle_askpass(
2102 this: Entity<Self>,
2103 envelope: TypedEnvelope<proto::AskPassRequest>,
2104 mut cx: AsyncApp,
2105 ) -> Result<proto::AskPassResponse> {
2106 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2107 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2108
2109 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2110 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2111 debug_panic!("no askpass found");
2112 anyhow::bail!("no askpass found");
2113 };
2114
2115 let response = askpass
2116 .ask_password(envelope.payload.prompt)
2117 .await
2118 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2119
2120 delegates
2121 .lock()
2122 .insert(envelope.payload.askpass_id, askpass);
2123
2124 // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
2125 Ok(proto::AskPassResponse {
2126 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2127 })
2128 }
2129
2130 async fn handle_check_for_pushed_commits(
2131 this: Entity<Self>,
2132 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2133 mut cx: AsyncApp,
2134 ) -> Result<proto::CheckForPushedCommitsResponse> {
2135 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2136 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2137
2138 let branches = repository_handle
2139 .update(&mut cx, |repository_handle, _| {
2140 repository_handle.check_for_pushed_commits()
2141 })?
2142 .await??;
2143 Ok(proto::CheckForPushedCommitsResponse {
2144 pushed_to: branches
2145 .into_iter()
2146 .map(|commit| commit.to_string())
2147 .collect(),
2148 })
2149 }
2150
2151 async fn handle_git_diff(
2152 this: Entity<Self>,
2153 envelope: TypedEnvelope<proto::GitDiff>,
2154 mut cx: AsyncApp,
2155 ) -> Result<proto::GitDiffResponse> {
2156 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2157 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2158 let diff_type = match envelope.payload.diff_type() {
2159 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2160 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2161 };
2162
2163 let mut diff = repository_handle
2164 .update(&mut cx, |repository_handle, cx| {
2165 repository_handle.diff(diff_type, cx)
2166 })?
2167 .await??;
2168 const ONE_MB: usize = 1_000_000;
2169 if diff.len() > ONE_MB {
2170 diff = diff.chars().take(ONE_MB).collect()
2171 }
2172
2173 Ok(proto::GitDiffResponse { diff })
2174 }
2175
2176 async fn handle_open_unstaged_diff(
2177 this: Entity<Self>,
2178 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2179 mut cx: AsyncApp,
2180 ) -> Result<proto::OpenUnstagedDiffResponse> {
2181 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2182 let diff = this
2183 .update(&mut cx, |this, cx| {
2184 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2185 Some(this.open_unstaged_diff(buffer, cx))
2186 })?
2187 .context("missing buffer")?
2188 .await?;
2189 this.update(&mut cx, |this, _| {
2190 let shared_diffs = this
2191 .shared_diffs
2192 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2193 .or_default();
2194 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2195 })?;
2196 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2197 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2198 }
2199
2200 async fn handle_open_uncommitted_diff(
2201 this: Entity<Self>,
2202 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2203 mut cx: AsyncApp,
2204 ) -> Result<proto::OpenUncommittedDiffResponse> {
2205 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2206 let diff = this
2207 .update(&mut cx, |this, cx| {
2208 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2209 Some(this.open_uncommitted_diff(buffer, cx))
2210 })?
2211 .context("missing buffer")?
2212 .await?;
2213 this.update(&mut cx, |this, _| {
2214 let shared_diffs = this
2215 .shared_diffs
2216 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2217 .or_default();
2218 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2219 })?;
2220 diff.read_with(&cx, |diff, cx| {
2221 use proto::open_uncommitted_diff_response::Mode;
2222
2223 let unstaged_diff = diff.secondary_diff();
2224 let index_snapshot = unstaged_diff.and_then(|diff| {
2225 let diff = diff.read(cx);
2226 diff.base_text_exists().then(|| diff.base_text())
2227 });
2228
2229 let mode;
2230 let staged_text;
2231 let committed_text;
2232 if diff.base_text_exists() {
2233 let committed_snapshot = diff.base_text();
2234 committed_text = Some(committed_snapshot.text());
2235 if let Some(index_text) = index_snapshot {
2236 if index_text.remote_id() == committed_snapshot.remote_id() {
2237 mode = Mode::IndexMatchesHead;
2238 staged_text = None;
2239 } else {
2240 mode = Mode::IndexAndHead;
2241 staged_text = Some(index_text.text());
2242 }
2243 } else {
2244 mode = Mode::IndexAndHead;
2245 staged_text = None;
2246 }
2247 } else {
2248 mode = Mode::IndexAndHead;
2249 committed_text = None;
2250 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2251 }
2252
2253 proto::OpenUncommittedDiffResponse {
2254 committed_text,
2255 staged_text,
2256 mode: mode.into(),
2257 }
2258 })
2259 }
2260
2261 async fn handle_update_diff_bases(
2262 this: Entity<Self>,
2263 request: TypedEnvelope<proto::UpdateDiffBases>,
2264 mut cx: AsyncApp,
2265 ) -> Result<()> {
2266 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2267 this.update(&mut cx, |this, cx| {
2268 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2269 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2270 {
2271 let buffer = buffer.read(cx).text_snapshot();
2272 diff_state.update(cx, |diff_state, cx| {
2273 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2274 })
2275 }
2276 })
2277 }
2278
2279 async fn handle_blame_buffer(
2280 this: Entity<Self>,
2281 envelope: TypedEnvelope<proto::BlameBuffer>,
2282 mut cx: AsyncApp,
2283 ) -> Result<proto::BlameBufferResponse> {
2284 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2285 let version = deserialize_version(&envelope.payload.version);
2286 let buffer = this.read_with(&cx, |this, cx| {
2287 this.buffer_store.read(cx).get_existing(buffer_id)
2288 })??;
2289 buffer
2290 .update(&mut cx, |buffer, _| {
2291 buffer.wait_for_version(version.clone())
2292 })?
2293 .await?;
2294 let blame = this
2295 .update(&mut cx, |this, cx| {
2296 this.blame_buffer(&buffer, Some(version), cx)
2297 })?
2298 .await?;
2299 Ok(serialize_blame_buffer_response(blame))
2300 }
2301
2302 async fn handle_get_permalink_to_line(
2303 this: Entity<Self>,
2304 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2305 mut cx: AsyncApp,
2306 ) -> Result<proto::GetPermalinkToLineResponse> {
2307 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2308 // let version = deserialize_version(&envelope.payload.version);
2309 let selection = {
2310 let proto_selection = envelope
2311 .payload
2312 .selection
2313 .context("no selection to get permalink for defined")?;
2314 proto_selection.start as u32..proto_selection.end as u32
2315 };
2316 let buffer = this.read_with(&cx, |this, cx| {
2317 this.buffer_store.read(cx).get_existing(buffer_id)
2318 })??;
2319 let permalink = this
2320 .update(&mut cx, |this, cx| {
2321 this.get_permalink_to_line(&buffer, selection, cx)
2322 })?
2323 .await?;
2324 Ok(proto::GetPermalinkToLineResponse {
2325 permalink: permalink.to_string(),
2326 })
2327 }
2328
2329 fn repository_for_request(
2330 this: &Entity<Self>,
2331 id: RepositoryId,
2332 cx: &mut AsyncApp,
2333 ) -> Result<Entity<Repository>> {
2334 this.read_with(cx, |this, _| {
2335 this.repositories
2336 .get(&id)
2337 .context("missing repository handle")
2338 .cloned()
2339 })?
2340 }
2341
2342 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2343 self.repositories
2344 .iter()
2345 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2346 .collect()
2347 }
2348
2349 fn process_updated_entries(
2350 &self,
2351 worktree: &Entity<Worktree>,
2352 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2353 cx: &mut App,
2354 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2355 let path_style = worktree.read(cx).path_style();
2356 let mut repo_paths = self
2357 .repositories
2358 .values()
2359 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2360 .collect::<Vec<_>>();
2361 let mut entries: Vec<_> = updated_entries
2362 .iter()
2363 .map(|(path, _, _)| path.clone())
2364 .collect();
2365 entries.sort();
2366 let worktree = worktree.read(cx);
2367
2368 let entries = entries
2369 .into_iter()
2370 .map(|path| worktree.absolutize(&path))
2371 .collect::<Arc<[_]>>();
2372
2373 let executor = cx.background_executor().clone();
2374 cx.background_executor().spawn(async move {
2375 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2376 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2377 let mut tasks = FuturesOrdered::new();
2378 for (repo_path, repo) in repo_paths.into_iter().rev() {
2379 let entries = entries.clone();
2380 let task = executor.spawn(async move {
2381 // Find all repository paths that belong to this repo
2382 let mut ix = entries.partition_point(|path| path < &*repo_path);
2383 if ix == entries.len() {
2384 return None;
2385 };
2386
2387 let mut paths = Vec::new();
2388 // All paths prefixed by a given repo will constitute a continuous range.
2389 while let Some(path) = entries.get(ix)
2390 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2391 &repo_path, path, path_style,
2392 )
2393 {
2394 paths.push((repo_path, ix));
2395 ix += 1;
2396 }
2397 if paths.is_empty() {
2398 None
2399 } else {
2400 Some((repo, paths))
2401 }
2402 });
2403 tasks.push_back(task);
2404 }
2405
2406 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2407 let mut path_was_used = vec![false; entries.len()];
2408 let tasks = tasks.collect::<Vec<_>>().await;
2409 // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
2410 // We always want to assign a path to it's innermost repository.
2411 for t in tasks {
2412 let Some((repo, paths)) = t else {
2413 continue;
2414 };
2415 let entry = paths_by_git_repo.entry(repo).or_default();
2416 for (repo_path, ix) in paths {
2417 if path_was_used[ix] {
2418 continue;
2419 }
2420 path_was_used[ix] = true;
2421 entry.push(repo_path);
2422 }
2423 }
2424
2425 paths_by_git_repo
2426 })
2427 }
2428}
2429
2430impl BufferGitState {
2431 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2432 Self {
2433 unstaged_diff: Default::default(),
2434 uncommitted_diff: Default::default(),
2435 recalculate_diff_task: Default::default(),
2436 language: Default::default(),
2437 language_registry: Default::default(),
2438 recalculating_tx: postage::watch::channel_with(false).0,
2439 hunk_staging_operation_count: 0,
2440 hunk_staging_operation_count_as_of_write: 0,
2441 head_text: Default::default(),
2442 index_text: Default::default(),
2443 head_changed: Default::default(),
2444 index_changed: Default::default(),
2445 language_changed: Default::default(),
2446 conflict_updated_futures: Default::default(),
2447 conflict_set: Default::default(),
2448 reparse_conflict_markers_task: Default::default(),
2449 }
2450 }
2451
2452 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2453 self.language = buffer.read(cx).language().cloned();
2454 self.language_changed = true;
2455 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2456 }
2457
2458 fn reparse_conflict_markers(
2459 &mut self,
2460 buffer: text::BufferSnapshot,
2461 cx: &mut Context<Self>,
2462 ) -> oneshot::Receiver<()> {
2463 let (tx, rx) = oneshot::channel();
2464
2465 let Some(conflict_set) = self
2466 .conflict_set
2467 .as_ref()
2468 .and_then(|conflict_set| conflict_set.upgrade())
2469 else {
2470 return rx;
2471 };
2472
2473 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2474 if conflict_set.has_conflict {
2475 Some(conflict_set.snapshot())
2476 } else {
2477 None
2478 }
2479 });
2480
2481 if let Some(old_snapshot) = old_snapshot {
2482 self.conflict_updated_futures.push(tx);
2483 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2484 let (snapshot, changed_range) = cx
2485 .background_spawn(async move {
2486 let new_snapshot = ConflictSet::parse(&buffer);
2487 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2488 (new_snapshot, changed_range)
2489 })
2490 .await;
2491 this.update(cx, |this, cx| {
2492 if let Some(conflict_set) = &this.conflict_set {
2493 conflict_set
2494 .update(cx, |conflict_set, cx| {
2495 conflict_set.set_snapshot(snapshot, changed_range, cx);
2496 })
2497 .ok();
2498 }
2499 let futures = std::mem::take(&mut this.conflict_updated_futures);
2500 for tx in futures {
2501 tx.send(()).ok();
2502 }
2503 })
2504 }))
2505 }
2506
2507 rx
2508 }
2509
2510 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2511 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2512 }
2513
2514 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2515 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2516 }
2517
2518 fn handle_base_texts_updated(
2519 &mut self,
2520 buffer: text::BufferSnapshot,
2521 message: proto::UpdateDiffBases,
2522 cx: &mut Context<Self>,
2523 ) {
2524 use proto::update_diff_bases::Mode;
2525
2526 let Some(mode) = Mode::from_i32(message.mode) else {
2527 return;
2528 };
2529
2530 let diff_bases_change = match mode {
2531 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2532 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2533 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2534 Mode::IndexAndHead => DiffBasesChange::SetEach {
2535 index: message.staged_text,
2536 head: message.committed_text,
2537 },
2538 };
2539
2540 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2541 }
2542
2543 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2544 if *self.recalculating_tx.borrow() {
2545 let mut rx = self.recalculating_tx.subscribe();
2546 Some(async move {
2547 loop {
2548 let is_recalculating = rx.recv().await;
2549 if is_recalculating != Some(true) {
2550 break;
2551 }
2552 }
2553 })
2554 } else {
2555 None
2556 }
2557 }
2558
2559 fn diff_bases_changed(
2560 &mut self,
2561 buffer: text::BufferSnapshot,
2562 diff_bases_change: Option<DiffBasesChange>,
2563 cx: &mut Context<Self>,
2564 ) {
2565 match diff_bases_change {
2566 Some(DiffBasesChange::SetIndex(index)) => {
2567 self.index_text = index.map(|mut index| {
2568 text::LineEnding::normalize(&mut index);
2569 Arc::new(index)
2570 });
2571 self.index_changed = true;
2572 }
2573 Some(DiffBasesChange::SetHead(head)) => {
2574 self.head_text = head.map(|mut head| {
2575 text::LineEnding::normalize(&mut head);
2576 Arc::new(head)
2577 });
2578 self.head_changed = true;
2579 }
2580 Some(DiffBasesChange::SetBoth(text)) => {
2581 let text = text.map(|mut text| {
2582 text::LineEnding::normalize(&mut text);
2583 Arc::new(text)
2584 });
2585 self.head_text = text.clone();
2586 self.index_text = text;
2587 self.head_changed = true;
2588 self.index_changed = true;
2589 }
2590 Some(DiffBasesChange::SetEach { index, head }) => {
2591 self.index_text = index.map(|mut index| {
2592 text::LineEnding::normalize(&mut index);
2593 Arc::new(index)
2594 });
2595 self.index_changed = true;
2596 self.head_text = head.map(|mut head| {
2597 text::LineEnding::normalize(&mut head);
2598 Arc::new(head)
2599 });
2600 self.head_changed = true;
2601 }
2602 None => {}
2603 }
2604
2605 self.recalculate_diffs(buffer, cx)
2606 }
2607
2608 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2609 *self.recalculating_tx.borrow_mut() = true;
2610
2611 let language = self.language.clone();
2612 let language_registry = self.language_registry.clone();
2613 let unstaged_diff = self.unstaged_diff();
2614 let uncommitted_diff = self.uncommitted_diff();
2615 let head = self.head_text.clone();
2616 let index = self.index_text.clone();
2617 let index_changed = self.index_changed;
2618 let head_changed = self.head_changed;
2619 let language_changed = self.language_changed;
2620 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2621 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2622 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2623 (None, None) => true,
2624 _ => false,
2625 };
2626 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2627 log::debug!(
2628 "start recalculating diffs for buffer {}",
2629 buffer.remote_id()
2630 );
2631
2632 let mut new_unstaged_diff = None;
2633 if let Some(unstaged_diff) = &unstaged_diff {
2634 new_unstaged_diff = Some(
2635 BufferDiff::update_diff(
2636 unstaged_diff.clone(),
2637 buffer.clone(),
2638 index,
2639 index_changed,
2640 language_changed,
2641 language.clone(),
2642 language_registry.clone(),
2643 cx,
2644 )
2645 .await?,
2646 );
2647 }
2648
2649 let mut new_uncommitted_diff = None;
2650 if let Some(uncommitted_diff) = &uncommitted_diff {
2651 new_uncommitted_diff = if index_matches_head {
2652 new_unstaged_diff.clone()
2653 } else {
2654 Some(
2655 BufferDiff::update_diff(
2656 uncommitted_diff.clone(),
2657 buffer.clone(),
2658 head,
2659 head_changed,
2660 language_changed,
2661 language.clone(),
2662 language_registry.clone(),
2663 cx,
2664 )
2665 .await?,
2666 )
2667 }
2668 }
2669
2670 let cancel = this.update(cx, |this, _| {
2671 // This checks whether all pending stage/unstage operations
2672 // have quiesced (i.e. both the corresponding write and the
2673 // read of that write have completed). If not, then we cancel
2674 // this recalculation attempt to avoid invalidating pending
2675 // state too quickly; another recalculation will come along
2676 // later and clear the pending state once the state of the index has settled.
2677 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2678 *this.recalculating_tx.borrow_mut() = false;
2679 true
2680 } else {
2681 false
2682 }
2683 })?;
2684 if cancel {
2685 log::debug!(
2686 concat!(
2687 "aborting recalculating diffs for buffer {}",
2688 "due to subsequent hunk operations",
2689 ),
2690 buffer.remote_id()
2691 );
2692 return Ok(());
2693 }
2694
2695 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2696 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2697 {
2698 unstaged_diff.update(cx, |diff, cx| {
2699 if language_changed {
2700 diff.language_changed(cx);
2701 }
2702 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2703 })?
2704 } else {
2705 None
2706 };
2707
2708 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2709 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2710 {
2711 uncommitted_diff.update(cx, |diff, cx| {
2712 if language_changed {
2713 diff.language_changed(cx);
2714 }
2715 diff.set_snapshot_with_secondary(
2716 new_uncommitted_diff,
2717 &buffer,
2718 unstaged_changed_range,
2719 true,
2720 cx,
2721 );
2722 })?;
2723 }
2724
2725 log::debug!(
2726 "finished recalculating diffs for buffer {}",
2727 buffer.remote_id()
2728 );
2729
2730 if let Some(this) = this.upgrade() {
2731 this.update(cx, |this, _| {
2732 this.index_changed = false;
2733 this.head_changed = false;
2734 this.language_changed = false;
2735 *this.recalculating_tx.borrow_mut() = false;
2736 })?;
2737 }
2738
2739 Ok(())
2740 }));
2741 }
2742}
2743
2744fn make_remote_delegate(
2745 this: Entity<GitStore>,
2746 project_id: u64,
2747 repository_id: RepositoryId,
2748 askpass_id: u64,
2749 cx: &mut AsyncApp,
2750) -> AskPassDelegate {
2751 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2752 this.update(cx, |this, cx| {
2753 let Some((client, _)) = this.downstream_client() else {
2754 return;
2755 };
2756 let response = client.request(proto::AskPassRequest {
2757 project_id,
2758 repository_id: repository_id.to_proto(),
2759 askpass_id,
2760 prompt,
2761 });
2762 cx.spawn(async move |_, _| {
2763 let mut response = response.await?.response;
2764 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2765 .ok();
2766 response.zeroize();
2767 anyhow::Ok(())
2768 })
2769 .detach_and_log_err(cx);
2770 })
2771 .log_err();
2772 })
2773}
2774
2775impl RepositoryId {
2776 pub fn to_proto(self) -> u64 {
2777 self.0
2778 }
2779
2780 pub fn from_proto(id: u64) -> Self {
2781 RepositoryId(id)
2782 }
2783}
2784
2785impl RepositorySnapshot {
2786 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2787 Self {
2788 id,
2789 statuses_by_path: Default::default(),
2790 work_directory_abs_path,
2791 branch: None,
2792 head_commit: None,
2793 scan_id: 0,
2794 merge: Default::default(),
2795 remote_origin_url: None,
2796 remote_upstream_url: None,
2797 stash_entries: Default::default(),
2798 path_style,
2799 }
2800 }
2801
2802 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2803 proto::UpdateRepository {
2804 branch_summary: self.branch.as_ref().map(branch_to_proto),
2805 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2806 updated_statuses: self
2807 .statuses_by_path
2808 .iter()
2809 .map(|entry| entry.to_proto())
2810 .collect(),
2811 removed_statuses: Default::default(),
2812 current_merge_conflicts: self
2813 .merge
2814 .conflicted_paths
2815 .iter()
2816 .map(|repo_path| repo_path.to_proto())
2817 .collect(),
2818 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2819 project_id,
2820 id: self.id.to_proto(),
2821 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2822 entry_ids: vec![self.id.to_proto()],
2823 scan_id: self.scan_id,
2824 is_last_update: true,
2825 stash_entries: self
2826 .stash_entries
2827 .entries
2828 .iter()
2829 .map(stash_to_proto)
2830 .collect(),
2831 }
2832 }
2833
2834 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2835 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2836 let mut removed_statuses: Vec<String> = Vec::new();
2837
2838 let mut new_statuses = self.statuses_by_path.iter().peekable();
2839 let mut old_statuses = old.statuses_by_path.iter().peekable();
2840
2841 let mut current_new_entry = new_statuses.next();
2842 let mut current_old_entry = old_statuses.next();
2843 loop {
2844 match (current_new_entry, current_old_entry) {
2845 (Some(new_entry), Some(old_entry)) => {
2846 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2847 Ordering::Less => {
2848 updated_statuses.push(new_entry.to_proto());
2849 current_new_entry = new_statuses.next();
2850 }
2851 Ordering::Equal => {
2852 if new_entry.status != old_entry.status {
2853 updated_statuses.push(new_entry.to_proto());
2854 }
2855 current_old_entry = old_statuses.next();
2856 current_new_entry = new_statuses.next();
2857 }
2858 Ordering::Greater => {
2859 removed_statuses.push(old_entry.repo_path.to_proto());
2860 current_old_entry = old_statuses.next();
2861 }
2862 }
2863 }
2864 (None, Some(old_entry)) => {
2865 removed_statuses.push(old_entry.repo_path.to_proto());
2866 current_old_entry = old_statuses.next();
2867 }
2868 (Some(new_entry), None) => {
2869 updated_statuses.push(new_entry.to_proto());
2870 current_new_entry = new_statuses.next();
2871 }
2872 (None, None) => break,
2873 }
2874 }
2875
2876 proto::UpdateRepository {
2877 branch_summary: self.branch.as_ref().map(branch_to_proto),
2878 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2879 updated_statuses,
2880 removed_statuses,
2881 current_merge_conflicts: self
2882 .merge
2883 .conflicted_paths
2884 .iter()
2885 .map(|path| path.to_proto())
2886 .collect(),
2887 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2888 project_id,
2889 id: self.id.to_proto(),
2890 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2891 entry_ids: vec![],
2892 scan_id: self.scan_id,
2893 is_last_update: true,
2894 stash_entries: self
2895 .stash_entries
2896 .entries
2897 .iter()
2898 .map(stash_to_proto)
2899 .collect(),
2900 }
2901 }
2902
2903 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2904 self.statuses_by_path.iter().cloned()
2905 }
2906
2907 pub fn status_summary(&self) -> GitSummary {
2908 self.statuses_by_path.summary().item_summary
2909 }
2910
2911 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2912 self.statuses_by_path
2913 .get(&PathKey(path.0.clone()), ())
2914 .cloned()
2915 }
2916
2917 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2918 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
2919 }
2920
2921 #[inline]
2922 fn abs_path_to_repo_path_inner(
2923 work_directory_abs_path: &Path,
2924 abs_path: &Path,
2925 path_style: PathStyle,
2926 ) -> Option<RepoPath> {
2927 abs_path
2928 .strip_prefix(&work_directory_abs_path)
2929 .ok()
2930 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
2931 }
2932
2933 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2934 self.merge.conflicted_paths.contains(repo_path)
2935 }
2936
2937 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2938 let had_conflict_on_last_merge_head_change =
2939 self.merge.conflicted_paths.contains(repo_path);
2940 let has_conflict_currently = self
2941 .status_for_path(repo_path)
2942 .is_some_and(|entry| entry.status.is_conflicted());
2943 had_conflict_on_last_merge_head_change || has_conflict_currently
2944 }
2945
2946 /// This is the name that will be displayed in the repository selector for this repository.
2947 pub fn display_name(&self) -> SharedString {
2948 self.work_directory_abs_path
2949 .file_name()
2950 .unwrap_or_default()
2951 .to_string_lossy()
2952 .to_string()
2953 .into()
2954 }
2955}
2956
2957pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
2958 proto::StashEntry {
2959 oid: entry.oid.as_bytes().to_vec(),
2960 message: entry.message.clone(),
2961 branch: entry.branch.clone(),
2962 index: entry.index as u64,
2963 timestamp: entry.timestamp,
2964 }
2965}
2966
2967pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
2968 Ok(StashEntry {
2969 oid: Oid::from_bytes(&entry.oid)?,
2970 message: entry.message.clone(),
2971 index: entry.index as usize,
2972 branch: entry.branch.clone(),
2973 timestamp: entry.timestamp,
2974 })
2975}
2976
2977impl MergeDetails {
2978 async fn load(
2979 backend: &Arc<dyn GitRepository>,
2980 status: &SumTree<StatusEntry>,
2981 prev_snapshot: &RepositorySnapshot,
2982 ) -> Result<(MergeDetails, bool)> {
2983 log::debug!("load merge details");
2984 let message = backend.merge_message().await;
2985 let heads = backend
2986 .revparse_batch(vec![
2987 "MERGE_HEAD".into(),
2988 "CHERRY_PICK_HEAD".into(),
2989 "REBASE_HEAD".into(),
2990 "REVERT_HEAD".into(),
2991 "APPLY_HEAD".into(),
2992 ])
2993 .await
2994 .log_err()
2995 .unwrap_or_default()
2996 .into_iter()
2997 .map(|opt| opt.map(SharedString::from))
2998 .collect::<Vec<_>>();
2999 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3000 let conflicted_paths = if merge_heads_changed {
3001 let current_conflicted_paths = TreeSet::from_ordered_entries(
3002 status
3003 .iter()
3004 .filter(|entry| entry.status.is_conflicted())
3005 .map(|entry| entry.repo_path.clone()),
3006 );
3007
3008 // It can happen that we run a scan while a lengthy merge is in progress
3009 // that will eventually result in conflicts, but before those conflicts
3010 // are reported by `git status`. Since for the moment we only care about
3011 // the merge heads state for the purposes of tracking conflicts, don't update
3012 // this state until we see some conflicts.
3013 if heads.iter().any(Option::is_some)
3014 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3015 && current_conflicted_paths.is_empty()
3016 {
3017 log::debug!("not updating merge heads because no conflicts found");
3018 return Ok((
3019 MergeDetails {
3020 message: message.map(SharedString::from),
3021 ..prev_snapshot.merge.clone()
3022 },
3023 false,
3024 ));
3025 }
3026
3027 current_conflicted_paths
3028 } else {
3029 prev_snapshot.merge.conflicted_paths.clone()
3030 };
3031 let details = MergeDetails {
3032 conflicted_paths,
3033 message: message.map(SharedString::from),
3034 heads,
3035 };
3036 Ok((details, merge_heads_changed))
3037 }
3038}
3039
3040impl Repository {
3041 pub fn snapshot(&self) -> RepositorySnapshot {
3042 self.snapshot.clone()
3043 }
3044
3045 fn local(
3046 id: RepositoryId,
3047 work_directory_abs_path: Arc<Path>,
3048 dot_git_abs_path: Arc<Path>,
3049 repository_dir_abs_path: Arc<Path>,
3050 common_dir_abs_path: Arc<Path>,
3051 project_environment: WeakEntity<ProjectEnvironment>,
3052 fs: Arc<dyn Fs>,
3053 git_store: WeakEntity<GitStore>,
3054 cx: &mut Context<Self>,
3055 ) -> Self {
3056 let snapshot =
3057 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3058 Repository {
3059 this: cx.weak_entity(),
3060 git_store,
3061 snapshot,
3062 commit_message_buffer: None,
3063 askpass_delegates: Default::default(),
3064 paths_needing_status_update: Default::default(),
3065 latest_askpass_id: 0,
3066 job_sender: Repository::spawn_local_git_worker(
3067 work_directory_abs_path,
3068 dot_git_abs_path,
3069 repository_dir_abs_path,
3070 common_dir_abs_path,
3071 project_environment,
3072 fs,
3073 cx,
3074 ),
3075 job_id: 0,
3076 active_jobs: Default::default(),
3077 }
3078 }
3079
3080 fn remote(
3081 id: RepositoryId,
3082 work_directory_abs_path: Arc<Path>,
3083 path_style: PathStyle,
3084 project_id: ProjectId,
3085 client: AnyProtoClient,
3086 git_store: WeakEntity<GitStore>,
3087 cx: &mut Context<Self>,
3088 ) -> Self {
3089 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3090 Self {
3091 this: cx.weak_entity(),
3092 snapshot,
3093 commit_message_buffer: None,
3094 git_store,
3095 paths_needing_status_update: Default::default(),
3096 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3097 askpass_delegates: Default::default(),
3098 latest_askpass_id: 0,
3099 active_jobs: Default::default(),
3100 job_id: 0,
3101 }
3102 }
3103
3104 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3105 self.git_store.upgrade()
3106 }
3107
3108 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3109 let this = cx.weak_entity();
3110 let git_store = self.git_store.clone();
3111 let _ = self.send_keyed_job(
3112 Some(GitJobKey::ReloadBufferDiffBases),
3113 None,
3114 |state, mut cx| async move {
3115 let RepositoryState::Local { backend, .. } = state else {
3116 log::error!("tried to recompute diffs for a non-local repository");
3117 return Ok(());
3118 };
3119
3120 let Some(this) = this.upgrade() else {
3121 return Ok(());
3122 };
3123
3124 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3125 git_store.update(cx, |git_store, cx| {
3126 git_store
3127 .diffs
3128 .iter()
3129 .filter_map(|(buffer_id, diff_state)| {
3130 let buffer_store = git_store.buffer_store.read(cx);
3131 let buffer = buffer_store.get(*buffer_id)?;
3132 let file = File::from_dyn(buffer.read(cx).file())?;
3133 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3134 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3135 log::debug!(
3136 "start reload diff bases for repo path {}",
3137 repo_path.as_unix_str()
3138 );
3139 diff_state.update(cx, |diff_state, _| {
3140 let has_unstaged_diff = diff_state
3141 .unstaged_diff
3142 .as_ref()
3143 .is_some_and(|diff| diff.is_upgradable());
3144 let has_uncommitted_diff = diff_state
3145 .uncommitted_diff
3146 .as_ref()
3147 .is_some_and(|set| set.is_upgradable());
3148
3149 Some((
3150 buffer,
3151 repo_path,
3152 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3153 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3154 ))
3155 })
3156 })
3157 .collect::<Vec<_>>()
3158 })
3159 })??;
3160
3161 let buffer_diff_base_changes = cx
3162 .background_spawn(async move {
3163 let mut changes = Vec::new();
3164 for (buffer, repo_path, current_index_text, current_head_text) in
3165 &repo_diff_state_updates
3166 {
3167 let index_text = if current_index_text.is_some() {
3168 backend.load_index_text(repo_path.clone()).await
3169 } else {
3170 None
3171 };
3172 let head_text = if current_head_text.is_some() {
3173 backend.load_committed_text(repo_path.clone()).await
3174 } else {
3175 None
3176 };
3177
3178 let change =
3179 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3180 (Some(current_index), Some(current_head)) => {
3181 let index_changed =
3182 index_text.as_ref() != current_index.as_deref();
3183 let head_changed =
3184 head_text.as_ref() != current_head.as_deref();
3185 if index_changed && head_changed {
3186 if index_text == head_text {
3187 Some(DiffBasesChange::SetBoth(head_text))
3188 } else {
3189 Some(DiffBasesChange::SetEach {
3190 index: index_text,
3191 head: head_text,
3192 })
3193 }
3194 } else if index_changed {
3195 Some(DiffBasesChange::SetIndex(index_text))
3196 } else if head_changed {
3197 Some(DiffBasesChange::SetHead(head_text))
3198 } else {
3199 None
3200 }
3201 }
3202 (Some(current_index), None) => {
3203 let index_changed =
3204 index_text.as_ref() != current_index.as_deref();
3205 index_changed
3206 .then_some(DiffBasesChange::SetIndex(index_text))
3207 }
3208 (None, Some(current_head)) => {
3209 let head_changed =
3210 head_text.as_ref() != current_head.as_deref();
3211 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3212 }
3213 (None, None) => None,
3214 };
3215
3216 changes.push((buffer.clone(), change))
3217 }
3218 changes
3219 })
3220 .await;
3221
3222 git_store.update(&mut cx, |git_store, cx| {
3223 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3224 let buffer_snapshot = buffer.read(cx).text_snapshot();
3225 let buffer_id = buffer_snapshot.remote_id();
3226 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3227 continue;
3228 };
3229
3230 let downstream_client = git_store.downstream_client();
3231 diff_state.update(cx, |diff_state, cx| {
3232 use proto::update_diff_bases::Mode;
3233
3234 if let Some((diff_bases_change, (client, project_id))) =
3235 diff_bases_change.clone().zip(downstream_client)
3236 {
3237 let (staged_text, committed_text, mode) = match diff_bases_change {
3238 DiffBasesChange::SetIndex(index) => {
3239 (index, None, Mode::IndexOnly)
3240 }
3241 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3242 DiffBasesChange::SetEach { index, head } => {
3243 (index, head, Mode::IndexAndHead)
3244 }
3245 DiffBasesChange::SetBoth(text) => {
3246 (None, text, Mode::IndexMatchesHead)
3247 }
3248 };
3249 client
3250 .send(proto::UpdateDiffBases {
3251 project_id: project_id.to_proto(),
3252 buffer_id: buffer_id.to_proto(),
3253 staged_text,
3254 committed_text,
3255 mode: mode as i32,
3256 })
3257 .log_err();
3258 }
3259
3260 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3261 });
3262 }
3263 })
3264 },
3265 );
3266 }
3267
3268 pub fn send_job<F, Fut, R>(
3269 &mut self,
3270 status: Option<SharedString>,
3271 job: F,
3272 ) -> oneshot::Receiver<R>
3273 where
3274 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3275 Fut: Future<Output = R> + 'static,
3276 R: Send + 'static,
3277 {
3278 self.send_keyed_job(None, status, job)
3279 }
3280
3281 fn send_keyed_job<F, Fut, R>(
3282 &mut self,
3283 key: Option<GitJobKey>,
3284 status: Option<SharedString>,
3285 job: F,
3286 ) -> oneshot::Receiver<R>
3287 where
3288 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3289 Fut: Future<Output = R> + 'static,
3290 R: Send + 'static,
3291 {
3292 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3293 let job_id = post_inc(&mut self.job_id);
3294 let this = self.this.clone();
3295 self.job_sender
3296 .unbounded_send(GitJob {
3297 key,
3298 job: Box::new(move |state, cx: &mut AsyncApp| {
3299 let job = job(state, cx.clone());
3300 cx.spawn(async move |cx| {
3301 if let Some(s) = status.clone() {
3302 this.update(cx, |this, cx| {
3303 this.active_jobs.insert(
3304 job_id,
3305 JobInfo {
3306 start: Instant::now(),
3307 message: s.clone(),
3308 },
3309 );
3310
3311 cx.notify();
3312 })
3313 .ok();
3314 }
3315 let result = job.await;
3316
3317 this.update(cx, |this, cx| {
3318 this.active_jobs.remove(&job_id);
3319 cx.notify();
3320 })
3321 .ok();
3322
3323 result_tx.send(result).ok();
3324 })
3325 }),
3326 })
3327 .ok();
3328 result_rx
3329 }
3330
3331 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3332 let Some(git_store) = self.git_store.upgrade() else {
3333 return;
3334 };
3335 let entity = cx.entity();
3336 git_store.update(cx, |git_store, cx| {
3337 let Some((&id, _)) = git_store
3338 .repositories
3339 .iter()
3340 .find(|(_, handle)| *handle == &entity)
3341 else {
3342 return;
3343 };
3344 git_store.active_repo_id = Some(id);
3345 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3346 });
3347 }
3348
3349 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3350 self.snapshot.status()
3351 }
3352
3353 pub fn cached_stash(&self) -> GitStash {
3354 self.snapshot.stash_entries.clone()
3355 }
3356
3357 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3358 let git_store = self.git_store.upgrade()?;
3359 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3360 let abs_path = self
3361 .snapshot
3362 .work_directory_abs_path
3363 .join(path.as_std_path());
3364 let abs_path = SanitizedPath::new(&abs_path);
3365 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3366 Some(ProjectPath {
3367 worktree_id: worktree.read(cx).id(),
3368 path: relative_path,
3369 })
3370 }
3371
3372 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3373 let git_store = self.git_store.upgrade()?;
3374 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3375 let abs_path = worktree_store.absolutize(path, cx)?;
3376 self.snapshot.abs_path_to_repo_path(&abs_path)
3377 }
3378
3379 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3380 other
3381 .read(cx)
3382 .snapshot
3383 .work_directory_abs_path
3384 .starts_with(&self.snapshot.work_directory_abs_path)
3385 }
3386
3387 pub fn open_commit_buffer(
3388 &mut self,
3389 languages: Option<Arc<LanguageRegistry>>,
3390 buffer_store: Entity<BufferStore>,
3391 cx: &mut Context<Self>,
3392 ) -> Task<Result<Entity<Buffer>>> {
3393 let id = self.id;
3394 if let Some(buffer) = self.commit_message_buffer.clone() {
3395 return Task::ready(Ok(buffer));
3396 }
3397 let this = cx.weak_entity();
3398
3399 let rx = self.send_job(None, move |state, mut cx| async move {
3400 let Some(this) = this.upgrade() else {
3401 bail!("git store was dropped");
3402 };
3403 match state {
3404 RepositoryState::Local { .. } => {
3405 this.update(&mut cx, |_, cx| {
3406 Self::open_local_commit_buffer(languages, buffer_store, cx)
3407 })?
3408 .await
3409 }
3410 RepositoryState::Remote { project_id, client } => {
3411 let request = client.request(proto::OpenCommitMessageBuffer {
3412 project_id: project_id.0,
3413 repository_id: id.to_proto(),
3414 });
3415 let response = request.await.context("requesting to open commit buffer")?;
3416 let buffer_id = BufferId::new(response.buffer_id)?;
3417 let buffer = buffer_store
3418 .update(&mut cx, |buffer_store, cx| {
3419 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3420 })?
3421 .await?;
3422 if let Some(language_registry) = languages {
3423 let git_commit_language =
3424 language_registry.language_for_name("Git Commit").await?;
3425 buffer.update(&mut cx, |buffer, cx| {
3426 buffer.set_language(Some(git_commit_language), cx);
3427 })?;
3428 }
3429 this.update(&mut cx, |this, _| {
3430 this.commit_message_buffer = Some(buffer.clone());
3431 })?;
3432 Ok(buffer)
3433 }
3434 }
3435 });
3436
3437 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3438 }
3439
3440 fn open_local_commit_buffer(
3441 language_registry: Option<Arc<LanguageRegistry>>,
3442 buffer_store: Entity<BufferStore>,
3443 cx: &mut Context<Self>,
3444 ) -> Task<Result<Entity<Buffer>>> {
3445 cx.spawn(async move |repository, cx| {
3446 let buffer = buffer_store
3447 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3448 .await?;
3449
3450 if let Some(language_registry) = language_registry {
3451 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3452 buffer.update(cx, |buffer, cx| {
3453 buffer.set_language(Some(git_commit_language), cx);
3454 })?;
3455 }
3456
3457 repository.update(cx, |repository, _| {
3458 repository.commit_message_buffer = Some(buffer.clone());
3459 })?;
3460 Ok(buffer)
3461 })
3462 }
3463
3464 pub fn checkout_files(
3465 &mut self,
3466 commit: &str,
3467 paths: Vec<RepoPath>,
3468 _cx: &mut App,
3469 ) -> oneshot::Receiver<Result<()>> {
3470 let commit = commit.to_string();
3471 let id = self.id;
3472
3473 self.send_job(
3474 Some(format!("git checkout {}", commit).into()),
3475 move |git_repo, _| async move {
3476 match git_repo {
3477 RepositoryState::Local {
3478 backend,
3479 environment,
3480 ..
3481 } => {
3482 backend
3483 .checkout_files(commit, paths, environment.clone())
3484 .await
3485 }
3486 RepositoryState::Remote { project_id, client } => {
3487 client
3488 .request(proto::GitCheckoutFiles {
3489 project_id: project_id.0,
3490 repository_id: id.to_proto(),
3491 commit,
3492 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3493 })
3494 .await?;
3495
3496 Ok(())
3497 }
3498 }
3499 },
3500 )
3501 }
3502
3503 pub fn reset(
3504 &mut self,
3505 commit: String,
3506 reset_mode: ResetMode,
3507 _cx: &mut App,
3508 ) -> oneshot::Receiver<Result<()>> {
3509 let id = self.id;
3510
3511 self.send_job(None, move |git_repo, _| async move {
3512 match git_repo {
3513 RepositoryState::Local {
3514 backend,
3515 environment,
3516 ..
3517 } => backend.reset(commit, reset_mode, environment).await,
3518 RepositoryState::Remote { project_id, client } => {
3519 client
3520 .request(proto::GitReset {
3521 project_id: project_id.0,
3522 repository_id: id.to_proto(),
3523 commit,
3524 mode: match reset_mode {
3525 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3526 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3527 },
3528 })
3529 .await?;
3530
3531 Ok(())
3532 }
3533 }
3534 })
3535 }
3536
3537 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3538 let id = self.id;
3539 self.send_job(None, move |git_repo, _cx| async move {
3540 match git_repo {
3541 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3542 RepositoryState::Remote { project_id, client } => {
3543 let resp = client
3544 .request(proto::GitShow {
3545 project_id: project_id.0,
3546 repository_id: id.to_proto(),
3547 commit,
3548 })
3549 .await?;
3550
3551 Ok(CommitDetails {
3552 sha: resp.sha.into(),
3553 message: resp.message.into(),
3554 commit_timestamp: resp.commit_timestamp,
3555 author_email: resp.author_email.into(),
3556 author_name: resp.author_name.into(),
3557 })
3558 }
3559 }
3560 })
3561 }
3562
3563 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3564 let id = self.id;
3565 self.send_job(None, move |git_repo, cx| async move {
3566 match git_repo {
3567 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3568 RepositoryState::Remote {
3569 client, project_id, ..
3570 } => {
3571 let response = client
3572 .request(proto::LoadCommitDiff {
3573 project_id: project_id.0,
3574 repository_id: id.to_proto(),
3575 commit,
3576 })
3577 .await?;
3578 Ok(CommitDiff {
3579 files: response
3580 .files
3581 .into_iter()
3582 .map(|file| {
3583 Ok(CommitFile {
3584 path: RepoPath::from_proto(&file.path)?,
3585 old_text: file.old_text,
3586 new_text: file.new_text,
3587 })
3588 })
3589 .collect::<Result<Vec<_>>>()?,
3590 })
3591 }
3592 }
3593 })
3594 }
3595
3596 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3597 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3598 }
3599
3600 pub fn stage_entries(
3601 &self,
3602 entries: Vec<RepoPath>,
3603 cx: &mut Context<Self>,
3604 ) -> Task<anyhow::Result<()>> {
3605 if entries.is_empty() {
3606 return Task::ready(Ok(()));
3607 }
3608 let id = self.id;
3609
3610 let mut save_futures = Vec::new();
3611 if let Some(buffer_store) = self.buffer_store(cx) {
3612 buffer_store.update(cx, |buffer_store, cx| {
3613 for path in &entries {
3614 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3615 continue;
3616 };
3617 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3618 && buffer
3619 .read(cx)
3620 .file()
3621 .is_some_and(|file| file.disk_state().exists())
3622 {
3623 save_futures.push(buffer_store.save_buffer(buffer, cx));
3624 }
3625 }
3626 })
3627 }
3628
3629 cx.spawn(async move |this, cx| {
3630 for save_future in save_futures {
3631 save_future.await?;
3632 }
3633
3634 this.update(cx, |this, _| {
3635 this.send_job(None, move |git_repo, _cx| async move {
3636 match git_repo {
3637 RepositoryState::Local {
3638 backend,
3639 environment,
3640 ..
3641 } => backend.stage_paths(entries, environment.clone()).await,
3642 RepositoryState::Remote { project_id, client } => {
3643 client
3644 .request(proto::Stage {
3645 project_id: project_id.0,
3646 repository_id: id.to_proto(),
3647 paths: entries
3648 .into_iter()
3649 .map(|repo_path| repo_path.to_proto())
3650 .collect(),
3651 })
3652 .await
3653 .context("sending stage request")?;
3654
3655 Ok(())
3656 }
3657 }
3658 })
3659 })?
3660 .await??;
3661
3662 Ok(())
3663 })
3664 }
3665
3666 pub fn unstage_entries(
3667 &self,
3668 entries: Vec<RepoPath>,
3669 cx: &mut Context<Self>,
3670 ) -> Task<anyhow::Result<()>> {
3671 if entries.is_empty() {
3672 return Task::ready(Ok(()));
3673 }
3674 let id = self.id;
3675
3676 let mut save_futures = Vec::new();
3677 if let Some(buffer_store) = self.buffer_store(cx) {
3678 buffer_store.update(cx, |buffer_store, cx| {
3679 for path in &entries {
3680 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3681 continue;
3682 };
3683 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3684 && buffer
3685 .read(cx)
3686 .file()
3687 .is_some_and(|file| file.disk_state().exists())
3688 {
3689 save_futures.push(buffer_store.save_buffer(buffer, cx));
3690 }
3691 }
3692 })
3693 }
3694
3695 cx.spawn(async move |this, cx| {
3696 for save_future in save_futures {
3697 save_future.await?;
3698 }
3699
3700 this.update(cx, |this, _| {
3701 this.send_job(None, move |git_repo, _cx| async move {
3702 match git_repo {
3703 RepositoryState::Local {
3704 backend,
3705 environment,
3706 ..
3707 } => backend.unstage_paths(entries, environment).await,
3708 RepositoryState::Remote { project_id, client } => {
3709 client
3710 .request(proto::Unstage {
3711 project_id: project_id.0,
3712 repository_id: id.to_proto(),
3713 paths: entries
3714 .into_iter()
3715 .map(|repo_path| repo_path.to_proto())
3716 .collect(),
3717 })
3718 .await
3719 .context("sending unstage request")?;
3720
3721 Ok(())
3722 }
3723 }
3724 })
3725 })?
3726 .await??;
3727
3728 Ok(())
3729 })
3730 }
3731
3732 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3733 let to_stage = self
3734 .cached_status()
3735 .filter(|entry| !entry.status.staging().is_fully_staged())
3736 .map(|entry| entry.repo_path)
3737 .collect();
3738 self.stage_entries(to_stage, cx)
3739 }
3740
3741 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3742 let to_unstage = self
3743 .cached_status()
3744 .filter(|entry| entry.status.staging().has_staged())
3745 .map(|entry| entry.repo_path)
3746 .collect();
3747 self.unstage_entries(to_unstage, cx)
3748 }
3749
3750 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3751 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3752
3753 self.stash_entries(to_stash, cx)
3754 }
3755
3756 pub fn stash_entries(
3757 &mut self,
3758 entries: Vec<RepoPath>,
3759 cx: &mut Context<Self>,
3760 ) -> Task<anyhow::Result<()>> {
3761 let id = self.id;
3762
3763 cx.spawn(async move |this, cx| {
3764 this.update(cx, |this, _| {
3765 this.send_job(None, move |git_repo, _cx| async move {
3766 match git_repo {
3767 RepositoryState::Local {
3768 backend,
3769 environment,
3770 ..
3771 } => backend.stash_paths(entries, environment).await,
3772 RepositoryState::Remote { project_id, client } => {
3773 client
3774 .request(proto::Stash {
3775 project_id: project_id.0,
3776 repository_id: id.to_proto(),
3777 paths: entries
3778 .into_iter()
3779 .map(|repo_path| repo_path.to_proto())
3780 .collect(),
3781 })
3782 .await
3783 .context("sending stash request")?;
3784 Ok(())
3785 }
3786 }
3787 })
3788 })?
3789 .await??;
3790 Ok(())
3791 })
3792 }
3793
3794 pub fn stash_pop(
3795 &mut self,
3796 index: Option<usize>,
3797 cx: &mut Context<Self>,
3798 ) -> Task<anyhow::Result<()>> {
3799 let id = self.id;
3800 cx.spawn(async move |this, cx| {
3801 this.update(cx, |this, _| {
3802 this.send_job(None, move |git_repo, _cx| async move {
3803 match git_repo {
3804 RepositoryState::Local {
3805 backend,
3806 environment,
3807 ..
3808 } => backend.stash_pop(index, environment).await,
3809 RepositoryState::Remote { project_id, client } => {
3810 client
3811 .request(proto::StashPop {
3812 project_id: project_id.0,
3813 repository_id: id.to_proto(),
3814 stash_index: index.map(|i| i as u64),
3815 })
3816 .await
3817 .context("sending stash pop request")?;
3818 Ok(())
3819 }
3820 }
3821 })
3822 })?
3823 .await??;
3824 Ok(())
3825 })
3826 }
3827
3828 pub fn stash_apply(
3829 &mut self,
3830 index: Option<usize>,
3831 cx: &mut Context<Self>,
3832 ) -> Task<anyhow::Result<()>> {
3833 let id = self.id;
3834 cx.spawn(async move |this, cx| {
3835 this.update(cx, |this, _| {
3836 this.send_job(None, move |git_repo, _cx| async move {
3837 match git_repo {
3838 RepositoryState::Local {
3839 backend,
3840 environment,
3841 ..
3842 } => backend.stash_apply(index, environment).await,
3843 RepositoryState::Remote { project_id, client } => {
3844 client
3845 .request(proto::StashApply {
3846 project_id: project_id.0,
3847 repository_id: id.to_proto(),
3848 stash_index: index.map(|i| i as u64),
3849 })
3850 .await
3851 .context("sending stash apply request")?;
3852 Ok(())
3853 }
3854 }
3855 })
3856 })?
3857 .await??;
3858 Ok(())
3859 })
3860 }
3861
3862 pub fn stash_drop(
3863 &mut self,
3864 index: Option<usize>,
3865 cx: &mut Context<Self>,
3866 ) -> oneshot::Receiver<anyhow::Result<()>> {
3867 let id = self.id;
3868 let updates_tx = self
3869 .git_store()
3870 .and_then(|git_store| match &git_store.read(cx).state {
3871 GitStoreState::Local { downstream, .. } => downstream
3872 .as_ref()
3873 .map(|downstream| downstream.updates_tx.clone()),
3874 _ => None,
3875 });
3876 let this = cx.weak_entity();
3877 self.send_job(None, move |git_repo, mut cx| async move {
3878 match git_repo {
3879 RepositoryState::Local {
3880 backend,
3881 environment,
3882 ..
3883 } => {
3884 let result = backend.stash_drop(index, environment).await;
3885 if result.is_ok()
3886 && let Ok(stash_entries) = backend.stash_entries().await
3887 {
3888 let snapshot = this.update(&mut cx, |this, cx| {
3889 this.snapshot.stash_entries = stash_entries;
3890 let snapshot = this.snapshot.clone();
3891 cx.emit(RepositoryEvent::Updated {
3892 full_scan: false,
3893 new_instance: false,
3894 });
3895 snapshot
3896 })?;
3897 if let Some(updates_tx) = updates_tx {
3898 updates_tx
3899 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3900 .ok();
3901 }
3902 }
3903
3904 result
3905 }
3906 RepositoryState::Remote { project_id, client } => {
3907 client
3908 .request(proto::StashDrop {
3909 project_id: project_id.0,
3910 repository_id: id.to_proto(),
3911 stash_index: index.map(|i| i as u64),
3912 })
3913 .await
3914 .context("sending stash pop request")?;
3915 Ok(())
3916 }
3917 }
3918 })
3919 }
3920
3921 pub fn commit(
3922 &mut self,
3923 message: SharedString,
3924 name_and_email: Option<(SharedString, SharedString)>,
3925 options: CommitOptions,
3926 _cx: &mut App,
3927 ) -> oneshot::Receiver<Result<()>> {
3928 let id = self.id;
3929
3930 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3931 match git_repo {
3932 RepositoryState::Local {
3933 backend,
3934 environment,
3935 ..
3936 } => {
3937 backend
3938 .commit(message, name_and_email, options, environment)
3939 .await
3940 }
3941 RepositoryState::Remote { project_id, client } => {
3942 let (name, email) = name_and_email.unzip();
3943 client
3944 .request(proto::Commit {
3945 project_id: project_id.0,
3946 repository_id: id.to_proto(),
3947 message: String::from(message),
3948 name: name.map(String::from),
3949 email: email.map(String::from),
3950 options: Some(proto::commit::CommitOptions {
3951 amend: options.amend,
3952 signoff: options.signoff,
3953 }),
3954 })
3955 .await
3956 .context("sending commit request")?;
3957
3958 Ok(())
3959 }
3960 }
3961 })
3962 }
3963
3964 pub fn fetch(
3965 &mut self,
3966 fetch_options: FetchOptions,
3967 askpass: AskPassDelegate,
3968 _cx: &mut App,
3969 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3970 let askpass_delegates = self.askpass_delegates.clone();
3971 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3972 let id = self.id;
3973
3974 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3975 match git_repo {
3976 RepositoryState::Local {
3977 backend,
3978 environment,
3979 ..
3980 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3981 RepositoryState::Remote { project_id, client } => {
3982 askpass_delegates.lock().insert(askpass_id, askpass);
3983 let _defer = util::defer(|| {
3984 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3985 debug_assert!(askpass_delegate.is_some());
3986 });
3987
3988 let response = client
3989 .request(proto::Fetch {
3990 project_id: project_id.0,
3991 repository_id: id.to_proto(),
3992 askpass_id,
3993 remote: fetch_options.to_proto(),
3994 })
3995 .await
3996 .context("sending fetch request")?;
3997
3998 Ok(RemoteCommandOutput {
3999 stdout: response.stdout,
4000 stderr: response.stderr,
4001 })
4002 }
4003 }
4004 })
4005 }
4006
4007 pub fn push(
4008 &mut self,
4009 branch: SharedString,
4010 remote: SharedString,
4011 options: Option<PushOptions>,
4012 askpass: AskPassDelegate,
4013 cx: &mut Context<Self>,
4014 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4015 let askpass_delegates = self.askpass_delegates.clone();
4016 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4017 let id = self.id;
4018
4019 let args = options
4020 .map(|option| match option {
4021 PushOptions::SetUpstream => " --set-upstream",
4022 PushOptions::Force => " --force-with-lease",
4023 })
4024 .unwrap_or("");
4025
4026 let updates_tx = self
4027 .git_store()
4028 .and_then(|git_store| match &git_store.read(cx).state {
4029 GitStoreState::Local { downstream, .. } => downstream
4030 .as_ref()
4031 .map(|downstream| downstream.updates_tx.clone()),
4032 _ => None,
4033 });
4034
4035 let this = cx.weak_entity();
4036 self.send_job(
4037 Some(format!("git push {} {} {}", args, branch, remote).into()),
4038 move |git_repo, mut cx| async move {
4039 match git_repo {
4040 RepositoryState::Local {
4041 backend,
4042 environment,
4043 ..
4044 } => {
4045 let result = backend
4046 .push(
4047 branch.to_string(),
4048 remote.to_string(),
4049 options,
4050 askpass,
4051 environment.clone(),
4052 cx.clone(),
4053 )
4054 .await;
4055 if result.is_ok() {
4056 let branches = backend.branches().await?;
4057 let branch = branches.into_iter().find(|branch| branch.is_head);
4058 log::info!("head branch after scan is {branch:?}");
4059 let snapshot = this.update(&mut cx, |this, cx| {
4060 this.snapshot.branch = branch;
4061 let snapshot = this.snapshot.clone();
4062 cx.emit(RepositoryEvent::Updated {
4063 full_scan: false,
4064 new_instance: false,
4065 });
4066 snapshot
4067 })?;
4068 if let Some(updates_tx) = updates_tx {
4069 updates_tx
4070 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4071 .ok();
4072 }
4073 }
4074 result
4075 }
4076 RepositoryState::Remote { project_id, client } => {
4077 askpass_delegates.lock().insert(askpass_id, askpass);
4078 let _defer = util::defer(|| {
4079 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4080 debug_assert!(askpass_delegate.is_some());
4081 });
4082 let response = client
4083 .request(proto::Push {
4084 project_id: project_id.0,
4085 repository_id: id.to_proto(),
4086 askpass_id,
4087 branch_name: branch.to_string(),
4088 remote_name: remote.to_string(),
4089 options: options.map(|options| match options {
4090 PushOptions::Force => proto::push::PushOptions::Force,
4091 PushOptions::SetUpstream => {
4092 proto::push::PushOptions::SetUpstream
4093 }
4094 }
4095 as i32),
4096 })
4097 .await
4098 .context("sending push request")?;
4099
4100 Ok(RemoteCommandOutput {
4101 stdout: response.stdout,
4102 stderr: response.stderr,
4103 })
4104 }
4105 }
4106 },
4107 )
4108 }
4109
4110 pub fn pull(
4111 &mut self,
4112 branch: SharedString,
4113 remote: SharedString,
4114 askpass: AskPassDelegate,
4115 _cx: &mut App,
4116 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4117 let askpass_delegates = self.askpass_delegates.clone();
4118 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4119 let id = self.id;
4120
4121 self.send_job(
4122 Some(format!("git pull {} {}", remote, branch).into()),
4123 move |git_repo, cx| async move {
4124 match git_repo {
4125 RepositoryState::Local {
4126 backend,
4127 environment,
4128 ..
4129 } => {
4130 backend
4131 .pull(
4132 branch.to_string(),
4133 remote.to_string(),
4134 askpass,
4135 environment.clone(),
4136 cx,
4137 )
4138 .await
4139 }
4140 RepositoryState::Remote { project_id, client } => {
4141 askpass_delegates.lock().insert(askpass_id, askpass);
4142 let _defer = util::defer(|| {
4143 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4144 debug_assert!(askpass_delegate.is_some());
4145 });
4146 let response = client
4147 .request(proto::Pull {
4148 project_id: project_id.0,
4149 repository_id: id.to_proto(),
4150 askpass_id,
4151 branch_name: branch.to_string(),
4152 remote_name: remote.to_string(),
4153 })
4154 .await
4155 .context("sending pull request")?;
4156
4157 Ok(RemoteCommandOutput {
4158 stdout: response.stdout,
4159 stderr: response.stderr,
4160 })
4161 }
4162 }
4163 },
4164 )
4165 }
4166
4167 fn spawn_set_index_text_job(
4168 &mut self,
4169 path: RepoPath,
4170 content: Option<String>,
4171 hunk_staging_operation_count: Option<usize>,
4172 cx: &mut Context<Self>,
4173 ) -> oneshot::Receiver<anyhow::Result<()>> {
4174 let id = self.id;
4175 let this = cx.weak_entity();
4176 let git_store = self.git_store.clone();
4177 self.send_keyed_job(
4178 Some(GitJobKey::WriteIndex(path.clone())),
4179 None,
4180 move |git_repo, mut cx| async move {
4181 log::debug!(
4182 "start updating index text for buffer {}",
4183 path.as_unix_str()
4184 );
4185 match git_repo {
4186 RepositoryState::Local {
4187 backend,
4188 environment,
4189 ..
4190 } => {
4191 backend
4192 .set_index_text(path.clone(), content, environment.clone())
4193 .await?;
4194 }
4195 RepositoryState::Remote { project_id, client } => {
4196 client
4197 .request(proto::SetIndexText {
4198 project_id: project_id.0,
4199 repository_id: id.to_proto(),
4200 path: path.to_proto(),
4201 text: content,
4202 })
4203 .await?;
4204 }
4205 }
4206 log::debug!(
4207 "finish updating index text for buffer {}",
4208 path.as_unix_str()
4209 );
4210
4211 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4212 let project_path = this
4213 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4214 .ok()
4215 .flatten();
4216 git_store.update(&mut cx, |git_store, cx| {
4217 let buffer_id = git_store
4218 .buffer_store
4219 .read(cx)
4220 .get_by_path(&project_path?)?
4221 .read(cx)
4222 .remote_id();
4223 let diff_state = git_store.diffs.get(&buffer_id)?;
4224 diff_state.update(cx, |diff_state, _| {
4225 diff_state.hunk_staging_operation_count_as_of_write =
4226 hunk_staging_operation_count;
4227 });
4228 Some(())
4229 })?;
4230 }
4231 Ok(())
4232 },
4233 )
4234 }
4235
4236 pub fn get_remotes(
4237 &mut self,
4238 branch_name: Option<String>,
4239 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4240 let id = self.id;
4241 self.send_job(None, move |repo, _cx| async move {
4242 match repo {
4243 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4244 RepositoryState::Remote { project_id, client } => {
4245 let response = client
4246 .request(proto::GetRemotes {
4247 project_id: project_id.0,
4248 repository_id: id.to_proto(),
4249 branch_name,
4250 })
4251 .await?;
4252
4253 let remotes = response
4254 .remotes
4255 .into_iter()
4256 .map(|remotes| git::repository::Remote {
4257 name: remotes.name.into(),
4258 })
4259 .collect();
4260
4261 Ok(remotes)
4262 }
4263 }
4264 })
4265 }
4266
4267 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4268 let id = self.id;
4269 self.send_job(None, move |repo, _| async move {
4270 match repo {
4271 RepositoryState::Local { backend, .. } => backend.branches().await,
4272 RepositoryState::Remote { project_id, client } => {
4273 let response = client
4274 .request(proto::GitGetBranches {
4275 project_id: project_id.0,
4276 repository_id: id.to_proto(),
4277 })
4278 .await?;
4279
4280 let branches = response
4281 .branches
4282 .into_iter()
4283 .map(|branch| proto_to_branch(&branch))
4284 .collect();
4285
4286 Ok(branches)
4287 }
4288 }
4289 })
4290 }
4291
4292 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4293 let id = self.id;
4294 self.send_job(None, move |repo, _| async move {
4295 match repo {
4296 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4297 RepositoryState::Remote { project_id, client } => {
4298 let response = client
4299 .request(proto::GetDefaultBranch {
4300 project_id: project_id.0,
4301 repository_id: id.to_proto(),
4302 })
4303 .await?;
4304
4305 anyhow::Ok(response.branch.map(SharedString::from))
4306 }
4307 }
4308 })
4309 }
4310
4311 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4312 let id = self.id;
4313 self.send_job(None, move |repo, _cx| async move {
4314 match repo {
4315 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4316 RepositoryState::Remote { project_id, client } => {
4317 let response = client
4318 .request(proto::GitDiff {
4319 project_id: project_id.0,
4320 repository_id: id.to_proto(),
4321 diff_type: match diff_type {
4322 DiffType::HeadToIndex => {
4323 proto::git_diff::DiffType::HeadToIndex.into()
4324 }
4325 DiffType::HeadToWorktree => {
4326 proto::git_diff::DiffType::HeadToWorktree.into()
4327 }
4328 },
4329 })
4330 .await?;
4331
4332 Ok(response.diff)
4333 }
4334 }
4335 })
4336 }
4337
4338 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4339 let id = self.id;
4340 self.send_job(
4341 Some(format!("git switch -c {branch_name}").into()),
4342 move |repo, _cx| async move {
4343 match repo {
4344 RepositoryState::Local { backend, .. } => {
4345 backend.create_branch(branch_name).await
4346 }
4347 RepositoryState::Remote { project_id, client } => {
4348 client
4349 .request(proto::GitCreateBranch {
4350 project_id: project_id.0,
4351 repository_id: id.to_proto(),
4352 branch_name,
4353 })
4354 .await?;
4355
4356 Ok(())
4357 }
4358 }
4359 },
4360 )
4361 }
4362
4363 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4364 let id = self.id;
4365 self.send_job(
4366 Some(format!("git switch {branch_name}").into()),
4367 move |repo, _cx| async move {
4368 match repo {
4369 RepositoryState::Local { backend, .. } => {
4370 backend.change_branch(branch_name).await
4371 }
4372 RepositoryState::Remote { project_id, client } => {
4373 client
4374 .request(proto::GitChangeBranch {
4375 project_id: project_id.0,
4376 repository_id: id.to_proto(),
4377 branch_name,
4378 })
4379 .await?;
4380
4381 Ok(())
4382 }
4383 }
4384 },
4385 )
4386 }
4387
4388 pub fn rename_branch(
4389 &mut self,
4390 branch: String,
4391 new_name: String,
4392 ) -> oneshot::Receiver<Result<()>> {
4393 let id = self.id;
4394 self.send_job(
4395 Some(format!("git branch -m {branch} {new_name}").into()),
4396 move |repo, _cx| async move {
4397 match repo {
4398 RepositoryState::Local { backend, .. } => {
4399 backend.rename_branch(branch, new_name).await
4400 }
4401 RepositoryState::Remote { project_id, client } => {
4402 client
4403 .request(proto::GitRenameBranch {
4404 project_id: project_id.0,
4405 repository_id: id.to_proto(),
4406 branch,
4407 new_name,
4408 })
4409 .await?;
4410
4411 Ok(())
4412 }
4413 }
4414 },
4415 )
4416 }
4417
4418 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4419 let id = self.id;
4420 self.send_job(None, move |repo, _cx| async move {
4421 match repo {
4422 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4423 RepositoryState::Remote { project_id, client } => {
4424 let response = client
4425 .request(proto::CheckForPushedCommits {
4426 project_id: project_id.0,
4427 repository_id: id.to_proto(),
4428 })
4429 .await?;
4430
4431 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4432
4433 Ok(branches)
4434 }
4435 }
4436 })
4437 }
4438
4439 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4440 self.send_job(None, |repo, _cx| async move {
4441 match repo {
4442 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4443 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4444 }
4445 })
4446 }
4447
4448 pub fn restore_checkpoint(
4449 &mut self,
4450 checkpoint: GitRepositoryCheckpoint,
4451 ) -> oneshot::Receiver<Result<()>> {
4452 self.send_job(None, move |repo, _cx| async move {
4453 match repo {
4454 RepositoryState::Local { backend, .. } => {
4455 backend.restore_checkpoint(checkpoint).await
4456 }
4457 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4458 }
4459 })
4460 }
4461
4462 pub(crate) fn apply_remote_update(
4463 &mut self,
4464 update: proto::UpdateRepository,
4465 is_new: bool,
4466 cx: &mut Context<Self>,
4467 ) -> Result<()> {
4468 let conflicted_paths = TreeSet::from_ordered_entries(
4469 update
4470 .current_merge_conflicts
4471 .into_iter()
4472 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4473 );
4474 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4475 self.snapshot.head_commit = update
4476 .head_commit_details
4477 .as_ref()
4478 .map(proto_to_commit_details);
4479
4480 self.snapshot.merge.conflicted_paths = conflicted_paths;
4481 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4482 self.snapshot.stash_entries = GitStash {
4483 entries: update
4484 .stash_entries
4485 .iter()
4486 .filter_map(|entry| proto_to_stash(entry).ok())
4487 .collect(),
4488 };
4489
4490 let edits = update
4491 .removed_statuses
4492 .into_iter()
4493 .filter_map(|path| {
4494 Some(sum_tree::Edit::Remove(PathKey(
4495 RelPath::from_proto(&path).log_err()?,
4496 )))
4497 })
4498 .chain(
4499 update
4500 .updated_statuses
4501 .into_iter()
4502 .filter_map(|updated_status| {
4503 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4504 }),
4505 )
4506 .collect::<Vec<_>>();
4507 self.snapshot.statuses_by_path.edit(edits, ());
4508 if update.is_last_update {
4509 self.snapshot.scan_id = update.scan_id;
4510 }
4511 cx.emit(RepositoryEvent::Updated {
4512 full_scan: true,
4513 new_instance: is_new,
4514 });
4515 Ok(())
4516 }
4517
4518 pub fn compare_checkpoints(
4519 &mut self,
4520 left: GitRepositoryCheckpoint,
4521 right: GitRepositoryCheckpoint,
4522 ) -> oneshot::Receiver<Result<bool>> {
4523 self.send_job(None, move |repo, _cx| async move {
4524 match repo {
4525 RepositoryState::Local { backend, .. } => {
4526 backend.compare_checkpoints(left, right).await
4527 }
4528 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4529 }
4530 })
4531 }
4532
4533 pub fn diff_checkpoints(
4534 &mut self,
4535 base_checkpoint: GitRepositoryCheckpoint,
4536 target_checkpoint: GitRepositoryCheckpoint,
4537 ) -> oneshot::Receiver<Result<String>> {
4538 self.send_job(None, move |repo, _cx| async move {
4539 match repo {
4540 RepositoryState::Local { backend, .. } => {
4541 backend
4542 .diff_checkpoints(base_checkpoint, target_checkpoint)
4543 .await
4544 }
4545 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4546 }
4547 })
4548 }
4549
4550 fn schedule_scan(
4551 &mut self,
4552 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4553 cx: &mut Context<Self>,
4554 ) {
4555 let this = cx.weak_entity();
4556 let _ = self.send_keyed_job(
4557 Some(GitJobKey::ReloadGitState),
4558 None,
4559 |state, mut cx| async move {
4560 log::debug!("run scheduled git status scan");
4561
4562 let Some(this) = this.upgrade() else {
4563 return Ok(());
4564 };
4565 let RepositoryState::Local { backend, .. } = state else {
4566 bail!("not a local repository")
4567 };
4568 let (snapshot, events) = this
4569 .update(&mut cx, |this, _| {
4570 this.paths_needing_status_update.clear();
4571 compute_snapshot(
4572 this.id,
4573 this.work_directory_abs_path.clone(),
4574 this.snapshot.clone(),
4575 backend.clone(),
4576 )
4577 })?
4578 .await?;
4579 this.update(&mut cx, |this, cx| {
4580 this.snapshot = snapshot.clone();
4581 for event in events {
4582 cx.emit(event);
4583 }
4584 })?;
4585 if let Some(updates_tx) = updates_tx {
4586 updates_tx
4587 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4588 .ok();
4589 }
4590 Ok(())
4591 },
4592 );
4593 }
4594
4595 fn spawn_local_git_worker(
4596 work_directory_abs_path: Arc<Path>,
4597 dot_git_abs_path: Arc<Path>,
4598 _repository_dir_abs_path: Arc<Path>,
4599 _common_dir_abs_path: Arc<Path>,
4600 project_environment: WeakEntity<ProjectEnvironment>,
4601 fs: Arc<dyn Fs>,
4602 cx: &mut Context<Self>,
4603 ) -> mpsc::UnboundedSender<GitJob> {
4604 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4605
4606 cx.spawn(async move |_, cx| {
4607 let environment = project_environment
4608 .upgrade()
4609 .context("missing project environment")?
4610 .update(cx, |project_environment, cx| {
4611 project_environment.get_local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
4612 })?
4613 .await
4614 .unwrap_or_else(|| {
4615 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4616 HashMap::default()
4617 });
4618 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4619 let backend = cx
4620 .background_spawn(async move {
4621 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
4622 .or_else(|| which::which("git").ok());
4623 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4624 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4625 })
4626 .await?;
4627
4628 if let Some(git_hosting_provider_registry) =
4629 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4630 {
4631 git_hosting_providers::register_additional_providers(
4632 git_hosting_provider_registry,
4633 backend.clone(),
4634 );
4635 }
4636
4637 let state = RepositoryState::Local {
4638 backend,
4639 environment: Arc::new(environment),
4640 };
4641 let mut jobs = VecDeque::new();
4642 loop {
4643 while let Ok(Some(next_job)) = job_rx.try_next() {
4644 jobs.push_back(next_job);
4645 }
4646
4647 if let Some(job) = jobs.pop_front() {
4648 if let Some(current_key) = &job.key
4649 && jobs
4650 .iter()
4651 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4652 {
4653 continue;
4654 }
4655 (job.job)(state.clone(), cx).await;
4656 } else if let Some(job) = job_rx.next().await {
4657 jobs.push_back(job);
4658 } else {
4659 break;
4660 }
4661 }
4662 anyhow::Ok(())
4663 })
4664 .detach_and_log_err(cx);
4665
4666 job_tx
4667 }
4668
4669 fn spawn_remote_git_worker(
4670 project_id: ProjectId,
4671 client: AnyProtoClient,
4672 cx: &mut Context<Self>,
4673 ) -> mpsc::UnboundedSender<GitJob> {
4674 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4675
4676 cx.spawn(async move |_, cx| {
4677 let state = RepositoryState::Remote { project_id, client };
4678 let mut jobs = VecDeque::new();
4679 loop {
4680 while let Ok(Some(next_job)) = job_rx.try_next() {
4681 jobs.push_back(next_job);
4682 }
4683
4684 if let Some(job) = jobs.pop_front() {
4685 if let Some(current_key) = &job.key
4686 && jobs
4687 .iter()
4688 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4689 {
4690 continue;
4691 }
4692 (job.job)(state.clone(), cx).await;
4693 } else if let Some(job) = job_rx.next().await {
4694 jobs.push_back(job);
4695 } else {
4696 break;
4697 }
4698 }
4699 anyhow::Ok(())
4700 })
4701 .detach_and_log_err(cx);
4702
4703 job_tx
4704 }
4705
4706 fn load_staged_text(
4707 &mut self,
4708 buffer_id: BufferId,
4709 repo_path: RepoPath,
4710 cx: &App,
4711 ) -> Task<Result<Option<String>>> {
4712 let rx = self.send_job(None, move |state, _| async move {
4713 match state {
4714 RepositoryState::Local { backend, .. } => {
4715 anyhow::Ok(backend.load_index_text(repo_path).await)
4716 }
4717 RepositoryState::Remote { project_id, client } => {
4718 let response = client
4719 .request(proto::OpenUnstagedDiff {
4720 project_id: project_id.to_proto(),
4721 buffer_id: buffer_id.to_proto(),
4722 })
4723 .await?;
4724 Ok(response.staged_text)
4725 }
4726 }
4727 });
4728 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4729 }
4730
4731 fn load_committed_text(
4732 &mut self,
4733 buffer_id: BufferId,
4734 repo_path: RepoPath,
4735 cx: &App,
4736 ) -> Task<Result<DiffBasesChange>> {
4737 let rx = self.send_job(None, move |state, _| async move {
4738 match state {
4739 RepositoryState::Local { backend, .. } => {
4740 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4741 let staged_text = backend.load_index_text(repo_path).await;
4742 let diff_bases_change = if committed_text == staged_text {
4743 DiffBasesChange::SetBoth(committed_text)
4744 } else {
4745 DiffBasesChange::SetEach {
4746 index: staged_text,
4747 head: committed_text,
4748 }
4749 };
4750 anyhow::Ok(diff_bases_change)
4751 }
4752 RepositoryState::Remote { project_id, client } => {
4753 use proto::open_uncommitted_diff_response::Mode;
4754
4755 let response = client
4756 .request(proto::OpenUncommittedDiff {
4757 project_id: project_id.to_proto(),
4758 buffer_id: buffer_id.to_proto(),
4759 })
4760 .await?;
4761 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4762 let bases = match mode {
4763 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4764 Mode::IndexAndHead => DiffBasesChange::SetEach {
4765 head: response.committed_text,
4766 index: response.staged_text,
4767 },
4768 };
4769 Ok(bases)
4770 }
4771 }
4772 });
4773
4774 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4775 }
4776
4777 fn paths_changed(
4778 &mut self,
4779 paths: Vec<RepoPath>,
4780 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4781 cx: &mut Context<Self>,
4782 ) {
4783 self.paths_needing_status_update.extend(paths);
4784
4785 let this = cx.weak_entity();
4786 let _ = self.send_keyed_job(
4787 Some(GitJobKey::RefreshStatuses),
4788 None,
4789 |state, mut cx| async move {
4790 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4791 (
4792 this.snapshot.clone(),
4793 mem::take(&mut this.paths_needing_status_update),
4794 )
4795 })?;
4796 let RepositoryState::Local { backend, .. } = state else {
4797 bail!("not a local repository")
4798 };
4799
4800 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4801 if paths.is_empty() {
4802 return Ok(());
4803 }
4804 let statuses = backend.status(&paths).await?;
4805 let stash_entries = backend.stash_entries().await?;
4806
4807 let changed_path_statuses = cx
4808 .background_spawn(async move {
4809 let mut changed_path_statuses = Vec::new();
4810 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4811 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4812
4813 for (repo_path, status) in &*statuses.entries {
4814 changed_paths.remove(repo_path);
4815 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4816 && cursor.item().is_some_and(|entry| entry.status == *status)
4817 {
4818 continue;
4819 }
4820
4821 changed_path_statuses.push(Edit::Insert(StatusEntry {
4822 repo_path: repo_path.clone(),
4823 status: *status,
4824 }));
4825 }
4826 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4827 for path in changed_paths.into_iter() {
4828 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4829 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4830 }
4831 }
4832 changed_path_statuses
4833 })
4834 .await;
4835
4836 this.update(&mut cx, |this, cx| {
4837 let needs_update = !changed_path_statuses.is_empty()
4838 || this.snapshot.stash_entries != stash_entries;
4839 this.snapshot.stash_entries = stash_entries;
4840 if !changed_path_statuses.is_empty() {
4841 this.snapshot
4842 .statuses_by_path
4843 .edit(changed_path_statuses, ());
4844 this.snapshot.scan_id += 1;
4845 }
4846
4847 if needs_update {
4848 cx.emit(RepositoryEvent::Updated {
4849 full_scan: false,
4850 new_instance: false,
4851 });
4852 }
4853
4854 if let Some(updates_tx) = updates_tx {
4855 updates_tx
4856 .unbounded_send(DownstreamUpdate::UpdateRepository(
4857 this.snapshot.clone(),
4858 ))
4859 .ok();
4860 }
4861 cx.emit(RepositoryEvent::PathsChanged);
4862 })
4863 },
4864 );
4865 }
4866
4867 /// currently running git command and when it started
4868 pub fn current_job(&self) -> Option<JobInfo> {
4869 self.active_jobs.values().next().cloned()
4870 }
4871
4872 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4873 self.send_job(None, |_, _| async {})
4874 }
4875}
4876
4877fn get_permalink_in_rust_registry_src(
4878 provider_registry: Arc<GitHostingProviderRegistry>,
4879 path: PathBuf,
4880 selection: Range<u32>,
4881) -> Result<url::Url> {
4882 #[derive(Deserialize)]
4883 struct CargoVcsGit {
4884 sha1: String,
4885 }
4886
4887 #[derive(Deserialize)]
4888 struct CargoVcsInfo {
4889 git: CargoVcsGit,
4890 path_in_vcs: String,
4891 }
4892
4893 #[derive(Deserialize)]
4894 struct CargoPackage {
4895 repository: String,
4896 }
4897
4898 #[derive(Deserialize)]
4899 struct CargoToml {
4900 package: CargoPackage,
4901 }
4902
4903 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4904 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4905 Some((dir, json))
4906 }) else {
4907 bail!("No .cargo_vcs_info.json found in parent directories")
4908 };
4909 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4910 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4911 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4912 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4913 .context("parsing package.repository field of manifest")?;
4914 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4915 let permalink = provider.build_permalink(
4916 remote,
4917 BuildPermalinkParams {
4918 sha: &cargo_vcs_info.git.sha1,
4919 path: &path.to_string_lossy(),
4920 selection: Some(selection),
4921 },
4922 );
4923 Ok(permalink)
4924}
4925
4926fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4927 let Some(blame) = blame else {
4928 return proto::BlameBufferResponse {
4929 blame_response: None,
4930 };
4931 };
4932
4933 let entries = blame
4934 .entries
4935 .into_iter()
4936 .map(|entry| proto::BlameEntry {
4937 sha: entry.sha.as_bytes().into(),
4938 start_line: entry.range.start,
4939 end_line: entry.range.end,
4940 original_line_number: entry.original_line_number,
4941 author: entry.author,
4942 author_mail: entry.author_mail,
4943 author_time: entry.author_time,
4944 author_tz: entry.author_tz,
4945 committer: entry.committer_name,
4946 committer_mail: entry.committer_email,
4947 committer_time: entry.committer_time,
4948 committer_tz: entry.committer_tz,
4949 summary: entry.summary,
4950 previous: entry.previous,
4951 filename: entry.filename,
4952 })
4953 .collect::<Vec<_>>();
4954
4955 let messages = blame
4956 .messages
4957 .into_iter()
4958 .map(|(oid, message)| proto::CommitMessage {
4959 oid: oid.as_bytes().into(),
4960 message,
4961 })
4962 .collect::<Vec<_>>();
4963
4964 proto::BlameBufferResponse {
4965 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4966 entries,
4967 messages,
4968 remote_url: blame.remote_url,
4969 }),
4970 }
4971}
4972
4973fn deserialize_blame_buffer_response(
4974 response: proto::BlameBufferResponse,
4975) -> Option<git::blame::Blame> {
4976 let response = response.blame_response?;
4977 let entries = response
4978 .entries
4979 .into_iter()
4980 .filter_map(|entry| {
4981 Some(git::blame::BlameEntry {
4982 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4983 range: entry.start_line..entry.end_line,
4984 original_line_number: entry.original_line_number,
4985 committer_name: entry.committer,
4986 committer_time: entry.committer_time,
4987 committer_tz: entry.committer_tz,
4988 committer_email: entry.committer_mail,
4989 author: entry.author,
4990 author_mail: entry.author_mail,
4991 author_time: entry.author_time,
4992 author_tz: entry.author_tz,
4993 summary: entry.summary,
4994 previous: entry.previous,
4995 filename: entry.filename,
4996 })
4997 })
4998 .collect::<Vec<_>>();
4999
5000 let messages = response
5001 .messages
5002 .into_iter()
5003 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5004 .collect::<HashMap<_, _>>();
5005
5006 Some(Blame {
5007 entries,
5008 messages,
5009 remote_url: response.remote_url,
5010 })
5011}
5012
5013fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5014 proto::Branch {
5015 is_head: branch.is_head,
5016 ref_name: branch.ref_name.to_string(),
5017 unix_timestamp: branch
5018 .most_recent_commit
5019 .as_ref()
5020 .map(|commit| commit.commit_timestamp as u64),
5021 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5022 ref_name: upstream.ref_name.to_string(),
5023 tracking: upstream
5024 .tracking
5025 .status()
5026 .map(|upstream| proto::UpstreamTracking {
5027 ahead: upstream.ahead as u64,
5028 behind: upstream.behind as u64,
5029 }),
5030 }),
5031 most_recent_commit: branch
5032 .most_recent_commit
5033 .as_ref()
5034 .map(|commit| proto::CommitSummary {
5035 sha: commit.sha.to_string(),
5036 subject: commit.subject.to_string(),
5037 commit_timestamp: commit.commit_timestamp,
5038 author_name: commit.author_name.to_string(),
5039 }),
5040 }
5041}
5042
5043fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5044 git::repository::Branch {
5045 is_head: proto.is_head,
5046 ref_name: proto.ref_name.clone().into(),
5047 upstream: proto
5048 .upstream
5049 .as_ref()
5050 .map(|upstream| git::repository::Upstream {
5051 ref_name: upstream.ref_name.to_string().into(),
5052 tracking: upstream
5053 .tracking
5054 .as_ref()
5055 .map(|tracking| {
5056 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5057 ahead: tracking.ahead as u32,
5058 behind: tracking.behind as u32,
5059 })
5060 })
5061 .unwrap_or(git::repository::UpstreamTracking::Gone),
5062 }),
5063 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5064 git::repository::CommitSummary {
5065 sha: commit.sha.to_string().into(),
5066 subject: commit.subject.to_string().into(),
5067 commit_timestamp: commit.commit_timestamp,
5068 author_name: commit.author_name.to_string().into(),
5069 has_parent: true,
5070 }
5071 }),
5072 }
5073}
5074
5075fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5076 proto::GitCommitDetails {
5077 sha: commit.sha.to_string(),
5078 message: commit.message.to_string(),
5079 commit_timestamp: commit.commit_timestamp,
5080 author_email: commit.author_email.to_string(),
5081 author_name: commit.author_name.to_string(),
5082 }
5083}
5084
5085fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5086 CommitDetails {
5087 sha: proto.sha.clone().into(),
5088 message: proto.message.clone().into(),
5089 commit_timestamp: proto.commit_timestamp,
5090 author_email: proto.author_email.clone().into(),
5091 author_name: proto.author_name.clone().into(),
5092 }
5093}
5094
5095async fn compute_snapshot(
5096 id: RepositoryId,
5097 work_directory_abs_path: Arc<Path>,
5098 prev_snapshot: RepositorySnapshot,
5099 backend: Arc<dyn GitRepository>,
5100) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5101 let mut events = Vec::new();
5102 let branches = backend.branches().await?;
5103 let branch = branches.into_iter().find(|branch| branch.is_head);
5104 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5105 let stash_entries = backend.stash_entries().await?;
5106 let statuses_by_path = SumTree::from_iter(
5107 statuses
5108 .entries
5109 .iter()
5110 .map(|(repo_path, status)| StatusEntry {
5111 repo_path: repo_path.clone(),
5112 status: *status,
5113 }),
5114 (),
5115 );
5116 let (merge_details, merge_heads_changed) =
5117 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5118 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5119
5120 if merge_heads_changed
5121 || branch != prev_snapshot.branch
5122 || statuses_by_path != prev_snapshot.statuses_by_path
5123 {
5124 events.push(RepositoryEvent::Updated {
5125 full_scan: true,
5126 new_instance: false,
5127 });
5128 }
5129
5130 // Cache merge conflict paths so they don't change from staging/unstaging,
5131 // until the merge heads change (at commit time, etc.).
5132 if merge_heads_changed {
5133 events.push(RepositoryEvent::MergeHeadsChanged);
5134 }
5135
5136 // Useful when branch is None in detached head state
5137 let head_commit = match backend.head_sha().await {
5138 Some(head_sha) => backend.show(head_sha).await.log_err(),
5139 None => None,
5140 };
5141
5142 // Used by edit prediction data collection
5143 let remote_origin_url = backend.remote_url("origin");
5144 let remote_upstream_url = backend.remote_url("upstream");
5145
5146 let snapshot = RepositorySnapshot {
5147 id,
5148 statuses_by_path,
5149 work_directory_abs_path,
5150 path_style: prev_snapshot.path_style,
5151 scan_id: prev_snapshot.scan_id + 1,
5152 branch,
5153 head_commit,
5154 merge: merge_details,
5155 remote_origin_url,
5156 remote_upstream_url,
5157 stash_entries,
5158 };
5159
5160 Ok((snapshot, events))
5161}
5162
5163fn status_from_proto(
5164 simple_status: i32,
5165 status: Option<proto::GitFileStatus>,
5166) -> anyhow::Result<FileStatus> {
5167 use proto::git_file_status::Variant;
5168
5169 let Some(variant) = status.and_then(|status| status.variant) else {
5170 let code = proto::GitStatus::from_i32(simple_status)
5171 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5172 let result = match code {
5173 proto::GitStatus::Added => TrackedStatus {
5174 worktree_status: StatusCode::Added,
5175 index_status: StatusCode::Unmodified,
5176 }
5177 .into(),
5178 proto::GitStatus::Modified => TrackedStatus {
5179 worktree_status: StatusCode::Modified,
5180 index_status: StatusCode::Unmodified,
5181 }
5182 .into(),
5183 proto::GitStatus::Conflict => UnmergedStatus {
5184 first_head: UnmergedStatusCode::Updated,
5185 second_head: UnmergedStatusCode::Updated,
5186 }
5187 .into(),
5188 proto::GitStatus::Deleted => TrackedStatus {
5189 worktree_status: StatusCode::Deleted,
5190 index_status: StatusCode::Unmodified,
5191 }
5192 .into(),
5193 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5194 };
5195 return Ok(result);
5196 };
5197
5198 let result = match variant {
5199 Variant::Untracked(_) => FileStatus::Untracked,
5200 Variant::Ignored(_) => FileStatus::Ignored,
5201 Variant::Unmerged(unmerged) => {
5202 let [first_head, second_head] =
5203 [unmerged.first_head, unmerged.second_head].map(|head| {
5204 let code = proto::GitStatus::from_i32(head)
5205 .with_context(|| format!("Invalid git status code: {head}"))?;
5206 let result = match code {
5207 proto::GitStatus::Added => UnmergedStatusCode::Added,
5208 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5209 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5210 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5211 };
5212 Ok(result)
5213 });
5214 let [first_head, second_head] = [first_head?, second_head?];
5215 UnmergedStatus {
5216 first_head,
5217 second_head,
5218 }
5219 .into()
5220 }
5221 Variant::Tracked(tracked) => {
5222 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5223 .map(|status| {
5224 let code = proto::GitStatus::from_i32(status)
5225 .with_context(|| format!("Invalid git status code: {status}"))?;
5226 let result = match code {
5227 proto::GitStatus::Modified => StatusCode::Modified,
5228 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5229 proto::GitStatus::Added => StatusCode::Added,
5230 proto::GitStatus::Deleted => StatusCode::Deleted,
5231 proto::GitStatus::Renamed => StatusCode::Renamed,
5232 proto::GitStatus::Copied => StatusCode::Copied,
5233 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5234 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5235 };
5236 Ok(result)
5237 });
5238 let [index_status, worktree_status] = [index_status?, worktree_status?];
5239 TrackedStatus {
5240 index_status,
5241 worktree_status,
5242 }
5243 .into()
5244 }
5245 };
5246 Ok(result)
5247}
5248
5249fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5250 use proto::git_file_status::{Tracked, Unmerged, Variant};
5251
5252 let variant = match status {
5253 FileStatus::Untracked => Variant::Untracked(Default::default()),
5254 FileStatus::Ignored => Variant::Ignored(Default::default()),
5255 FileStatus::Unmerged(UnmergedStatus {
5256 first_head,
5257 second_head,
5258 }) => Variant::Unmerged(Unmerged {
5259 first_head: unmerged_status_to_proto(first_head),
5260 second_head: unmerged_status_to_proto(second_head),
5261 }),
5262 FileStatus::Tracked(TrackedStatus {
5263 index_status,
5264 worktree_status,
5265 }) => Variant::Tracked(Tracked {
5266 index_status: tracked_status_to_proto(index_status),
5267 worktree_status: tracked_status_to_proto(worktree_status),
5268 }),
5269 };
5270 proto::GitFileStatus {
5271 variant: Some(variant),
5272 }
5273}
5274
5275fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5276 match code {
5277 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5278 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5279 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5280 }
5281}
5282
5283fn tracked_status_to_proto(code: StatusCode) -> i32 {
5284 match code {
5285 StatusCode::Added => proto::GitStatus::Added as _,
5286 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5287 StatusCode::Modified => proto::GitStatus::Modified as _,
5288 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5289 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5290 StatusCode::Copied => proto::GitStatus::Copied as _,
5291 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5292 }
5293}