1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::{AskPassDelegate, EncryptedPassword};
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, Oid, WORK_DIRECTORY_REPO_PATH,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 stash::{GitStash, StashEntry},
32 status::{
33 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
34 },
35};
36use gpui::{
37 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
38 WeakEntity,
39};
40use language::{
41 Buffer, BufferEvent, Language, LanguageRegistry,
42 proto::{deserialize_version, serialize_version},
43};
44use parking_lot::Mutex;
45use postage::stream::Stream as _;
46use rpc::{
47 AnyProtoClient, TypedEnvelope,
48 proto::{self, FromProto, ToProto, git_reset, split_repository_update},
49};
50use serde::Deserialize;
51use std::{
52 cmp::Ordering,
53 collections::{BTreeSet, VecDeque},
54 future::Future,
55 mem,
56 ops::Range,
57 path::{Path, PathBuf},
58 sync::{
59 Arc,
60 atomic::{self, AtomicU64},
61 },
62 time::Instant,
63};
64use sum_tree::{Edit, SumTree, TreeSet};
65use text::{Bias, BufferId};
66use util::{ResultExt, debug_panic, paths::SanitizedPath, post_inc};
67use worktree::{
68 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
69 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
70};
71use zeroize::Zeroize;
72
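/// Tracks the Git state for a project: the repositories discovered in its
/// worktrees, per-buffer diff and conflict state, and (when the project is
/// shared) synchronization of that state with collaborators.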
73pub struct GitStore {
74 state: GitStoreState,
75 buffer_store: Entity<BufferStore>,
76 worktree_store: Entity<WorktreeStore>,
77 repositories: HashMap<RepositoryId, Entity<Repository>>,
78 active_repo_id: Option<RepositoryId>,
79 #[allow(clippy::type_complexity)]
80 loading_diffs:
81 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
82 diffs: HashMap<BufferId, Entity<BufferGitState>>,
83 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
84 _subscriptions: Vec<Subscription>,
85}
86
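/// Strong handles to the diffs that have been shared with a remote peer for a
/// buffer, kept so the diffs stay alive while the peer holds the buffer open.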
87#[derive(Default)]
88struct SharedDiffs {
89 unstaged: Option<Entity<BufferDiff>>,
90 uncommitted: Option<Entity<BufferDiff>>,
91}
92
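/// Per-buffer Git state: weak handles to the buffer's unstaged and uncommitted
/// diffs and its conflict set, plus the bookkeeping used to keep them in sync
/// with the repository.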
93struct BufferGitState {
94 unstaged_diff: Option<WeakEntity<BufferDiff>>,
95 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
96 conflict_set: Option<WeakEntity<ConflictSet>>,
97 recalculate_diff_task: Option<Task<Result<()>>>,
98 reparse_conflict_markers_task: Option<Task<Result<()>>>,
99 language: Option<Arc<Language>>,
100 language_registry: Option<Arc<LanguageRegistry>>,
101 conflict_updated_futures: Vec<oneshot::Sender<()>>,
102 recalculating_tx: postage::watch::Sender<bool>,
103
104 /// These operation counts are used to ensure that head and index text
105 /// values read from the git repository are up-to-date with any hunk staging
106 /// operations that have been performed on the BufferDiff.
107 ///
108 /// The operation count is incremented immediately when the user initiates a
109 /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
111 /// the operation count that prompted the write.
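    ///
    /// For example (illustrative): staging a hunk bumps the count from 2 to 3;
    /// once the resulting index text has been written to disk, the "as of
    /// write" value becomes 3, so a repository read taken while the count was
    /// still 2 is known to be stale.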
112 hunk_staging_operation_count: usize,
113 hunk_staging_operation_count_as_of_write: usize,
114
115 head_text: Option<Arc<String>>,
116 index_text: Option<Arc<String>>,
117 head_changed: bool,
118 index_changed: bool,
119 language_changed: bool,
120}
121
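/// Describes which diff base texts changed for a buffer: only the index text,
/// only the HEAD text, each to a distinct value, or both to the same value.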
122#[derive(Clone, Debug)]
123enum DiffBasesChange {
124 SetIndex(Option<String>),
125 SetHead(Option<String>),
126 SetEach {
127 index: Option<String>,
128 head: Option<String>,
129 },
130 SetBoth(Option<String>),
131}
132
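/// The kind of diff tracked for a buffer: unstaged (buffer vs. index) or
/// uncommitted (buffer vs. HEAD).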
133#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
134enum DiffKind {
135 Unstaged,
136 Uncommitted,
137}
138
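/// Whether this store manages local repositories directly or proxies requests
/// to an upstream project host.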
139enum GitStoreState {
140 Local {
141 next_repository_id: Arc<AtomicU64>,
142 downstream: Option<LocalDownstreamState>,
143 project_environment: Entity<ProjectEnvironment>,
144 fs: Arc<dyn Fs>,
145 },
146 Remote {
147 upstream_client: AnyProtoClient,
148 upstream_project_id: u64,
149 downstream: Option<(AnyProtoClient, ProjectId)>,
150 },
151}
152
153enum DownstreamUpdate {
154 UpdateRepository(RepositorySnapshot),
155 RemoveRepository(RepositoryId),
156}
157
158struct LocalDownstreamState {
159 client: AnyProtoClient,
160 project_id: ProjectId,
161 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
162 _task: Task<Result<()>>,
163}
164
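/// A checkpoint of every repository in the store, keyed by work directory
/// path, which can later be restored or compared against another checkpoint.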
165#[derive(Clone, Debug)]
166pub struct GitStoreCheckpoint {
167 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
168}
169
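/// A path within a repository together with its combined index and worktree
/// status.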
170#[derive(Clone, Debug, PartialEq, Eq)]
171pub struct StatusEntry {
172 pub repo_path: RepoPath,
173 pub status: FileStatus,
174}
175
176impl StatusEntry {
177 fn to_proto(&self) -> proto::StatusEntry {
178 let simple_status = match self.status {
179 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
180 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
181 FileStatus::Tracked(TrackedStatus {
182 index_status,
183 worktree_status,
184 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
185 worktree_status
186 } else {
187 index_status
188 }),
189 };
190
191 proto::StatusEntry {
192 repo_path: self.repo_path.as_ref().to_proto(),
193 simple_status,
194 status: Some(status_to_proto(self.status)),
195 }
196 }
197}
198
199impl TryFrom<proto::StatusEntry> for StatusEntry {
200 type Error = anyhow::Error;
201
202 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
203 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
204 let status = status_from_proto(value.simple_status, value.status)?;
205 Ok(Self { repo_path, status })
206 }
207}
208
209impl sum_tree::Item for StatusEntry {
210 type Summary = PathSummary<GitSummary>;
211
212 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
213 PathSummary {
214 max_path: self.repo_path.0.clone(),
215 item_summary: self.status.summary(),
216 }
217 }
218}
219
220impl sum_tree::KeyedItem for StatusEntry {
221 type Key = PathKey;
222
223 fn key(&self) -> Self::Key {
224 PathKey(self.repo_path.0.clone())
225 }
226}
227
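/// Identifier for a repository tracked by a [`GitStore`].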
228#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
229pub struct RepositoryId(pub u64);
230
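/// Details of an in-progress merge: the conflicted paths, the prospective
/// commit message, and the merge heads.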
231#[derive(Clone, Debug, Default, PartialEq, Eq)]
232pub struct MergeDetails {
233 pub conflicted_paths: TreeSet<RepoPath>,
234 pub message: Option<SharedString>,
235 pub heads: Vec<Option<SharedString>>,
236}
237
238#[derive(Clone, Debug, PartialEq, Eq)]
239pub struct RepositorySnapshot {
240 pub id: RepositoryId,
241 pub statuses_by_path: SumTree<StatusEntry>,
242 pub work_directory_abs_path: Arc<Path>,
243 pub branch: Option<Branch>,
244 pub head_commit: Option<CommitDetails>,
245 pub scan_id: u64,
246 pub merge: MergeDetails,
247 pub remote_origin_url: Option<String>,
248 pub remote_upstream_url: Option<String>,
249 pub stash_entries: GitStash,
250}
251
252type JobId = u64;
253
254#[derive(Clone, Debug, PartialEq, Eq)]
255pub struct JobInfo {
256 pub start: Instant,
257 pub message: SharedString,
258}
259
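/// A single Git repository tracked by the [`GitStore`]: the latest snapshot of
/// its state plus the job queue used to run Git operations against it.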
260pub struct Repository {
261 this: WeakEntity<Self>,
262 snapshot: RepositorySnapshot,
263 commit_message_buffer: Option<Entity<Buffer>>,
264 git_store: WeakEntity<GitStore>,
265 // For a local repository, holds paths that have had worktree events since the last status scan completed,
266 // and that should be examined during the next status scan.
267 paths_needing_status_update: BTreeSet<RepoPath>,
268 job_sender: mpsc::UnboundedSender<GitJob>,
269 active_jobs: HashMap<JobId, JobInfo>,
270 job_id: JobId,
271 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
272 latest_askpass_id: u64,
273}
274
275impl std::ops::Deref for Repository {
276 type Target = RepositorySnapshot;
277
278 fn deref(&self) -> &Self::Target {
279 &self.snapshot
280 }
281}
282
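/// How repository operations are performed: against a local backend (with the
/// environment its commands should run in), or by sending requests to the
/// remote project's host.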
283#[derive(Clone)]
284pub enum RepositoryState {
285 Local {
286 backend: Arc<dyn GitRepository>,
287 environment: Arc<HashMap<String, String>>,
288 },
289 Remote {
290 project_id: ProjectId,
291 client: AnyProtoClient,
292 },
293}
294
295#[derive(Clone, Debug)]
296pub enum RepositoryEvent {
297 Updated { full_scan: bool, new_instance: bool },
298 MergeHeadsChanged,
299}
300
301#[derive(Clone, Debug)]
302pub struct JobsUpdated;
303
304#[derive(Debug)]
305pub enum GitStoreEvent {
306 ActiveRepositoryChanged(Option<RepositoryId>),
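    /// The final `bool` is true when the updated repository is the active one.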
307 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
308 RepositoryAdded(RepositoryId),
309 RepositoryRemoved(RepositoryId),
310 IndexWriteError(anyhow::Error),
311 JobsUpdated,
312 ConflictsUpdated,
313}
314
315impl EventEmitter<RepositoryEvent> for Repository {}
316impl EventEmitter<JobsUpdated> for Repository {}
317impl EventEmitter<GitStoreEvent> for GitStore {}
318
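/// A queued Git operation, run with access to the repository's state. Jobs may
/// carry a [`GitJobKey`] so that redundant queued work can be coalesced.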
319pub struct GitJob {
320 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
321 key: Option<GitJobKey>,
322}
323
324#[derive(PartialEq, Eq)]
325enum GitJobKey {
326 WriteIndex(RepoPath),
327 ReloadBufferDiffBases,
328 RefreshStatuses,
329 ReloadGitState,
330}
331
332impl GitStore {
333 pub fn local(
334 worktree_store: &Entity<WorktreeStore>,
335 buffer_store: Entity<BufferStore>,
336 environment: Entity<ProjectEnvironment>,
337 fs: Arc<dyn Fs>,
338 cx: &mut Context<Self>,
339 ) -> Self {
340 Self::new(
341 worktree_store.clone(),
342 buffer_store,
343 GitStoreState::Local {
344 next_repository_id: Arc::new(AtomicU64::new(1)),
345 downstream: None,
346 project_environment: environment,
347 fs,
348 },
349 cx,
350 )
351 }
352
353 pub fn remote(
354 worktree_store: &Entity<WorktreeStore>,
355 buffer_store: Entity<BufferStore>,
356 upstream_client: AnyProtoClient,
357 project_id: u64,
358 cx: &mut Context<Self>,
359 ) -> Self {
360 Self::new(
361 worktree_store.clone(),
362 buffer_store,
363 GitStoreState::Remote {
364 upstream_client,
365 upstream_project_id: project_id,
366 downstream: None,
367 },
368 cx,
369 )
370 }
371
372 fn new(
373 worktree_store: Entity<WorktreeStore>,
374 buffer_store: Entity<BufferStore>,
375 state: GitStoreState,
376 cx: &mut Context<Self>,
377 ) -> Self {
378 let _subscriptions = vec![
379 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
380 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
381 ];
382
383 GitStore {
384 state,
385 buffer_store,
386 worktree_store,
387 repositories: HashMap::default(),
388 active_repo_id: None,
389 _subscriptions,
390 loading_diffs: HashMap::default(),
391 shared_diffs: HashMap::default(),
392 diffs: HashMap::default(),
393 }
394 }
395
396 pub fn init(client: &AnyProtoClient) {
397 client.add_entity_request_handler(Self::handle_get_remotes);
398 client.add_entity_request_handler(Self::handle_get_branches);
399 client.add_entity_request_handler(Self::handle_get_default_branch);
400 client.add_entity_request_handler(Self::handle_change_branch);
401 client.add_entity_request_handler(Self::handle_create_branch);
402 client.add_entity_request_handler(Self::handle_rename_branch);
403 client.add_entity_request_handler(Self::handle_git_init);
404 client.add_entity_request_handler(Self::handle_push);
405 client.add_entity_request_handler(Self::handle_pull);
406 client.add_entity_request_handler(Self::handle_fetch);
407 client.add_entity_request_handler(Self::handle_stage);
408 client.add_entity_request_handler(Self::handle_unstage);
409 client.add_entity_request_handler(Self::handle_stash);
410 client.add_entity_request_handler(Self::handle_stash_pop);
411 client.add_entity_request_handler(Self::handle_stash_apply);
412 client.add_entity_request_handler(Self::handle_stash_drop);
413 client.add_entity_request_handler(Self::handle_commit);
414 client.add_entity_request_handler(Self::handle_reset);
415 client.add_entity_request_handler(Self::handle_show);
416 client.add_entity_request_handler(Self::handle_load_commit_diff);
417 client.add_entity_request_handler(Self::handle_checkout_files);
418 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
419 client.add_entity_request_handler(Self::handle_set_index_text);
420 client.add_entity_request_handler(Self::handle_askpass);
421 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
422 client.add_entity_request_handler(Self::handle_git_diff);
423 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
424 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
425 client.add_entity_message_handler(Self::handle_update_diff_bases);
426 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
427 client.add_entity_request_handler(Self::handle_blame_buffer);
428 client.add_entity_message_handler(Self::handle_update_repository);
429 client.add_entity_message_handler(Self::handle_remove_repository);
430 client.add_entity_request_handler(Self::handle_git_clone);
431 }
432
433 pub fn is_local(&self) -> bool {
434 matches!(self.state, GitStoreState::Local { .. })
435 }
436
437 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
438 match &mut self.state {
439 GitStoreState::Remote {
440 downstream: downstream_client,
441 ..
442 } => {
443 for repo in self.repositories.values() {
444 let update = repo.read(cx).snapshot.initial_update(project_id);
445 for update in split_repository_update(update) {
446 client.send(update).log_err();
447 }
448 }
449 *downstream_client = Some((client, ProjectId(project_id)));
450 }
451 GitStoreState::Local {
452 downstream: downstream_client,
453 ..
454 } => {
455 let mut snapshots = HashMap::default();
456 let (updates_tx, mut updates_rx) = mpsc::unbounded();
457 for repo in self.repositories.values() {
458 updates_tx
459 .unbounded_send(DownstreamUpdate::UpdateRepository(
460 repo.read(cx).snapshot.clone(),
461 ))
462 .ok();
463 }
464 *downstream_client = Some(LocalDownstreamState {
465 client: client.clone(),
466 project_id: ProjectId(project_id),
467 updates_tx,
468 _task: cx.spawn(async move |this, cx| {
469 cx.background_spawn(async move {
470 while let Some(update) = updates_rx.next().await {
471 match update {
472 DownstreamUpdate::UpdateRepository(snapshot) => {
473 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
474 {
475 let update =
476 snapshot.build_update(old_snapshot, project_id);
477 *old_snapshot = snapshot;
478 for update in split_repository_update(update) {
479 client.send(update)?;
480 }
481 } else {
482 let update = snapshot.initial_update(project_id);
483 for update in split_repository_update(update) {
484 client.send(update)?;
485 }
486 snapshots.insert(snapshot.id, snapshot);
487 }
488 }
489 DownstreamUpdate::RemoveRepository(id) => {
490 client.send(proto::RemoveRepository {
491 project_id,
492 id: id.to_proto(),
493 })?;
494 }
495 }
496 }
497 anyhow::Ok(())
498 })
499 .await
500 .ok();
501 this.update(cx, |this, _| {
502 if let GitStoreState::Local {
503 downstream: downstream_client,
504 ..
505 } = &mut this.state
506 {
507 downstream_client.take();
508 } else {
509 unreachable!("unshared called on remote store");
510 }
511 })
512 }),
513 });
514 }
515 }
516 }
517
518 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
519 match &mut self.state {
520 GitStoreState::Local {
521 downstream: downstream_client,
522 ..
523 } => {
524 downstream_client.take();
525 }
526 GitStoreState::Remote {
527 downstream: downstream_client,
528 ..
529 } => {
530 downstream_client.take();
531 }
532 }
533 self.shared_diffs.clear();
534 }
535
536 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
537 self.shared_diffs.remove(peer_id);
538 }
539
540 pub fn active_repository(&self) -> Option<Entity<Repository>> {
541 self.active_repo_id
542 .as_ref()
543 .map(|id| self.repositories[id].clone())
544 }
545
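    /// Returns the buffer's diff against the index, creating it if necessary
    /// and waiting for any in-progress recalculation to complete.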
546 pub fn open_unstaged_diff(
547 &mut self,
548 buffer: Entity<Buffer>,
549 cx: &mut Context<Self>,
550 ) -> Task<Result<Entity<BufferDiff>>> {
551 let buffer_id = buffer.read(cx).remote_id();
552 if let Some(diff_state) = self.diffs.get(&buffer_id)
553 && let Some(unstaged_diff) = diff_state
554 .read(cx)
555 .unstaged_diff
556 .as_ref()
557 .and_then(|weak| weak.upgrade())
558 {
559 if let Some(task) =
560 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
561 {
562 return cx.background_executor().spawn(async move {
563 task.await;
564 Ok(unstaged_diff)
565 });
566 }
567 return Task::ready(Ok(unstaged_diff));
568 }
569
570 let Some((repo, repo_path)) =
571 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
572 else {
573 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
574 };
575
576 let task = self
577 .loading_diffs
578 .entry((buffer_id, DiffKind::Unstaged))
579 .or_insert_with(|| {
580 let staged_text = repo.update(cx, |repo, cx| {
581 repo.load_staged_text(buffer_id, repo_path, cx)
582 });
583 cx.spawn(async move |this, cx| {
584 Self::open_diff_internal(
585 this,
586 DiffKind::Unstaged,
587 staged_text.await.map(DiffBasesChange::SetIndex),
588 buffer,
589 cx,
590 )
591 .await
592 .map_err(Arc::new)
593 })
594 .shared()
595 })
596 .clone();
597
598 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
599 }
600
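    /// Returns the buffer's diff against HEAD, creating it if necessary; the
    /// unstaged diff is attached as its secondary diff.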
601 pub fn open_uncommitted_diff(
602 &mut self,
603 buffer: Entity<Buffer>,
604 cx: &mut Context<Self>,
605 ) -> Task<Result<Entity<BufferDiff>>> {
606 let buffer_id = buffer.read(cx).remote_id();
607
608 if let Some(diff_state) = self.diffs.get(&buffer_id)
609 && let Some(uncommitted_diff) = diff_state
610 .read(cx)
611 .uncommitted_diff
612 .as_ref()
613 .and_then(|weak| weak.upgrade())
614 {
615 if let Some(task) =
616 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
617 {
618 return cx.background_executor().spawn(async move {
619 task.await;
620 Ok(uncommitted_diff)
621 });
622 }
623 return Task::ready(Ok(uncommitted_diff));
624 }
625
626 let Some((repo, repo_path)) =
627 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
628 else {
629 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
630 };
631
632 let task = self
633 .loading_diffs
634 .entry((buffer_id, DiffKind::Uncommitted))
635 .or_insert_with(|| {
636 let changes = repo.update(cx, |repo, cx| {
637 repo.load_committed_text(buffer_id, repo_path, cx)
638 });
639
640 cx.spawn(async move |this, cx| {
641 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
642 .await
643 .map_err(Arc::new)
644 })
645 .shared()
646 })
647 .clone();
648
649 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
650 }
651
652 async fn open_diff_internal(
653 this: WeakEntity<Self>,
654 kind: DiffKind,
655 texts: Result<DiffBasesChange>,
656 buffer_entity: Entity<Buffer>,
657 cx: &mut AsyncApp,
658 ) -> Result<Entity<BufferDiff>> {
659 let diff_bases_change = match texts {
660 Err(e) => {
661 this.update(cx, |this, cx| {
662 let buffer = buffer_entity.read(cx);
663 let buffer_id = buffer.remote_id();
664 this.loading_diffs.remove(&(buffer_id, kind));
665 })?;
666 return Err(e);
667 }
668 Ok(change) => change,
669 };
670
671 this.update(cx, |this, cx| {
672 let buffer = buffer_entity.read(cx);
673 let buffer_id = buffer.remote_id();
674 let language = buffer.language().cloned();
675 let language_registry = buffer.language_registry();
676 let text_snapshot = buffer.text_snapshot();
677 this.loading_diffs.remove(&(buffer_id, kind));
678
679 let git_store = cx.weak_entity();
680 let diff_state = this
681 .diffs
682 .entry(buffer_id)
683 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
684
685 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
686
687 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
688 diff_state.update(cx, |diff_state, cx| {
689 diff_state.language = language;
690 diff_state.language_registry = language_registry;
691
692 match kind {
693 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
694 DiffKind::Uncommitted => {
695 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
696 diff
697 } else {
698 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
699 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
700 unstaged_diff
701 };
702
703 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
704 diff_state.uncommitted_diff = Some(diff.downgrade())
705 }
706 }
707
708 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
709 let rx = diff_state.wait_for_recalculation();
710
711 anyhow::Ok(async move {
712 if let Some(rx) = rx {
713 rx.await;
714 }
715 Ok(diff)
716 })
717 })
718 })??
719 .await
720 }
721
722 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
723 let diff_state = self.diffs.get(&buffer_id)?;
724 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
725 }
726
727 pub fn get_uncommitted_diff(
728 &self,
729 buffer_id: BufferId,
730 cx: &App,
731 ) -> Option<Entity<BufferDiff>> {
732 let diff_state = self.diffs.get(&buffer_id)?;
733 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
734 }
735
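    /// Returns the buffer's conflict set, creating it if necessary, and
    /// schedules a reparse of its conflict markers.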
736 pub fn open_conflict_set(
737 &mut self,
738 buffer: Entity<Buffer>,
739 cx: &mut Context<Self>,
740 ) -> Entity<ConflictSet> {
741 log::debug!("open conflict set");
742 let buffer_id = buffer.read(cx).remote_id();
743
744 if let Some(git_state) = self.diffs.get(&buffer_id)
745 && let Some(conflict_set) = git_state
746 .read(cx)
747 .conflict_set
748 .as_ref()
749 .and_then(|weak| weak.upgrade())
750 {
751 let conflict_set = conflict_set;
752 let buffer_snapshot = buffer.read(cx).text_snapshot();
753
754 git_state.update(cx, |state, cx| {
755 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
756 });
757
758 return conflict_set;
759 }
760
761 let is_unmerged = self
762 .repository_and_path_for_buffer_id(buffer_id, cx)
763 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
764 let git_store = cx.weak_entity();
765 let buffer_git_state = self
766 .diffs
767 .entry(buffer_id)
768 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
769 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
770
771 self._subscriptions
772 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
773 cx.emit(GitStoreEvent::ConflictsUpdated);
774 }));
775
776 buffer_git_state.update(cx, |state, cx| {
777 state.conflict_set = Some(conflict_set.downgrade());
778 let buffer_snapshot = buffer.read(cx).text_snapshot();
779 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
780 });
781
782 conflict_set
783 }
784
785 pub fn project_path_git_status(
786 &self,
787 project_path: &ProjectPath,
788 cx: &App,
789 ) -> Option<FileStatus> {
790 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
791 Some(repo.read(cx).status_for_path(&repo_path)?.status)
792 }
793
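    /// Captures a checkpoint of every repository in the store, keyed by work
    /// directory path.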
794 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
795 let mut work_directory_abs_paths = Vec::new();
796 let mut checkpoints = Vec::new();
797 for repository in self.repositories.values() {
798 repository.update(cx, |repository, _| {
799 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
800 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
801 });
802 }
803
804 cx.background_executor().spawn(async move {
805 let checkpoints = future::try_join_all(checkpoints).await?;
806 Ok(GitStoreCheckpoint {
807 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
808 .into_iter()
809 .zip(checkpoints)
810 .collect(),
811 })
812 })
813 }
814
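    /// Restores each repository to the state recorded in the given checkpoint.
    /// Repositories without an entry in the checkpoint are left untouched.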
815 pub fn restore_checkpoint(
816 &self,
817 checkpoint: GitStoreCheckpoint,
818 cx: &mut App,
819 ) -> Task<Result<()>> {
820 let repositories_by_work_dir_abs_path = self
821 .repositories
822 .values()
823 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
824 .collect::<HashMap<_, _>>();
825
826 let mut tasks = Vec::new();
827 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
828 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
829 let restore = repository.update(cx, |repository, _| {
830 repository.restore_checkpoint(checkpoint)
831 });
832 tasks.push(async move { restore.await? });
833 }
834 }
835 cx.background_spawn(async move {
836 future::try_join_all(tasks).await?;
837 Ok(())
838 })
839 }
840
841 /// Compares two checkpoints, returning true if they are equal.
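    ///
    /// A minimal sketch of intended usage (not a doctest; `git_store` and `cx`
    /// are assumed to be in scope):
    ///
    /// ```ignore
    /// let before = git_store.checkpoint(cx).await?;
    /// // ...working trees change here...
    /// let after = git_store.checkpoint(cx).await?;
    /// let unchanged = git_store.compare_checkpoints(before, after, cx).await?;
    /// ```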
842 pub fn compare_checkpoints(
843 &self,
844 left: GitStoreCheckpoint,
845 mut right: GitStoreCheckpoint,
846 cx: &mut App,
847 ) -> Task<Result<bool>> {
848 let repositories_by_work_dir_abs_path = self
849 .repositories
850 .values()
851 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
852 .collect::<HashMap<_, _>>();
853
854 let mut tasks = Vec::new();
855 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
856 if let Some(right_checkpoint) = right
857 .checkpoints_by_work_dir_abs_path
858 .remove(&work_dir_abs_path)
859 {
860 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
861 {
862 let compare = repository.update(cx, |repository, _| {
863 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
864 });
865
866 tasks.push(async move { compare.await? });
867 }
868 } else {
869 return Task::ready(Ok(false));
870 }
871 }
872 cx.background_spawn(async move {
873 Ok(future::try_join_all(tasks)
874 .await?
875 .into_iter()
876 .all(|result| result))
877 })
878 }
879
880 /// Blames a buffer.
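    ///
    /// If `version` is given, the buffer content at that version is blamed;
    /// otherwise the current content is used.
    ///
    /// A minimal sketch (not a doctest; `git_store`, `buffer`, and `cx` are
    /// assumed to be in scope):
    ///
    /// ```ignore
    /// let blame = git_store.blame_buffer(&buffer, None, cx).await?;
    /// ```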
881 pub fn blame_buffer(
882 &self,
883 buffer: &Entity<Buffer>,
884 version: Option<clock::Global>,
885 cx: &mut App,
886 ) -> Task<Result<Option<Blame>>> {
887 let buffer = buffer.read(cx);
888 let Some((repo, repo_path)) =
889 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
890 else {
891 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
892 };
893 let content = match &version {
894 Some(version) => buffer.rope_for_version(version),
895 None => buffer.as_rope().clone(),
896 };
897 let version = version.unwrap_or(buffer.version());
898 let buffer_id = buffer.remote_id();
899
900 let rx = repo.update(cx, |repo, _| {
901 repo.send_job(None, move |state, _| async move {
902 match state {
903 RepositoryState::Local { backend, .. } => backend
904 .blame(repo_path.clone(), content)
905 .await
906 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
907 .map(Some),
908 RepositoryState::Remote { project_id, client } => {
909 let response = client
910 .request(proto::BlameBuffer {
911 project_id: project_id.to_proto(),
912 buffer_id: buffer_id.into(),
913 version: serialize_version(&version),
914 })
915 .await?;
916 Ok(deserialize_blame_buffer_response(response))
917 }
918 }
919 })
920 });
921
922 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
923 }
924
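    /// Builds a permalink URL to the selected rows of the buffer on its Git
    /// hosting provider. For Rust sources in the Cargo registry that are not
    /// inside a repository, the permalink is derived from crate metadata
    /// instead.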
925 pub fn get_permalink_to_line(
926 &self,
927 buffer: &Entity<Buffer>,
928 selection: Range<u32>,
929 cx: &mut App,
930 ) -> Task<Result<url::Url>> {
931 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
932 return Task::ready(Err(anyhow!("buffer has no file")));
933 };
934
935 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
936 &(file.worktree.read(cx).id(), file.path.clone()).into(),
937 cx,
938 ) else {
939 // If we're not in a Git repo, check whether this is a Rust source
940 // file in the Cargo registry (presumably opened with go-to-definition
941 // from a normal Rust file). If so, we can put together a permalink
942 // using crate metadata.
943 if buffer
944 .read(cx)
945 .language()
946 .is_none_or(|lang| lang.name() != "Rust".into())
947 {
948 return Task::ready(Err(anyhow!("no permalink available")));
949 }
950 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
951 return Task::ready(Err(anyhow!("no permalink available")));
952 };
953 return cx.spawn(async move |cx| {
954 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
955 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
956 .context("no permalink available")
957 });
958
959 // TODO remote case
960 };
961
962 let buffer_id = buffer.read(cx).remote_id();
963 let branch = repo.read(cx).branch.clone();
964 let remote = branch
965 .as_ref()
966 .and_then(|b| b.upstream.as_ref())
967 .and_then(|b| b.remote_name())
968 .unwrap_or("origin")
969 .to_string();
970
971 let rx = repo.update(cx, |repo, _| {
972 repo.send_job(None, move |state, cx| async move {
973 match state {
974 RepositoryState::Local { backend, .. } => {
975 let origin_url = backend
976 .remote_url(&remote)
977 .with_context(|| format!("remote \"{remote}\" not found"))?;
978
979 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
980
981 let provider_registry =
982 cx.update(GitHostingProviderRegistry::default_global)?;
983
984 let (provider, remote) =
985 parse_git_remote_url(provider_registry, &origin_url)
986 .context("parsing Git remote URL")?;
987
988 let path = repo_path.to_str().with_context(|| {
989 format!("converting repo path {repo_path:?} to string")
990 })?;
991
992 Ok(provider.build_permalink(
993 remote,
994 BuildPermalinkParams {
995 sha: &sha,
996 path,
997 selection: Some(selection),
998 },
999 ))
1000 }
1001 RepositoryState::Remote { project_id, client } => {
1002 let response = client
1003 .request(proto::GetPermalinkToLine {
1004 project_id: project_id.to_proto(),
1005 buffer_id: buffer_id.into(),
1006 selection: Some(proto::Range {
1007 start: selection.start as u64,
1008 end: selection.end as u64,
1009 }),
1010 })
1011 .await?;
1012
1013 url::Url::parse(&response.permalink).context("failed to parse permalink")
1014 }
1015 }
1016 })
1017 });
1018 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1019 }
1020
1021 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1022 match &self.state {
1023 GitStoreState::Local {
1024 downstream: downstream_client,
1025 ..
1026 } => downstream_client
1027 .as_ref()
1028 .map(|state| (state.client.clone(), state.project_id)),
1029 GitStoreState::Remote {
1030 downstream: downstream_client,
1031 ..
1032 } => downstream_client.clone(),
1033 }
1034 }
1035
1036 fn upstream_client(&self) -> Option<AnyProtoClient> {
1037 match &self.state {
1038 GitStoreState::Local { .. } => None,
1039 GitStoreState::Remote {
1040 upstream_client, ..
1041 } => Some(upstream_client.clone()),
1042 }
1043 }
1044
1045 fn on_worktree_store_event(
1046 &mut self,
1047 worktree_store: Entity<WorktreeStore>,
1048 event: &WorktreeStoreEvent,
1049 cx: &mut Context<Self>,
1050 ) {
1051 let GitStoreState::Local {
1052 project_environment,
1053 downstream,
1054 next_repository_id,
1055 fs,
1056 } = &self.state
1057 else {
1058 return;
1059 };
1060
1061 match event {
1062 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1063 if let Some(worktree) = self
1064 .worktree_store
1065 .read(cx)
1066 .worktree_for_id(*worktree_id, cx)
1067 {
1068 let paths_by_git_repo =
1069 self.process_updated_entries(&worktree, updated_entries, cx);
1070 let downstream = downstream
1071 .as_ref()
1072 .map(|downstream| downstream.updates_tx.clone());
1073 cx.spawn(async move |_, cx| {
1074 let paths_by_git_repo = paths_by_git_repo.await;
1075 for (repo, paths) in paths_by_git_repo {
1076 repo.update(cx, |repo, cx| {
1077 repo.paths_changed(paths, downstream.clone(), cx);
1078 })
1079 .ok();
1080 }
1081 })
1082 .detach();
1083 }
1084 }
1085 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1086 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1087 else {
1088 return;
1089 };
1090 if !worktree.read(cx).is_visible() {
1091 log::debug!(
1092 "not adding repositories for local worktree {:?} because it's not visible",
1093 worktree.read(cx).abs_path()
1094 );
1095 return;
1096 }
1097 self.update_repositories_from_worktree(
1098 project_environment.clone(),
1099 next_repository_id.clone(),
1100 downstream
1101 .as_ref()
1102 .map(|downstream| downstream.updates_tx.clone()),
1103 changed_repos.clone(),
1104 fs.clone(),
1105 cx,
1106 );
1107 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1108 }
1109 _ => {}
1110 }
1111 }
1112
1113 fn on_repository_event(
1114 &mut self,
1115 repo: Entity<Repository>,
1116 event: &RepositoryEvent,
1117 cx: &mut Context<Self>,
1118 ) {
1119 let id = repo.read(cx).id;
1120 let repo_snapshot = repo.read(cx).snapshot.clone();
1121 for (buffer_id, diff) in self.diffs.iter() {
1122 if let Some((buffer_repo, repo_path)) =
1123 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1124 && buffer_repo == repo
1125 {
1126 diff.update(cx, |diff, cx| {
1127 if let Some(conflict_set) = &diff.conflict_set {
1128 let conflict_status_changed =
1129 conflict_set.update(cx, |conflict_set, cx| {
1130 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1131 conflict_set.set_has_conflict(has_conflict, cx)
1132 })?;
1133 if conflict_status_changed {
1134 let buffer_store = self.buffer_store.read(cx);
1135 if let Some(buffer) = buffer_store.get(*buffer_id) {
1136 let _ = diff
1137 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1138 }
1139 }
1140 }
1141 anyhow::Ok(())
1142 })
1143 .ok();
1144 }
1145 }
1146 cx.emit(GitStoreEvent::RepositoryUpdated(
1147 id,
1148 event.clone(),
1149 self.active_repo_id == Some(id),
1150 ))
1151 }
1152
1153 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1154 cx.emit(GitStoreEvent::JobsUpdated)
1155 }
1156
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1158 fn update_repositories_from_worktree(
1159 &mut self,
1160 project_environment: Entity<ProjectEnvironment>,
1161 next_repository_id: Arc<AtomicU64>,
1162 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1163 updated_git_repositories: UpdatedGitRepositoriesSet,
1164 fs: Arc<dyn Fs>,
1165 cx: &mut Context<Self>,
1166 ) {
1167 let mut removed_ids = Vec::new();
1168 for update in updated_git_repositories.iter() {
1169 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1170 let existing_work_directory_abs_path =
1171 repo.read(cx).work_directory_abs_path.clone();
1172 Some(&existing_work_directory_abs_path)
1173 == update.old_work_directory_abs_path.as_ref()
1174 || Some(&existing_work_directory_abs_path)
1175 == update.new_work_directory_abs_path.as_ref()
1176 }) {
1177 if let Some(new_work_directory_abs_path) =
1178 update.new_work_directory_abs_path.clone()
1179 {
1180 existing.update(cx, |existing, cx| {
1181 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1182 existing.schedule_scan(updates_tx.clone(), cx);
1183 });
1184 } else {
1185 removed_ids.push(*id);
1186 }
1187 } else if let UpdatedGitRepository {
1188 new_work_directory_abs_path: Some(work_directory_abs_path),
1189 dot_git_abs_path: Some(dot_git_abs_path),
1190 repository_dir_abs_path: Some(repository_dir_abs_path),
1191 common_dir_abs_path: Some(common_dir_abs_path),
1192 ..
1193 } = update
1194 {
1195 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1196 let git_store = cx.weak_entity();
1197 let repo = cx.new(|cx| {
1198 let mut repo = Repository::local(
1199 id,
1200 work_directory_abs_path.clone(),
1201 dot_git_abs_path.clone(),
1202 repository_dir_abs_path.clone(),
1203 common_dir_abs_path.clone(),
1204 project_environment.downgrade(),
1205 fs.clone(),
1206 git_store,
1207 cx,
1208 );
1209 repo.schedule_scan(updates_tx.clone(), cx);
1210 repo
1211 });
1212 self._subscriptions
1213 .push(cx.subscribe(&repo, Self::on_repository_event));
1214 self._subscriptions
1215 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1216 self.repositories.insert(id, repo);
1217 cx.emit(GitStoreEvent::RepositoryAdded(id));
1218 self.active_repo_id.get_or_insert_with(|| {
1219 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1220 id
1221 });
1222 }
1223 }
1224
1225 for id in removed_ids {
1226 if self.active_repo_id == Some(id) {
1227 self.active_repo_id = None;
1228 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1229 }
1230 self.repositories.remove(&id);
1231 if let Some(updates_tx) = updates_tx.as_ref() {
1232 updates_tx
1233 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1234 .ok();
1235 }
1236 }
1237 }
1238
1239 fn on_buffer_store_event(
1240 &mut self,
1241 _: Entity<BufferStore>,
1242 event: &BufferStoreEvent,
1243 cx: &mut Context<Self>,
1244 ) {
1245 match event {
1246 BufferStoreEvent::BufferAdded(buffer) => {
1247 cx.subscribe(buffer, |this, buffer, event, cx| {
1248 if let BufferEvent::LanguageChanged = event {
1249 let buffer_id = buffer.read(cx).remote_id();
1250 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1251 diff_state.update(cx, |diff_state, cx| {
1252 diff_state.buffer_language_changed(buffer, cx);
1253 });
1254 }
1255 }
1256 })
1257 .detach();
1258 }
1259 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1260 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1261 diffs.remove(buffer_id);
1262 }
1263 }
1264 BufferStoreEvent::BufferDropped(buffer_id) => {
1265 self.diffs.remove(buffer_id);
1266 for diffs in self.shared_diffs.values_mut() {
1267 diffs.remove(buffer_id);
1268 }
1269 }
1270
1271 _ => {}
1272 }
1273 }
1274
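    /// Recalculates diffs and reparses conflict markers for the given buffers,
    /// returning a future that resolves once all of that work has finished.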
1275 pub fn recalculate_buffer_diffs(
1276 &mut self,
1277 buffers: Vec<Entity<Buffer>>,
1278 cx: &mut Context<Self>,
1279 ) -> impl Future<Output = ()> + use<> {
1280 let mut futures = Vec::new();
1281 for buffer in buffers {
1282 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1283 let buffer = buffer.read(cx).text_snapshot();
1284 diff_state.update(cx, |diff_state, cx| {
1285 diff_state.recalculate_diffs(buffer.clone(), cx);
1286 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1287 });
1288 futures.push(diff_state.update(cx, |diff_state, cx| {
1289 diff_state
1290 .reparse_conflict_markers(buffer, cx)
1291 .map(|_| {})
1292 .boxed()
1293 }));
1294 }
1295 }
1296 async move {
1297 futures::future::join_all(futures).await;
1298 }
1299 }
1300
1301 fn on_buffer_diff_event(
1302 &mut self,
1303 diff: Entity<buffer_diff::BufferDiff>,
1304 event: &BufferDiffEvent,
1305 cx: &mut Context<Self>,
1306 ) {
1307 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1308 let buffer_id = diff.read(cx).buffer_id;
1309 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1310 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1311 diff_state.hunk_staging_operation_count += 1;
1312 diff_state.hunk_staging_operation_count
1313 });
1314 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1315 let recv = repo.update(cx, |repo, cx| {
1316 log::debug!("hunks changed for {}", path.display());
1317 repo.spawn_set_index_text_job(
1318 path,
1319 new_index_text.as_ref().map(|rope| rope.to_string()),
1320 Some(hunk_staging_operation_count),
1321 cx,
1322 )
1323 });
1324 let diff = diff.downgrade();
1325 cx.spawn(async move |this, cx| {
1326 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1327 diff.update(cx, |diff, cx| {
1328 diff.clear_pending_hunks(cx);
1329 })
1330 .ok();
1331 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1332 .ok();
1333 }
1334 })
1335 .detach();
1336 }
1337 }
1338 }
1339 }
1340
1341 fn local_worktree_git_repos_changed(
1342 &mut self,
1343 worktree: Entity<Worktree>,
1344 changed_repos: &UpdatedGitRepositoriesSet,
1345 cx: &mut Context<Self>,
1346 ) {
1347 log::debug!("local worktree repos changed");
1348 debug_assert!(worktree.read(cx).is_local());
1349
1350 for repository in self.repositories.values() {
1351 repository.update(cx, |repository, cx| {
1352 let repo_abs_path = &repository.work_directory_abs_path;
1353 if changed_repos.iter().any(|update| {
1354 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1355 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1356 }) {
1357 repository.reload_buffer_diff_bases(cx);
1358 }
1359 });
1360 }
1361 }
1362
1363 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1364 &self.repositories
1365 }
1366
1367 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1368 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1369 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1370 Some(status.status)
1371 }
1372
1373 pub fn repository_and_path_for_buffer_id(
1374 &self,
1375 buffer_id: BufferId,
1376 cx: &App,
1377 ) -> Option<(Entity<Repository>, RepoPath)> {
1378 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1379 let project_path = buffer.read(cx).project_path(cx)?;
1380 self.repository_and_path_for_project_path(&project_path, cx)
1381 }
1382
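    /// Returns the repository containing the given project path, along with
    /// that path expressed relative to the repository's work directory. When
    /// repositories are nested, the innermost one wins.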
1383 pub fn repository_and_path_for_project_path(
1384 &self,
1385 path: &ProjectPath,
1386 cx: &App,
1387 ) -> Option<(Entity<Repository>, RepoPath)> {
1388 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1389 self.repositories
1390 .values()
1391 .filter_map(|repo| {
1392 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1393 Some((repo.clone(), repo_path))
1394 })
1395 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1396 }
1397
1398 pub fn git_init(
1399 &self,
1400 path: Arc<Path>,
1401 fallback_branch_name: String,
1402 cx: &App,
1403 ) -> Task<Result<()>> {
1404 match &self.state {
1405 GitStoreState::Local { fs, .. } => {
1406 let fs = fs.clone();
1407 cx.background_executor()
1408 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1409 }
1410 GitStoreState::Remote {
1411 upstream_client,
1412 upstream_project_id: project_id,
1413 ..
1414 } => {
1415 let client = upstream_client.clone();
1416 let project_id = *project_id;
1417 cx.background_executor().spawn(async move {
1418 client
1419 .request(proto::GitInit {
                            project_id,
1421 abs_path: path.to_string_lossy().to_string(),
1422 fallback_branch_name,
1423 })
1424 .await?;
1425 Ok(())
1426 })
1427 }
1428 }
1429 }
1430
1431 pub fn git_clone(
1432 &self,
1433 repo: String,
1434 path: impl Into<Arc<std::path::Path>>,
1435 cx: &App,
1436 ) -> Task<Result<()>> {
1437 let path = path.into();
1438 match &self.state {
1439 GitStoreState::Local { fs, .. } => {
1440 let fs = fs.clone();
1441 cx.background_executor()
1442 .spawn(async move { fs.git_clone(&repo, &path).await })
1443 }
1444 GitStoreState::Remote {
1445 upstream_client,
1446 upstream_project_id,
1447 ..
1448 } => {
1449 if upstream_client.is_via_collab() {
1450 return Task::ready(Err(anyhow!(
1451 "Git Clone isn't supported for project guests"
1452 )));
1453 }
1454 let request = upstream_client.request(proto::GitClone {
1455 project_id: *upstream_project_id,
1456 abs_path: path.to_string_lossy().to_string(),
1457 remote_repo: repo,
1458 });
1459
1460 cx.background_spawn(async move {
1461 let result = request.await?;
1462
1463 match result.success {
1464 true => Ok(()),
1465 false => Err(anyhow!("Git Clone failed")),
1466 }
1467 })
1468 }
1469 }
1470 }
1471
1472 async fn handle_update_repository(
1473 this: Entity<Self>,
1474 envelope: TypedEnvelope<proto::UpdateRepository>,
1475 mut cx: AsyncApp,
1476 ) -> Result<()> {
1477 this.update(&mut cx, |this, cx| {
1478 let mut update = envelope.payload;
1479
1480 let id = RepositoryId::from_proto(update.id);
1481 let client = this.upstream_client().context("no upstream client")?;
1482
1483 let mut is_new = false;
1484 let repo = this.repositories.entry(id).or_insert_with(|| {
1485 is_new = true;
1486 let git_store = cx.weak_entity();
1487 cx.new(|cx| {
1488 Repository::remote(
1489 id,
1490 Path::new(&update.abs_path).into(),
1491 ProjectId(update.project_id),
1492 client,
1493 git_store,
1494 cx,
1495 )
1496 })
1497 });
1498 if is_new {
1499 this._subscriptions
1500 .push(cx.subscribe(repo, Self::on_repository_event))
1501 }
1502
1503 repo.update(cx, {
1504 let update = update.clone();
1505 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1506 })?;
1507
1508 this.active_repo_id.get_or_insert_with(|| {
1509 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1510 id
1511 });
1512
1513 if let Some((client, project_id)) = this.downstream_client() {
1514 update.project_id = project_id.to_proto();
1515 client.send(update).log_err();
1516 }
1517 Ok(())
1518 })?
1519 }
1520
1521 async fn handle_remove_repository(
1522 this: Entity<Self>,
1523 envelope: TypedEnvelope<proto::RemoveRepository>,
1524 mut cx: AsyncApp,
1525 ) -> Result<()> {
1526 this.update(&mut cx, |this, cx| {
1527 let mut update = envelope.payload;
1528 let id = RepositoryId::from_proto(update.id);
1529 this.repositories.remove(&id);
1530 if let Some((client, project_id)) = this.downstream_client() {
1531 update.project_id = project_id.to_proto();
1532 client.send(update).log_err();
1533 }
1534 if this.active_repo_id == Some(id) {
1535 this.active_repo_id = None;
1536 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1537 }
1538 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1539 })
1540 }
1541
1542 async fn handle_git_init(
1543 this: Entity<Self>,
1544 envelope: TypedEnvelope<proto::GitInit>,
1545 cx: AsyncApp,
1546 ) -> Result<proto::Ack> {
1547 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1548 let name = envelope.payload.fallback_branch_name;
1549 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1550 .await?;
1551
1552 Ok(proto::Ack {})
1553 }
1554
1555 async fn handle_git_clone(
1556 this: Entity<Self>,
1557 envelope: TypedEnvelope<proto::GitClone>,
1558 cx: AsyncApp,
1559 ) -> Result<proto::GitCloneResponse> {
1560 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1561 let repo_name = envelope.payload.remote_repo;
1562 let result = cx
1563 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1564 .await;
1565
1566 Ok(proto::GitCloneResponse {
1567 success: result.is_ok(),
1568 })
1569 }
1570
1571 async fn handle_fetch(
1572 this: Entity<Self>,
1573 envelope: TypedEnvelope<proto::Fetch>,
1574 mut cx: AsyncApp,
1575 ) -> Result<proto::RemoteMessageResponse> {
1576 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1577 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1578 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1579 let askpass_id = envelope.payload.askpass_id;
1580
1581 let askpass = make_remote_delegate(
1582 this,
1583 envelope.payload.project_id,
1584 repository_id,
1585 askpass_id,
1586 &mut cx,
1587 );
1588
1589 let remote_output = repository_handle
1590 .update(&mut cx, |repository_handle, cx| {
1591 repository_handle.fetch(fetch_options, askpass, cx)
1592 })?
1593 .await??;
1594
1595 Ok(proto::RemoteMessageResponse {
1596 stdout: remote_output.stdout,
1597 stderr: remote_output.stderr,
1598 })
1599 }
1600
1601 async fn handle_push(
1602 this: Entity<Self>,
1603 envelope: TypedEnvelope<proto::Push>,
1604 mut cx: AsyncApp,
1605 ) -> Result<proto::RemoteMessageResponse> {
1606 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1607 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1608
1609 let askpass_id = envelope.payload.askpass_id;
1610 let askpass = make_remote_delegate(
1611 this,
1612 envelope.payload.project_id,
1613 repository_id,
1614 askpass_id,
1615 &mut cx,
1616 );
1617
1618 let options = envelope
1619 .payload
1620 .options
1621 .as_ref()
1622 .map(|_| match envelope.payload.options() {
1623 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1624 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1625 });
1626
1627 let branch_name = envelope.payload.branch_name.into();
1628 let remote_name = envelope.payload.remote_name.into();
1629
1630 let remote_output = repository_handle
1631 .update(&mut cx, |repository_handle, cx| {
1632 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1633 })?
1634 .await??;
1635 Ok(proto::RemoteMessageResponse {
1636 stdout: remote_output.stdout,
1637 stderr: remote_output.stderr,
1638 })
1639 }
1640
1641 async fn handle_pull(
1642 this: Entity<Self>,
1643 envelope: TypedEnvelope<proto::Pull>,
1644 mut cx: AsyncApp,
1645 ) -> Result<proto::RemoteMessageResponse> {
1646 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1647 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1648 let askpass_id = envelope.payload.askpass_id;
1649 let askpass = make_remote_delegate(
1650 this,
1651 envelope.payload.project_id,
1652 repository_id,
1653 askpass_id,
1654 &mut cx,
1655 );
1656
1657 let branch_name = envelope.payload.branch_name.into();
1658 let remote_name = envelope.payload.remote_name.into();
1659
1660 let remote_message = repository_handle
1661 .update(&mut cx, |repository_handle, cx| {
1662 repository_handle.pull(branch_name, remote_name, askpass, cx)
1663 })?
1664 .await??;
1665
1666 Ok(proto::RemoteMessageResponse {
1667 stdout: remote_message.stdout,
1668 stderr: remote_message.stderr,
1669 })
1670 }
1671
1672 async fn handle_stage(
1673 this: Entity<Self>,
1674 envelope: TypedEnvelope<proto::Stage>,
1675 mut cx: AsyncApp,
1676 ) -> Result<proto::Ack> {
1677 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1678 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1679
1680 let entries = envelope
1681 .payload
1682 .paths
1683 .into_iter()
1684 .map(PathBuf::from)
1685 .map(RepoPath::new)
1686 .collect();
1687
1688 repository_handle
1689 .update(&mut cx, |repository_handle, cx| {
1690 repository_handle.stage_entries(entries, cx)
1691 })?
1692 .await?;
1693 Ok(proto::Ack {})
1694 }
1695
1696 async fn handle_unstage(
1697 this: Entity<Self>,
1698 envelope: TypedEnvelope<proto::Unstage>,
1699 mut cx: AsyncApp,
1700 ) -> Result<proto::Ack> {
1701 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1702 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1703
1704 let entries = envelope
1705 .payload
1706 .paths
1707 .into_iter()
1708 .map(PathBuf::from)
1709 .map(RepoPath::new)
1710 .collect();
1711
1712 repository_handle
1713 .update(&mut cx, |repository_handle, cx| {
1714 repository_handle.unstage_entries(entries, cx)
1715 })?
1716 .await?;
1717
1718 Ok(proto::Ack {})
1719 }
1720
1721 async fn handle_stash(
1722 this: Entity<Self>,
1723 envelope: TypedEnvelope<proto::Stash>,
1724 mut cx: AsyncApp,
1725 ) -> Result<proto::Ack> {
1726 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1727 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1728
1729 let entries = envelope
1730 .payload
1731 .paths
1732 .into_iter()
1733 .map(PathBuf::from)
1734 .map(RepoPath::new)
1735 .collect();
1736
1737 repository_handle
1738 .update(&mut cx, |repository_handle, cx| {
1739 repository_handle.stash_entries(entries, cx)
1740 })?
1741 .await?;
1742
1743 Ok(proto::Ack {})
1744 }
1745
1746 async fn handle_stash_pop(
1747 this: Entity<Self>,
1748 envelope: TypedEnvelope<proto::StashPop>,
1749 mut cx: AsyncApp,
1750 ) -> Result<proto::Ack> {
1751 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1752 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1753 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1754
1755 repository_handle
1756 .update(&mut cx, |repository_handle, cx| {
1757 repository_handle.stash_pop(stash_index, cx)
1758 })?
1759 .await?;
1760
1761 Ok(proto::Ack {})
1762 }
1763
1764 async fn handle_stash_apply(
1765 this: Entity<Self>,
1766 envelope: TypedEnvelope<proto::StashApply>,
1767 mut cx: AsyncApp,
1768 ) -> Result<proto::Ack> {
1769 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1770 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1771 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1772
1773 repository_handle
1774 .update(&mut cx, |repository_handle, cx| {
1775 repository_handle.stash_apply(stash_index, cx)
1776 })?
1777 .await?;
1778
1779 Ok(proto::Ack {})
1780 }
1781
1782 async fn handle_stash_drop(
1783 this: Entity<Self>,
1784 envelope: TypedEnvelope<proto::StashDrop>,
1785 mut cx: AsyncApp,
1786 ) -> Result<proto::Ack> {
1787 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1788 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1789 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1790
1791 repository_handle
1792 .update(&mut cx, |repository_handle, cx| {
1793 repository_handle.stash_drop(stash_index, cx)
1794 })?
1795 .await??;
1796
1797 Ok(proto::Ack {})
1798 }
1799
1800 async fn handle_set_index_text(
1801 this: Entity<Self>,
1802 envelope: TypedEnvelope<proto::SetIndexText>,
1803 mut cx: AsyncApp,
1804 ) -> Result<proto::Ack> {
1805 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1806 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1807 let repo_path = RepoPath::from_str(&envelope.payload.path);
1808
1809 repository_handle
1810 .update(&mut cx, |repository_handle, cx| {
1811 repository_handle.spawn_set_index_text_job(
1812 repo_path,
1813 envelope.payload.text,
1814 None,
1815 cx,
1816 )
1817 })?
1818 .await??;
1819 Ok(proto::Ack {})
1820 }
1821
1822 async fn handle_commit(
1823 this: Entity<Self>,
1824 envelope: TypedEnvelope<proto::Commit>,
1825 mut cx: AsyncApp,
1826 ) -> Result<proto::Ack> {
1827 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1828 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1829
1830 let message = SharedString::from(envelope.payload.message);
1831 let name = envelope.payload.name.map(SharedString::from);
1832 let email = envelope.payload.email.map(SharedString::from);
1833 let options = envelope.payload.options.unwrap_or_default();
1834
1835 repository_handle
1836 .update(&mut cx, |repository_handle, cx| {
1837 repository_handle.commit(
1838 message,
1839 name.zip(email),
1840 CommitOptions {
1841 amend: options.amend,
1842 signoff: options.signoff,
1843 },
1844 cx,
1845 )
1846 })?
1847 .await??;
1848 Ok(proto::Ack {})
1849 }
1850
1851 async fn handle_get_remotes(
1852 this: Entity<Self>,
1853 envelope: TypedEnvelope<proto::GetRemotes>,
1854 mut cx: AsyncApp,
1855 ) -> Result<proto::GetRemotesResponse> {
1856 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1857 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1858
1859 let branch_name = envelope.payload.branch_name;
1860
1861 let remotes = repository_handle
1862 .update(&mut cx, |repository_handle, _| {
1863 repository_handle.get_remotes(branch_name)
1864 })?
1865 .await??;
1866
1867 Ok(proto::GetRemotesResponse {
1868 remotes: remotes
1869 .into_iter()
1870 .map(|remotes| proto::get_remotes_response::Remote {
1871 name: remotes.name.to_string(),
1872 })
1873 .collect::<Vec<_>>(),
1874 })
1875 }
1876
1877 async fn handle_get_branches(
1878 this: Entity<Self>,
1879 envelope: TypedEnvelope<proto::GitGetBranches>,
1880 mut cx: AsyncApp,
1881 ) -> Result<proto::GitBranchesResponse> {
1882 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1883 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1884
1885 let branches = repository_handle
1886 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1887 .await??;
1888
1889 Ok(proto::GitBranchesResponse {
1890 branches: branches
1891 .into_iter()
1892 .map(|branch| branch_to_proto(&branch))
1893 .collect::<Vec<_>>(),
1894 })
1895 }

1896 async fn handle_get_default_branch(
1897 this: Entity<Self>,
1898 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1899 mut cx: AsyncApp,
1900 ) -> Result<proto::GetDefaultBranchResponse> {
1901 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1902 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1903
1904 let branch = repository_handle
1905 .update(&mut cx, |repository_handle, _| {
1906 repository_handle.default_branch()
1907 })?
1908 .await??
1909 .map(Into::into);
1910
1911 Ok(proto::GetDefaultBranchResponse { branch })
1912 }

1913 async fn handle_create_branch(
1914 this: Entity<Self>,
1915 envelope: TypedEnvelope<proto::GitCreateBranch>,
1916 mut cx: AsyncApp,
1917 ) -> Result<proto::Ack> {
1918 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1919 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1920 let branch_name = envelope.payload.branch_name;
1921
1922 repository_handle
1923 .update(&mut cx, |repository_handle, _| {
1924 repository_handle.create_branch(branch_name)
1925 })?
1926 .await??;
1927
1928 Ok(proto::Ack {})
1929 }
1930
1931 async fn handle_change_branch(
1932 this: Entity<Self>,
1933 envelope: TypedEnvelope<proto::GitChangeBranch>,
1934 mut cx: AsyncApp,
1935 ) -> Result<proto::Ack> {
1936 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1937 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1938 let branch_name = envelope.payload.branch_name;
1939
1940 repository_handle
1941 .update(&mut cx, |repository_handle, _| {
1942 repository_handle.change_branch(branch_name)
1943 })?
1944 .await??;
1945
1946 Ok(proto::Ack {})
1947 }
1948
1949 async fn handle_rename_branch(
1950 this: Entity<Self>,
1951 envelope: TypedEnvelope<proto::GitRenameBranch>,
1952 mut cx: AsyncApp,
1953 ) -> Result<proto::Ack> {
1954 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1955 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1956 let branch = envelope.payload.branch;
1957 let new_name = envelope.payload.new_name;
1958
1959 repository_handle
1960 .update(&mut cx, |repository_handle, _| {
1961 repository_handle.rename_branch(branch, new_name)
1962 })?
1963 .await??;
1964
1965 Ok(proto::Ack {})
1966 }
1967
1968 async fn handle_show(
1969 this: Entity<Self>,
1970 envelope: TypedEnvelope<proto::GitShow>,
1971 mut cx: AsyncApp,
1972 ) -> Result<proto::GitCommitDetails> {
1973 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1974 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1975
1976 let commit = repository_handle
1977 .update(&mut cx, |repository_handle, _| {
1978 repository_handle.show(envelope.payload.commit)
1979 })?
1980 .await??;
1981 Ok(proto::GitCommitDetails {
1982 sha: commit.sha.into(),
1983 message: commit.message.into(),
1984 commit_timestamp: commit.commit_timestamp,
1985 author_email: commit.author_email.into(),
1986 author_name: commit.author_name.into(),
1987 })
1988 }
1989
1990 async fn handle_load_commit_diff(
1991 this: Entity<Self>,
1992 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1993 mut cx: AsyncApp,
1994 ) -> Result<proto::LoadCommitDiffResponse> {
1995 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1996 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1997
1998 let commit_diff = repository_handle
1999 .update(&mut cx, |repository_handle, _| {
2000 repository_handle.load_commit_diff(envelope.payload.commit)
2001 })?
2002 .await??;
2003 Ok(proto::LoadCommitDiffResponse {
2004 files: commit_diff
2005 .files
2006 .into_iter()
2007 .map(|file| proto::CommitFile {
2008 path: file.path.to_string(),
2009 old_text: file.old_text,
2010 new_text: file.new_text,
2011 })
2012 .collect(),
2013 })
2014 }
2015
2016 async fn handle_reset(
2017 this: Entity<Self>,
2018 envelope: TypedEnvelope<proto::GitReset>,
2019 mut cx: AsyncApp,
2020 ) -> Result<proto::Ack> {
2021 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2022 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2023
2024 let mode = match envelope.payload.mode() {
2025 git_reset::ResetMode::Soft => ResetMode::Soft,
2026 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2027 };
2028
2029 repository_handle
2030 .update(&mut cx, |repository_handle, cx| {
2031 repository_handle.reset(envelope.payload.commit, mode, cx)
2032 })?
2033 .await??;
2034 Ok(proto::Ack {})
2035 }
2036
2037 async fn handle_checkout_files(
2038 this: Entity<Self>,
2039 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2040 mut cx: AsyncApp,
2041 ) -> Result<proto::Ack> {
2042 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2043 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2044 let paths = envelope
2045 .payload
2046 .paths
2047 .iter()
2048 .map(|s| RepoPath::from_str(s))
2049 .collect();
2050
2051 repository_handle
2052 .update(&mut cx, |repository_handle, cx| {
2053 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2054 })?
2055 .await??;
2056 Ok(proto::Ack {})
2057 }
2058
2059 async fn handle_open_commit_message_buffer(
2060 this: Entity<Self>,
2061 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2062 mut cx: AsyncApp,
2063 ) -> Result<proto::OpenBufferResponse> {
2064 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2065 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2066 let buffer = repository
2067 .update(&mut cx, |repository, cx| {
2068 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2069 })?
2070 .await?;
2071
2072 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2073 this.update(&mut cx, |this, cx| {
2074 this.buffer_store.update(cx, |buffer_store, cx| {
2075 buffer_store
2076 .create_buffer_for_peer(
2077 &buffer,
2078 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2079 cx,
2080 )
2081 .detach_and_log_err(cx);
2082 })
2083 })?;
2084
2085 Ok(proto::OpenBufferResponse {
2086 buffer_id: buffer_id.to_proto(),
2087 })
2088 }
2089
2090 async fn handle_askpass(
2091 this: Entity<Self>,
2092 envelope: TypedEnvelope<proto::AskPassRequest>,
2093 mut cx: AsyncApp,
2094 ) -> Result<proto::AskPassResponse> {
2095 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2096 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2097
2098 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2099 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2100 debug_panic!("no askpass found");
2101 anyhow::bail!("no askpass found");
2102 };
2103
2104 let response = askpass.ask_password(envelope.payload.prompt).await?;
2105
2106 delegates
2107 .lock()
2108 .insert(envelope.payload.askpass_id, askpass);
2109
2110 response.try_into()
2111 }
2112
2113 async fn handle_check_for_pushed_commits(
2114 this: Entity<Self>,
2115 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2116 mut cx: AsyncApp,
2117 ) -> Result<proto::CheckForPushedCommitsResponse> {
2118 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2119 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2120
2121 let branches = repository_handle
2122 .update(&mut cx, |repository_handle, _| {
2123 repository_handle.check_for_pushed_commits()
2124 })?
2125 .await??;
2126 Ok(proto::CheckForPushedCommitsResponse {
2127 pushed_to: branches
2128 .into_iter()
2129 .map(|branch| branch.to_string())
2130 .collect(),
2131 })
2132 }
2133
2134 async fn handle_git_diff(
2135 this: Entity<Self>,
2136 envelope: TypedEnvelope<proto::GitDiff>,
2137 mut cx: AsyncApp,
2138 ) -> Result<proto::GitDiffResponse> {
2139 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2140 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2141 let diff_type = match envelope.payload.diff_type() {
2142 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2143 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2144 };
2145
2146 let mut diff = repository_handle
2147 .update(&mut cx, |repository_handle, cx| {
2148 repository_handle.diff(diff_type, cx)
2149 })?
2150 .await??;
2151 const ONE_MB: usize = 1_000_000;
2152 if diff.len() > ONE_MB {
2153 diff = diff.chars().take(ONE_MB).collect()
2154 }
2155
2156 Ok(proto::GitDiffResponse { diff })
2157 }
2158
2159 async fn handle_open_unstaged_diff(
2160 this: Entity<Self>,
2161 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2162 mut cx: AsyncApp,
2163 ) -> Result<proto::OpenUnstagedDiffResponse> {
2164 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2165 let diff = this
2166 .update(&mut cx, |this, cx| {
2167 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2168 Some(this.open_unstaged_diff(buffer, cx))
2169 })?
2170 .context("missing buffer")?
2171 .await?;
2172 this.update(&mut cx, |this, _| {
2173 let shared_diffs = this
2174 .shared_diffs
2175 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2176 .or_default();
2177 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2178 })?;
2179 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2180 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2181 }
2182
2183 async fn handle_open_uncommitted_diff(
2184 this: Entity<Self>,
2185 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2186 mut cx: AsyncApp,
2187 ) -> Result<proto::OpenUncommittedDiffResponse> {
2188 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2189 let diff = this
2190 .update(&mut cx, |this, cx| {
2191 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2192 Some(this.open_uncommitted_diff(buffer, cx))
2193 })?
2194 .context("missing buffer")?
2195 .await?;
2196 this.update(&mut cx, |this, _| {
2197 let shared_diffs = this
2198 .shared_diffs
2199 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2200 .or_default();
2201 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2202 })?;
2203 diff.read_with(&cx, |diff, cx| {
2204 use proto::open_uncommitted_diff_response::Mode;
2205
2206 let unstaged_diff = diff.secondary_diff();
2207 let index_snapshot = unstaged_diff.and_then(|diff| {
2208 let diff = diff.read(cx);
2209 diff.base_text_exists().then(|| diff.base_text())
2210 });
2211
2212 let mode;
2213 let staged_text;
2214 let committed_text;
2215 if diff.base_text_exists() {
2216 let committed_snapshot = diff.base_text();
2217 committed_text = Some(committed_snapshot.text());
2218 if let Some(index_text) = index_snapshot {
2219 if index_text.remote_id() == committed_snapshot.remote_id() {
2220 mode = Mode::IndexMatchesHead;
2221 staged_text = None;
2222 } else {
2223 mode = Mode::IndexAndHead;
2224 staged_text = Some(index_text.text());
2225 }
2226 } else {
2227 mode = Mode::IndexAndHead;
2228 staged_text = None;
2229 }
2230 } else {
2231 mode = Mode::IndexAndHead;
2232 committed_text = None;
2233 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2234 }
2235
2236 proto::OpenUncommittedDiffResponse {
2237 committed_text,
2238 staged_text,
2239 mode: mode.into(),
2240 }
2241 })
2242 }
2243
2244 async fn handle_update_diff_bases(
2245 this: Entity<Self>,
2246 request: TypedEnvelope<proto::UpdateDiffBases>,
2247 mut cx: AsyncApp,
2248 ) -> Result<()> {
2249 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2250 this.update(&mut cx, |this, cx| {
2251 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2252 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2253 {
2254 let buffer = buffer.read(cx).text_snapshot();
2255 diff_state.update(cx, |diff_state, cx| {
2256 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2257 })
2258 }
2259 })
2260 }
2261
2262 async fn handle_blame_buffer(
2263 this: Entity<Self>,
2264 envelope: TypedEnvelope<proto::BlameBuffer>,
2265 mut cx: AsyncApp,
2266 ) -> Result<proto::BlameBufferResponse> {
2267 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2268 let version = deserialize_version(&envelope.payload.version);
2269 let buffer = this.read_with(&cx, |this, cx| {
2270 this.buffer_store.read(cx).get_existing(buffer_id)
2271 })??;
2272 buffer
2273 .update(&mut cx, |buffer, _| {
2274 buffer.wait_for_version(version.clone())
2275 })?
2276 .await?;
2277 let blame = this
2278 .update(&mut cx, |this, cx| {
2279 this.blame_buffer(&buffer, Some(version), cx)
2280 })?
2281 .await?;
2282 Ok(serialize_blame_buffer_response(blame))
2283 }
2284
2285 async fn handle_get_permalink_to_line(
2286 this: Entity<Self>,
2287 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2288 mut cx: AsyncApp,
2289 ) -> Result<proto::GetPermalinkToLineResponse> {
2290 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2291 // let version = deserialize_version(&envelope.payload.version);
2292 let selection = {
2293 let proto_selection = envelope
2294 .payload
2295 .selection
2296 .context("no selection defined to get permalink for")?;
2297 proto_selection.start as u32..proto_selection.end as u32
2298 };
2299 let buffer = this.read_with(&cx, |this, cx| {
2300 this.buffer_store.read(cx).get_existing(buffer_id)
2301 })??;
2302 let permalink = this
2303 .update(&mut cx, |this, cx| {
2304 this.get_permalink_to_line(&buffer, selection, cx)
2305 })?
2306 .await?;
2307 Ok(proto::GetPermalinkToLineResponse {
2308 permalink: permalink.to_string(),
2309 })
2310 }
2311
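    /// Looks up the repository entity referenced by an incoming RPC request,
    /// returning an error if the repository id is unknown to this GitStore.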
2312 fn repository_for_request(
2313 this: &Entity<Self>,
2314 id: RepositoryId,
2315 cx: &mut AsyncApp,
2316 ) -> Result<Entity<Repository>> {
2317 this.read_with(cx, |this, _| {
2318 this.repositories
2319 .get(&id)
2320 .context("missing repository handle")
2321 .cloned()
2322 })?
2323 }
2324
2325 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2326 self.repositories
2327 .iter()
2328 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2329 .collect()
2330 }
2331
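    /// Maps a batch of updated worktree entries to repository-relative paths,
    /// grouped by repository. The matching runs on background tasks, and a path
    /// that falls inside a nested repository is attributed only to that innermost
    /// repository, not to its parents.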
2332 fn process_updated_entries(
2333 &self,
2334 worktree: &Entity<Worktree>,
2335 updated_entries: &[(Arc<Path>, ProjectEntryId, PathChange)],
2336 cx: &mut App,
2337 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2338 let mut repo_paths = self
2339 .repositories
2340 .values()
2341 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2342 .collect::<Vec<_>>();
2343 let mut entries: Vec<_> = updated_entries
2344 .iter()
2345 .map(|(path, _, _)| path.clone())
2346 .collect();
2347 entries.sort();
2348 let worktree = worktree.read(cx);
2349
2350 let entries = entries
2351 .into_iter()
2352 .filter_map(|path| worktree.absolutize(&path).ok())
2353 .collect::<Arc<[_]>>();
2354
2355 let executor = cx.background_executor().clone();
2356 cx.background_executor().spawn(async move {
2357 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2358 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2359 let mut tasks = FuturesOrdered::new();
2360 for (repo_path, repo) in repo_paths.into_iter().rev() {
2361 let entries = entries.clone();
2362 let task = executor.spawn(async move {
2363 // Find all repository paths that belong to this repo
2364 let mut ix = entries.partition_point(|path| path < &*repo_path);
2365 if ix == entries.len() {
2366 return None;
2367 };
2368
2369 let mut paths = Vec::new();
2370 // All paths prefixed by a given repo will constitute a continuous range.
2371 while let Some(path) = entries.get(ix)
2372 && let Some(repo_path) =
2373 RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, path)
2374 {
2375 paths.push((repo_path, ix));
2376 ix += 1;
2377 }
2378 if paths.is_empty() {
2379 None
2380 } else {
2381 Some((repo, paths))
2382 }
2383 });
2384 tasks.push_back(task);
2385 }
2386
2387 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2388 let mut path_was_used = vec![false; entries.len()];
2389 let tasks = tasks.collect::<Vec<_>>().await;
2390 // Repositories were visited in reverse-sorted order above, so tasks for more-specific (nested) paths come first.
2391 // We always want to assign a path to its innermost repository.
2392 for t in tasks {
2393 let Some((repo, paths)) = t else {
2394 continue;
2395 };
2396 let entry = paths_by_git_repo.entry(repo).or_default();
2397 for (repo_path, ix) in paths {
2398 if path_was_used[ix] {
2399 continue;
2400 }
2401 path_was_used[ix] = true;
2402 entry.push(repo_path);
2403 }
2404 }
2405
2406 paths_by_git_repo
2407 })
2408 }
2409}
2410
2411impl BufferGitState {
2412 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2413 Self {
2414 unstaged_diff: Default::default(),
2415 uncommitted_diff: Default::default(),
2416 recalculate_diff_task: Default::default(),
2417 language: Default::default(),
2418 language_registry: Default::default(),
2419 recalculating_tx: postage::watch::channel_with(false).0,
2420 hunk_staging_operation_count: 0,
2421 hunk_staging_operation_count_as_of_write: 0,
2422 head_text: Default::default(),
2423 index_text: Default::default(),
2424 head_changed: Default::default(),
2425 index_changed: Default::default(),
2426 language_changed: Default::default(),
2427 conflict_updated_futures: Default::default(),
2428 conflict_set: Default::default(),
2429 reparse_conflict_markers_task: Default::default(),
2430 }
2431 }
2432
2433 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2434 self.language = buffer.read(cx).language().cloned();
2435 self.language_changed = true;
2436 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2437 }
2438
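    /// Schedules a background reparse of the buffer's conflict markers and applies
    /// the resulting snapshot to the attached ConflictSet. The returned receiver
    /// fires once the update completes; if no conflict set is attached, or it has
    /// recorded no conflict, no work is scheduled.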
2439 fn reparse_conflict_markers(
2440 &mut self,
2441 buffer: text::BufferSnapshot,
2442 cx: &mut Context<Self>,
2443 ) -> oneshot::Receiver<()> {
2444 let (tx, rx) = oneshot::channel();
2445
2446 let Some(conflict_set) = self
2447 .conflict_set
2448 .as_ref()
2449 .and_then(|conflict_set| conflict_set.upgrade())
2450 else {
2451 return rx;
2452 };
2453
2454 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2455 if conflict_set.has_conflict {
2456 Some(conflict_set.snapshot())
2457 } else {
2458 None
2459 }
2460 });
2461
2462 if let Some(old_snapshot) = old_snapshot {
2463 self.conflict_updated_futures.push(tx);
2464 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2465 let (snapshot, changed_range) = cx
2466 .background_spawn(async move {
2467 let new_snapshot = ConflictSet::parse(&buffer);
2468 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2469 (new_snapshot, changed_range)
2470 })
2471 .await;
2472 this.update(cx, |this, cx| {
2473 if let Some(conflict_set) = &this.conflict_set {
2474 conflict_set
2475 .update(cx, |conflict_set, cx| {
2476 conflict_set.set_snapshot(snapshot, changed_range, cx);
2477 })
2478 .ok();
2479 }
2480 let futures = std::mem::take(&mut this.conflict_updated_futures);
2481 for tx in futures {
2482 tx.send(()).ok();
2483 }
2484 })
2485 }))
2486 }
2487
2488 rx
2489 }
2490
2491 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2492 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2493 }
2494
2495 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2496 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2497 }
2498
2499 fn handle_base_texts_updated(
2500 &mut self,
2501 buffer: text::BufferSnapshot,
2502 message: proto::UpdateDiffBases,
2503 cx: &mut Context<Self>,
2504 ) {
2505 use proto::update_diff_bases::Mode;
2506
2507 let Some(mode) = Mode::from_i32(message.mode) else {
2508 return;
2509 };
2510
2511 let diff_bases_change = match mode {
2512 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2513 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2514 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2515 Mode::IndexAndHead => DiffBasesChange::SetEach {
2516 index: message.staged_text,
2517 head: message.committed_text,
2518 },
2519 };
2520
2521 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2522 }
2523
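    /// If a diff recalculation is currently in flight, returns a future that
    /// completes when it finishes; otherwise returns None.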
2524 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2525 if *self.recalculating_tx.borrow() {
2526 let mut rx = self.recalculating_tx.subscribe();
2527 Some(async move {
2528 loop {
2529 let is_recalculating = rx.recv().await;
2530 if is_recalculating != Some(true) {
2531 break;
2532 }
2533 }
2534 })
2535 } else {
2536 None
2537 }
2538 }
2539
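    /// Stores new head and/or index base texts (normalizing their line endings)
    /// and then recalculates the diffs against the given buffer snapshot.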
2540 fn diff_bases_changed(
2541 &mut self,
2542 buffer: text::BufferSnapshot,
2543 diff_bases_change: Option<DiffBasesChange>,
2544 cx: &mut Context<Self>,
2545 ) {
2546 match diff_bases_change {
2547 Some(DiffBasesChange::SetIndex(index)) => {
2548 self.index_text = index.map(|mut index| {
2549 text::LineEnding::normalize(&mut index);
2550 Arc::new(index)
2551 });
2552 self.index_changed = true;
2553 }
2554 Some(DiffBasesChange::SetHead(head)) => {
2555 self.head_text = head.map(|mut head| {
2556 text::LineEnding::normalize(&mut head);
2557 Arc::new(head)
2558 });
2559 self.head_changed = true;
2560 }
2561 Some(DiffBasesChange::SetBoth(text)) => {
2562 let text = text.map(|mut text| {
2563 text::LineEnding::normalize(&mut text);
2564 Arc::new(text)
2565 });
2566 self.head_text = text.clone();
2567 self.index_text = text;
2568 self.head_changed = true;
2569 self.index_changed = true;
2570 }
2571 Some(DiffBasesChange::SetEach { index, head }) => {
2572 self.index_text = index.map(|mut index| {
2573 text::LineEnding::normalize(&mut index);
2574 Arc::new(index)
2575 });
2576 self.index_changed = true;
2577 self.head_text = head.map(|mut head| {
2578 text::LineEnding::normalize(&mut head);
2579 Arc::new(head)
2580 });
2581 self.head_changed = true;
2582 }
2583 None => {}
2584 }
2585
2586 self.recalculate_diffs(buffer, cx)
2587 }
2588
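    /// Recomputes the unstaged and uncommitted diffs for the buffer on a spawned
    /// task. When the index matches HEAD the unstaged diff is reused as the
    /// uncommitted diff, and the whole recalculation is abandoned if more hunk
    /// staging operations were performed while it was running.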
2589 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2590 *self.recalculating_tx.borrow_mut() = true;
2591
2592 let language = self.language.clone();
2593 let language_registry = self.language_registry.clone();
2594 let unstaged_diff = self.unstaged_diff();
2595 let uncommitted_diff = self.uncommitted_diff();
2596 let head = self.head_text.clone();
2597 let index = self.index_text.clone();
2598 let index_changed = self.index_changed;
2599 let head_changed = self.head_changed;
2600 let language_changed = self.language_changed;
2601 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2602 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2603 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2604 (None, None) => true,
2605 _ => false,
2606 };
2607 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2608 log::debug!(
2609 "start recalculating diffs for buffer {}",
2610 buffer.remote_id()
2611 );
2612
2613 let mut new_unstaged_diff = None;
2614 if let Some(unstaged_diff) = &unstaged_diff {
2615 new_unstaged_diff = Some(
2616 BufferDiff::update_diff(
2617 unstaged_diff.clone(),
2618 buffer.clone(),
2619 index,
2620 index_changed,
2621 language_changed,
2622 language.clone(),
2623 language_registry.clone(),
2624 cx,
2625 )
2626 .await?,
2627 );
2628 }
2629
2630 let mut new_uncommitted_diff = None;
2631 if let Some(uncommitted_diff) = &uncommitted_diff {
2632 new_uncommitted_diff = if index_matches_head {
2633 new_unstaged_diff.clone()
2634 } else {
2635 Some(
2636 BufferDiff::update_diff(
2637 uncommitted_diff.clone(),
2638 buffer.clone(),
2639 head,
2640 head_changed,
2641 language_changed,
2642 language.clone(),
2643 language_registry.clone(),
2644 cx,
2645 )
2646 .await?,
2647 )
2648 }
2649 }
2650
2651 let cancel = this.update(cx, |this, _| {
2652 // This checks whether all pending stage/unstage operations
2653 // have quiesced (i.e. both the corresponding write and the
2654 // read of that write have completed). If not, then we cancel
2655 // this recalculation attempt to avoid invalidating pending
2656 // state too quickly; another recalculation will come along
2657 // later and clear the pending state once the state of the index has settled.
2658 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2659 *this.recalculating_tx.borrow_mut() = false;
2660 true
2661 } else {
2662 false
2663 }
2664 })?;
2665 if cancel {
2666 log::debug!(
2667 concat!(
2668 "aborting recalculating diffs for buffer {} ",
2669 "due to subsequent hunk operations",
2670 ),
2671 buffer.remote_id()
2672 );
2673 return Ok(());
2674 }
2675
2676 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2677 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2678 {
2679 unstaged_diff.update(cx, |diff, cx| {
2680 if language_changed {
2681 diff.language_changed(cx);
2682 }
2683 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2684 })?
2685 } else {
2686 None
2687 };
2688
2689 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2690 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2691 {
2692 uncommitted_diff.update(cx, |diff, cx| {
2693 if language_changed {
2694 diff.language_changed(cx);
2695 }
2696 diff.set_snapshot_with_secondary(
2697 new_uncommitted_diff,
2698 &buffer,
2699 unstaged_changed_range,
2700 true,
2701 cx,
2702 );
2703 })?;
2704 }
2705
2706 log::debug!(
2707 "finished recalculating diffs for buffer {}",
2708 buffer.remote_id()
2709 );
2710
2711 if let Some(this) = this.upgrade() {
2712 this.update(cx, |this, _| {
2713 this.index_changed = false;
2714 this.head_changed = false;
2715 this.language_changed = false;
2716 *this.recalculating_tx.borrow_mut() = false;
2717 })?;
2718 }
2719
2720 Ok(())
2721 }));
2722 }
2723}
2724
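/// Builds an askpass delegate that forwards credential prompts to the project's
/// downstream client over RPC and relays the encrypted response back to the
/// in-progress git operation, zeroizing the raw response afterwards.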
2725fn make_remote_delegate(
2726 this: Entity<GitStore>,
2727 project_id: u64,
2728 repository_id: RepositoryId,
2729 askpass_id: u64,
2730 cx: &mut AsyncApp,
2731) -> AskPassDelegate {
2732 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2733 this.update(cx, |this, cx| {
2734 let Some((client, _)) = this.downstream_client() else {
2735 return;
2736 };
2737 let response = client.request(proto::AskPassRequest {
2738 project_id,
2739 repository_id: repository_id.to_proto(),
2740 askpass_id,
2741 prompt,
2742 });
2743 cx.spawn(async move |_, _| {
2744 let mut response = response.await?.response;
2745 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2746 .ok();
2747 response.zeroize();
2748 anyhow::Ok(())
2749 })
2750 .detach_and_log_err(cx);
2751 })
2752 .log_err();
2753 })
2754}
2755
2756impl RepositoryId {
2757 pub fn to_proto(self) -> u64 {
2758 self.0
2759 }
2760
2761 pub fn from_proto(id: u64) -> Self {
2762 RepositoryId(id)
2763 }
2764}
2765
2766impl RepositorySnapshot {
2767 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2768 Self {
2769 id,
2770 statuses_by_path: Default::default(),
2771 work_directory_abs_path,
2772 branch: None,
2773 head_commit: None,
2774 scan_id: 0,
2775 merge: Default::default(),
2776 remote_origin_url: None,
2777 remote_upstream_url: None,
2778 stash_entries: Default::default(),
2779 }
2780 }
2781
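    /// Builds the full UpdateRepository message sent when a repository is first
    /// shared: every status entry is included and no removals are reported.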
2782 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2783 proto::UpdateRepository {
2784 branch_summary: self.branch.as_ref().map(branch_to_proto),
2785 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2786 updated_statuses: self
2787 .statuses_by_path
2788 .iter()
2789 .map(|entry| entry.to_proto())
2790 .collect(),
2791 removed_statuses: Default::default(),
2792 current_merge_conflicts: self
2793 .merge
2794 .conflicted_paths
2795 .iter()
2796 .map(|repo_path| repo_path.to_proto())
2797 .collect(),
2798 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2799 project_id,
2800 id: self.id.to_proto(),
2801 abs_path: self.work_directory_abs_path.to_proto(),
2802 entry_ids: vec![self.id.to_proto()],
2803 scan_id: self.scan_id,
2804 is_last_update: true,
2805 stash_entries: self
2806 .stash_entries
2807 .entries
2808 .iter()
2809 .map(stash_to_proto)
2810 .collect(),
2811 }
2812 }
2813
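    /// Builds an incremental UpdateRepository message by walking the old and new
    /// status trees in lockstep, reporting only the entries that were added,
    /// changed, or removed relative to `old`.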
2814 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2815 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2816 let mut removed_statuses: Vec<String> = Vec::new();
2817
2818 let mut new_statuses = self.statuses_by_path.iter().peekable();
2819 let mut old_statuses = old.statuses_by_path.iter().peekable();
2820
2821 let mut current_new_entry = new_statuses.next();
2822 let mut current_old_entry = old_statuses.next();
2823 loop {
2824 match (current_new_entry, current_old_entry) {
2825 (Some(new_entry), Some(old_entry)) => {
2826 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2827 Ordering::Less => {
2828 updated_statuses.push(new_entry.to_proto());
2829 current_new_entry = new_statuses.next();
2830 }
2831 Ordering::Equal => {
2832 if new_entry.status != old_entry.status {
2833 updated_statuses.push(new_entry.to_proto());
2834 }
2835 current_old_entry = old_statuses.next();
2836 current_new_entry = new_statuses.next();
2837 }
2838 Ordering::Greater => {
2839 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2840 current_old_entry = old_statuses.next();
2841 }
2842 }
2843 }
2844 (None, Some(old_entry)) => {
2845 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2846 current_old_entry = old_statuses.next();
2847 }
2848 (Some(new_entry), None) => {
2849 updated_statuses.push(new_entry.to_proto());
2850 current_new_entry = new_statuses.next();
2851 }
2852 (None, None) => break,
2853 }
2854 }
2855
2856 proto::UpdateRepository {
2857 branch_summary: self.branch.as_ref().map(branch_to_proto),
2858 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2859 updated_statuses,
2860 removed_statuses,
2861 current_merge_conflicts: self
2862 .merge
2863 .conflicted_paths
2864 .iter()
2865 .map(|path| path.as_ref().to_proto())
2866 .collect(),
2867 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2868 project_id,
2869 id: self.id.to_proto(),
2870 abs_path: self.work_directory_abs_path.to_proto(),
2871 entry_ids: vec![],
2872 scan_id: self.scan_id,
2873 is_last_update: true,
2874 stash_entries: self
2875 .stash_entries
2876 .entries
2877 .iter()
2878 .map(stash_to_proto)
2879 .collect(),
2880 }
2881 }
2882
2883 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2884 self.statuses_by_path.iter().cloned()
2885 }
2886
2887 pub fn status_summary(&self) -> GitSummary {
2888 self.statuses_by_path.summary().item_summary
2889 }
2890
2891 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2892 self.statuses_by_path
2893 .get(&PathKey(path.0.clone()), &())
2894 .cloned()
2895 }
2896
2897 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2898 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path)
2899 }
2900
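    /// Strips the work directory prefix from an absolute path, yielding the
    /// repository-relative path, or None if the path lies outside the work directory.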
2901 #[inline]
2902 fn abs_path_to_repo_path_inner(
2903 work_directory_abs_path: &Path,
2904 abs_path: &Path,
2905 ) -> Option<RepoPath> {
2906 abs_path
2907 .strip_prefix(&work_directory_abs_path)
2908 .map(RepoPath::from)
2909 .ok()
2910 }
2911
2912 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2913 self.merge.conflicted_paths.contains(repo_path)
2914 }
2915
2916 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2917 let had_conflict_on_last_merge_head_change =
2918 self.merge.conflicted_paths.contains(repo_path);
2919 let has_conflict_currently = self
2920 .status_for_path(repo_path)
2921 .is_some_and(|entry| entry.status.is_conflicted());
2922 had_conflict_on_last_merge_head_change || has_conflict_currently
2923 }
2924
2925 /// This is the name that will be displayed in the repository selector for this repository.
2926 pub fn display_name(&self) -> SharedString {
2927 self.work_directory_abs_path
2928 .file_name()
2929 .unwrap_or_default()
2930 .to_string_lossy()
2931 .to_string()
2932 .into()
2933 }
2934}
2935
2936pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
2937 proto::StashEntry {
2938 oid: entry.oid.as_bytes().to_vec(),
2939 message: entry.message.clone(),
2940 branch: entry.branch.clone(),
2941 index: entry.index as u64,
2942 timestamp: entry.timestamp,
2943 }
2944}
2945
2946pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
2947 Ok(StashEntry {
2948 oid: Oid::from_bytes(&entry.oid)?,
2949 message: entry.message.clone(),
2950 index: entry.index as usize,
2951 branch: entry.branch.clone(),
2952 timestamp: entry.timestamp,
2953 })
2954}
2955
2956impl MergeDetails {
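    /// Reads the merge-related refs (MERGE_HEAD, CHERRY_PICK_HEAD, REBASE_HEAD,
    /// REVERT_HEAD, APPLY_HEAD) and, when they have changed since the previous
    /// snapshot, recomputes the set of conflicted paths from the current status.
    /// Returns the new details along with whether the merge heads changed.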
2957 async fn load(
2958 backend: &Arc<dyn GitRepository>,
2959 status: &SumTree<StatusEntry>,
2960 prev_snapshot: &RepositorySnapshot,
2961 ) -> Result<(MergeDetails, bool)> {
2962 log::debug!("load merge details");
2963 let message = backend.merge_message().await;
2964 let heads = backend
2965 .revparse_batch(vec![
2966 "MERGE_HEAD".into(),
2967 "CHERRY_PICK_HEAD".into(),
2968 "REBASE_HEAD".into(),
2969 "REVERT_HEAD".into(),
2970 "APPLY_HEAD".into(),
2971 ])
2972 .await
2973 .log_err()
2974 .unwrap_or_default()
2975 .into_iter()
2976 .map(|opt| opt.map(SharedString::from))
2977 .collect::<Vec<_>>();
2978 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2979 let conflicted_paths = if merge_heads_changed {
2980 let current_conflicted_paths = TreeSet::from_ordered_entries(
2981 status
2982 .iter()
2983 .filter(|entry| entry.status.is_conflicted())
2984 .map(|entry| entry.repo_path.clone()),
2985 );
2986
2987 // It can happen that we run a scan while a lengthy merge is in progress
2988 // that will eventually result in conflicts, but before those conflicts
2989 // are reported by `git status`. Since for the moment we only care about
2990 // the merge heads state for the purposes of tracking conflicts, don't update
2991 // this state until we see some conflicts.
2992 if heads.iter().any(Option::is_some)
2993 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
2994 && current_conflicted_paths.is_empty()
2995 {
2996 log::debug!("not updating merge heads because no conflicts found");
2997 return Ok((
2998 MergeDetails {
2999 message: message.map(SharedString::from),
3000 ..prev_snapshot.merge.clone()
3001 },
3002 false,
3003 ));
3004 }
3005
3006 current_conflicted_paths
3007 } else {
3008 prev_snapshot.merge.conflicted_paths.clone()
3009 };
3010 let details = MergeDetails {
3011 conflicted_paths,
3012 message: message.map(SharedString::from),
3013 heads,
3014 };
3015 Ok((details, merge_heads_changed))
3016 }
3017}
3018
3019impl Repository {
3020 pub fn snapshot(&self) -> RepositorySnapshot {
3021 self.snapshot.clone()
3022 }
3023
3024 fn local(
3025 id: RepositoryId,
3026 work_directory_abs_path: Arc<Path>,
3027 dot_git_abs_path: Arc<Path>,
3028 repository_dir_abs_path: Arc<Path>,
3029 common_dir_abs_path: Arc<Path>,
3030 project_environment: WeakEntity<ProjectEnvironment>,
3031 fs: Arc<dyn Fs>,
3032 git_store: WeakEntity<GitStore>,
3033 cx: &mut Context<Self>,
3034 ) -> Self {
3035 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
3036 Repository {
3037 this: cx.weak_entity(),
3038 git_store,
3039 snapshot,
3040 commit_message_buffer: None,
3041 askpass_delegates: Default::default(),
3042 paths_needing_status_update: Default::default(),
3043 latest_askpass_id: 0,
3044 job_sender: Repository::spawn_local_git_worker(
3045 work_directory_abs_path,
3046 dot_git_abs_path,
3047 repository_dir_abs_path,
3048 common_dir_abs_path,
3049 project_environment,
3050 fs,
3051 cx,
3052 ),
3053 job_id: 0,
3054 active_jobs: Default::default(),
3055 }
3056 }
3057
3058 fn remote(
3059 id: RepositoryId,
3060 work_directory_abs_path: Arc<Path>,
3061 project_id: ProjectId,
3062 client: AnyProtoClient,
3063 git_store: WeakEntity<GitStore>,
3064 cx: &mut Context<Self>,
3065 ) -> Self {
3066 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
3067 Self {
3068 this: cx.weak_entity(),
3069 snapshot,
3070 commit_message_buffer: None,
3071 git_store,
3072 paths_needing_status_update: Default::default(),
3073 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3074 askpass_delegates: Default::default(),
3075 latest_askpass_id: 0,
3076 active_jobs: Default::default(),
3077 job_id: 0,
3078 }
3079 }
3080
3081 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3082 self.git_store.upgrade()
3083 }
3084
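    /// Reloads the index and HEAD texts for every open buffer belonging to this
    /// repository, forwards any changed base texts to the downstream client, and
    /// triggers diff recalculation with the results.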
3085 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3086 let this = cx.weak_entity();
3087 let git_store = self.git_store.clone();
3088 let _ = self.send_keyed_job(
3089 Some(GitJobKey::ReloadBufferDiffBases),
3090 None,
3091 |state, mut cx| async move {
3092 let RepositoryState::Local { backend, .. } = state else {
3093 log::error!("tried to recompute diffs for a non-local repository");
3094 return Ok(());
3095 };
3096
3097 let Some(this) = this.upgrade() else {
3098 return Ok(());
3099 };
3100
3101 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3102 git_store.update(cx, |git_store, cx| {
3103 git_store
3104 .diffs
3105 .iter()
3106 .filter_map(|(buffer_id, diff_state)| {
3107 let buffer_store = git_store.buffer_store.read(cx);
3108 let buffer = buffer_store.get(*buffer_id)?;
3109 let file = File::from_dyn(buffer.read(cx).file())?;
3110 let abs_path =
3111 file.worktree.read(cx).absolutize(&file.path).ok()?;
3112 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3113 log::debug!(
3114 "start reload diff bases for repo path {}",
3115 repo_path.0.display()
3116 );
3117 diff_state.update(cx, |diff_state, _| {
3118 let has_unstaged_diff = diff_state
3119 .unstaged_diff
3120 .as_ref()
3121 .is_some_and(|diff| diff.is_upgradable());
3122 let has_uncommitted_diff = diff_state
3123 .uncommitted_diff
3124 .as_ref()
3125 .is_some_and(|set| set.is_upgradable());
3126
3127 Some((
3128 buffer,
3129 repo_path,
3130 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3131 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3132 ))
3133 })
3134 })
3135 .collect::<Vec<_>>()
3136 })
3137 })??;
3138
3139 let buffer_diff_base_changes = cx
3140 .background_spawn(async move {
3141 let mut changes = Vec::new();
3142 for (buffer, repo_path, current_index_text, current_head_text) in
3143 &repo_diff_state_updates
3144 {
3145 let index_text = if current_index_text.is_some() {
3146 backend.load_index_text(repo_path.clone()).await
3147 } else {
3148 None
3149 };
3150 let head_text = if current_head_text.is_some() {
3151 backend.load_committed_text(repo_path.clone()).await
3152 } else {
3153 None
3154 };
3155
3156 let change =
3157 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3158 (Some(current_index), Some(current_head)) => {
3159 let index_changed =
3160 index_text.as_ref() != current_index.as_deref();
3161 let head_changed =
3162 head_text.as_ref() != current_head.as_deref();
3163 if index_changed && head_changed {
3164 if index_text == head_text {
3165 Some(DiffBasesChange::SetBoth(head_text))
3166 } else {
3167 Some(DiffBasesChange::SetEach {
3168 index: index_text,
3169 head: head_text,
3170 })
3171 }
3172 } else if index_changed {
3173 Some(DiffBasesChange::SetIndex(index_text))
3174 } else if head_changed {
3175 Some(DiffBasesChange::SetHead(head_text))
3176 } else {
3177 None
3178 }
3179 }
3180 (Some(current_index), None) => {
3181 let index_changed =
3182 index_text.as_ref() != current_index.as_deref();
3183 index_changed
3184 .then_some(DiffBasesChange::SetIndex(index_text))
3185 }
3186 (None, Some(current_head)) => {
3187 let head_changed =
3188 head_text.as_ref() != current_head.as_deref();
3189 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3190 }
3191 (None, None) => None,
3192 };
3193
3194 changes.push((buffer.clone(), change))
3195 }
3196 changes
3197 })
3198 .await;
3199
3200 git_store.update(&mut cx, |git_store, cx| {
3201 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3202 let buffer_snapshot = buffer.read(cx).text_snapshot();
3203 let buffer_id = buffer_snapshot.remote_id();
3204 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3205 continue;
3206 };
3207
3208 let downstream_client = git_store.downstream_client();
3209 diff_state.update(cx, |diff_state, cx| {
3210 use proto::update_diff_bases::Mode;
3211
3212 if let Some((diff_bases_change, (client, project_id))) =
3213 diff_bases_change.clone().zip(downstream_client)
3214 {
3215 let (staged_text, committed_text, mode) = match diff_bases_change {
3216 DiffBasesChange::SetIndex(index) => {
3217 (index, None, Mode::IndexOnly)
3218 }
3219 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3220 DiffBasesChange::SetEach { index, head } => {
3221 (index, head, Mode::IndexAndHead)
3222 }
3223 DiffBasesChange::SetBoth(text) => {
3224 (None, text, Mode::IndexMatchesHead)
3225 }
3226 };
3227 client
3228 .send(proto::UpdateDiffBases {
3229 project_id: project_id.to_proto(),
3230 buffer_id: buffer_id.to_proto(),
3231 staged_text,
3232 committed_text,
3233 mode: mode as i32,
3234 })
3235 .log_err();
3236 }
3237
3238 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3239 });
3240 }
3241 })
3242 },
3243 );
3244 }
3245
3246 pub fn send_job<F, Fut, R>(
3247 &mut self,
3248 status: Option<SharedString>,
3249 job: F,
3250 ) -> oneshot::Receiver<R>
3251 where
3252 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3253 Fut: Future<Output = R> + 'static,
3254 R: Send + 'static,
3255 {
3256 self.send_keyed_job(None, status, job)
3257 }
3258
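    /// Enqueues a job on the repository's worker channel. When a status message is
    /// provided, the job is tracked in `active_jobs` for its duration so progress
    /// can be surfaced in the UI; the returned receiver yields the job's result.
    /// The optional `GitJobKey` identifies the kind of job to the worker.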
3259 fn send_keyed_job<F, Fut, R>(
3260 &mut self,
3261 key: Option<GitJobKey>,
3262 status: Option<SharedString>,
3263 job: F,
3264 ) -> oneshot::Receiver<R>
3265 where
3266 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3267 Fut: Future<Output = R> + 'static,
3268 R: Send + 'static,
3269 {
3270 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3271 let job_id = post_inc(&mut self.job_id);
3272 let this = self.this.clone();
3273 self.job_sender
3274 .unbounded_send(GitJob {
3275 key,
3276 job: Box::new(move |state, cx: &mut AsyncApp| {
3277 let job = job(state, cx.clone());
3278 cx.spawn(async move |cx| {
3279 if let Some(s) = status.clone() {
3280 this.update(cx, |this, cx| {
3281 this.active_jobs.insert(
3282 job_id,
3283 JobInfo {
3284 start: Instant::now(),
3285 message: s.clone(),
3286 },
3287 );
3288
3289 cx.notify();
3290 })
3291 .ok();
3292 }
3293 let result = job.await;
3294
3295 this.update(cx, |this, cx| {
3296 this.active_jobs.remove(&job_id);
3297 cx.notify();
3298 })
3299 .ok();
3300
3301 result_tx.send(result).ok();
3302 })
3303 }),
3304 })
3305 .ok();
3306 result_rx
3307 }
3308
3309 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3310 let Some(git_store) = self.git_store.upgrade() else {
3311 return;
3312 };
3313 let entity = cx.entity();
3314 git_store.update(cx, |git_store, cx| {
3315 let Some((&id, _)) = git_store
3316 .repositories
3317 .iter()
3318 .find(|(_, handle)| *handle == &entity)
3319 else {
3320 return;
3321 };
3322 git_store.active_repo_id = Some(id);
3323 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3324 });
3325 }
3326
3327 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3328 self.snapshot.status()
3329 }
3330
3331 pub fn cached_stash(&self) -> GitStash {
3332 self.snapshot.stash_entries.clone()
3333 }
3334
3335 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3336 let git_store = self.git_store.upgrade()?;
3337 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3338 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
3339 let abs_path = SanitizedPath::new(&abs_path);
3340 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3341 Some(ProjectPath {
3342 worktree_id: worktree.read(cx).id(),
3343 path: relative_path.into(),
3344 })
3345 }
3346
3347 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3348 let git_store = self.git_store.upgrade()?;
3349 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3350 let abs_path = worktree_store.absolutize(path, cx)?;
3351 self.snapshot.abs_path_to_repo_path(&abs_path)
3352 }
3353
3354 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3355 other
3356 .read(cx)
3357 .snapshot
3358 .work_directory_abs_path
3359 .starts_with(&self.snapshot.work_directory_abs_path)
3360 }
3361
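    /// Returns the commit message buffer for this repository, creating it on first
    /// use: locally via the buffer store, or remotely by requesting it from the
    /// peer and waiting for the buffer to arrive. The result is cached.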
3362 pub fn open_commit_buffer(
3363 &mut self,
3364 languages: Option<Arc<LanguageRegistry>>,
3365 buffer_store: Entity<BufferStore>,
3366 cx: &mut Context<Self>,
3367 ) -> Task<Result<Entity<Buffer>>> {
3368 let id = self.id;
3369 if let Some(buffer) = self.commit_message_buffer.clone() {
3370 return Task::ready(Ok(buffer));
3371 }
3372 let this = cx.weak_entity();
3373
3374 let rx = self.send_job(None, move |state, mut cx| async move {
3375 let Some(this) = this.upgrade() else {
3376 bail!("git store was dropped");
3377 };
3378 match state {
3379 RepositoryState::Local { .. } => {
3380 this.update(&mut cx, |_, cx| {
3381 Self::open_local_commit_buffer(languages, buffer_store, cx)
3382 })?
3383 .await
3384 }
3385 RepositoryState::Remote { project_id, client } => {
3386 let request = client.request(proto::OpenCommitMessageBuffer {
3387 project_id: project_id.0,
3388 repository_id: id.to_proto(),
3389 });
3390 let response = request.await.context("requesting to open commit buffer")?;
3391 let buffer_id = BufferId::new(response.buffer_id)?;
3392 let buffer = buffer_store
3393 .update(&mut cx, |buffer_store, cx| {
3394 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3395 })?
3396 .await?;
3397 if let Some(language_registry) = languages {
3398 let git_commit_language =
3399 language_registry.language_for_name("Git Commit").await?;
3400 buffer.update(&mut cx, |buffer, cx| {
3401 buffer.set_language(Some(git_commit_language), cx);
3402 })?;
3403 }
3404 this.update(&mut cx, |this, _| {
3405 this.commit_message_buffer = Some(buffer.clone());
3406 })?;
3407 Ok(buffer)
3408 }
3409 }
3410 });
3411
3412 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3413 }
3414
3415 fn open_local_commit_buffer(
3416 language_registry: Option<Arc<LanguageRegistry>>,
3417 buffer_store: Entity<BufferStore>,
3418 cx: &mut Context<Self>,
3419 ) -> Task<Result<Entity<Buffer>>> {
3420 cx.spawn(async move |repository, cx| {
3421 let buffer = buffer_store
3422 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3423 .await?;
3424
3425 if let Some(language_registry) = language_registry {
3426 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3427 buffer.update(cx, |buffer, cx| {
3428 buffer.set_language(Some(git_commit_language), cx);
3429 })?;
3430 }
3431
3432 repository.update(cx, |repository, _| {
3433 repository.commit_message_buffer = Some(buffer.clone());
3434 })?;
3435 Ok(buffer)
3436 })
3437 }
3438
3439 pub fn checkout_files(
3440 &mut self,
3441 commit: &str,
3442 paths: Vec<RepoPath>,
3443 _cx: &mut App,
3444 ) -> oneshot::Receiver<Result<()>> {
3445 let commit = commit.to_string();
3446 let id = self.id;
3447
3448 self.send_job(
3449 Some(format!("git checkout {}", commit).into()),
3450 move |git_repo, _| async move {
3451 match git_repo {
3452 RepositoryState::Local {
3453 backend,
3454 environment,
3455 ..
3456 } => {
3457 backend
3458 .checkout_files(commit, paths, environment.clone())
3459 .await
3460 }
3461 RepositoryState::Remote { project_id, client } => {
3462 client
3463 .request(proto::GitCheckoutFiles {
3464 project_id: project_id.0,
3465 repository_id: id.to_proto(),
3466 commit,
3467 paths: paths
3468 .into_iter()
3469 .map(|p| p.to_string_lossy().to_string())
3470 .collect(),
3471 })
3472 .await?;
3473
3474 Ok(())
3475 }
3476 }
3477 },
3478 )
3479 }
3480
3481 pub fn reset(
3482 &mut self,
3483 commit: String,
3484 reset_mode: ResetMode,
3485 _cx: &mut App,
3486 ) -> oneshot::Receiver<Result<()>> {
3487 let id = self.id;
3488
3489 self.send_job(None, move |git_repo, _| async move {
3490 match git_repo {
3491 RepositoryState::Local {
3492 backend,
3493 environment,
3494 ..
3495 } => backend.reset(commit, reset_mode, environment).await,
3496 RepositoryState::Remote { project_id, client } => {
3497 client
3498 .request(proto::GitReset {
3499 project_id: project_id.0,
3500 repository_id: id.to_proto(),
3501 commit,
3502 mode: match reset_mode {
3503 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3504 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3505 },
3506 })
3507 .await?;
3508
3509 Ok(())
3510 }
3511 }
3512 })
3513 }
3514
3515 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3516 let id = self.id;
3517 self.send_job(None, move |git_repo, _cx| async move {
3518 match git_repo {
3519 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3520 RepositoryState::Remote { project_id, client } => {
3521 let resp = client
3522 .request(proto::GitShow {
3523 project_id: project_id.0,
3524 repository_id: id.to_proto(),
3525 commit,
3526 })
3527 .await?;
3528
3529 Ok(CommitDetails {
3530 sha: resp.sha.into(),
3531 message: resp.message.into(),
3532 commit_timestamp: resp.commit_timestamp,
3533 author_email: resp.author_email.into(),
3534 author_name: resp.author_name.into(),
3535 })
3536 }
3537 }
3538 })
3539 }
3540
3541 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3542 let id = self.id;
3543 self.send_job(None, move |git_repo, cx| async move {
3544 match git_repo {
3545 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3546 RepositoryState::Remote {
3547 client, project_id, ..
3548 } => {
3549 let response = client
3550 .request(proto::LoadCommitDiff {
3551 project_id: project_id.0,
3552 repository_id: id.to_proto(),
3553 commit,
3554 })
3555 .await?;
3556 Ok(CommitDiff {
3557 files: response
3558 .files
3559 .into_iter()
3560 .map(|file| CommitFile {
3561 path: Path::new(&file.path).into(),
3562 old_text: file.old_text,
3563 new_text: file.new_text,
3564 })
3565 .collect(),
3566 })
3567 }
3568 }
3569 })
3570 }
3571
3572 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3573 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3574 }
3575
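    /// Stages the given paths, first saving any open buffers for those paths that
    /// still exist on disk so the staged content matches what is in the editor.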
3576 pub fn stage_entries(
3577 &self,
3578 entries: Vec<RepoPath>,
3579 cx: &mut Context<Self>,
3580 ) -> Task<anyhow::Result<()>> {
3581 if entries.is_empty() {
3582 return Task::ready(Ok(()));
3583 }
3584 let id = self.id;
3585
3586 let mut save_futures = Vec::new();
3587 if let Some(buffer_store) = self.buffer_store(cx) {
3588 buffer_store.update(cx, |buffer_store, cx| {
3589 for path in &entries {
3590 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3591 continue;
3592 };
3593 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3594 && buffer
3595 .read(cx)
3596 .file()
3597 .is_some_and(|file| file.disk_state().exists())
3598 {
3599 save_futures.push(buffer_store.save_buffer(buffer, cx));
3600 }
3601 }
3602 })
3603 }
3604
3605 cx.spawn(async move |this, cx| {
3606 for save_future in save_futures {
3607 save_future.await?;
3608 }
3609
3610 this.update(cx, |this, _| {
3611 this.send_job(None, move |git_repo, _cx| async move {
3612 match git_repo {
3613 RepositoryState::Local {
3614 backend,
3615 environment,
3616 ..
3617 } => backend.stage_paths(entries, environment.clone()).await,
3618 RepositoryState::Remote { project_id, client } => {
3619 client
3620 .request(proto::Stage {
3621 project_id: project_id.0,
3622 repository_id: id.to_proto(),
3623 paths: entries
3624 .into_iter()
3625 .map(|repo_path| repo_path.as_ref().to_proto())
3626 .collect(),
3627 })
3628 .await
3629 .context("sending stage request")?;
3630
3631 Ok(())
3632 }
3633 }
3634 })
3635 })?
3636 .await??;
3637
3638 Ok(())
3639 })
3640 }
3641
3642 pub fn unstage_entries(
3643 &self,
3644 entries: Vec<RepoPath>,
3645 cx: &mut Context<Self>,
3646 ) -> Task<anyhow::Result<()>> {
3647 if entries.is_empty() {
3648 return Task::ready(Ok(()));
3649 }
3650 let id = self.id;
3651
3652 let mut save_futures = Vec::new();
3653 if let Some(buffer_store) = self.buffer_store(cx) {
3654 buffer_store.update(cx, |buffer_store, cx| {
3655 for path in &entries {
3656 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3657 continue;
3658 };
3659 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3660 && buffer
3661 .read(cx)
3662 .file()
3663 .is_some_and(|file| file.disk_state().exists())
3664 {
3665 save_futures.push(buffer_store.save_buffer(buffer, cx));
3666 }
3667 }
3668 })
3669 }
3670
3671 cx.spawn(async move |this, cx| {
3672 for save_future in save_futures {
3673 save_future.await?;
3674 }
3675
3676 this.update(cx, |this, _| {
3677 this.send_job(None, move |git_repo, _cx| async move {
3678 match git_repo {
3679 RepositoryState::Local {
3680 backend,
3681 environment,
3682 ..
3683 } => backend.unstage_paths(entries, environment).await,
3684 RepositoryState::Remote { project_id, client } => {
3685 client
3686 .request(proto::Unstage {
3687 project_id: project_id.0,
3688 repository_id: id.to_proto(),
3689 paths: entries
3690 .into_iter()
3691 .map(|repo_path| repo_path.as_ref().to_proto())
3692 .collect(),
3693 })
3694 .await
3695 .context("sending unstage request")?;
3696
3697 Ok(())
3698 }
3699 }
3700 })
3701 })?
3702 .await??;
3703
3704 Ok(())
3705 })
3706 }
3707
3708 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3709 let to_stage = self
3710 .cached_status()
3711 .filter(|entry| !entry.status.staging().is_fully_staged())
3712 .map(|entry| entry.repo_path)
3713 .collect();
3714 self.stage_entries(to_stage, cx)
3715 }
3716
3717 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3718 let to_unstage = self
3719 .cached_status()
3720 .filter(|entry| entry.status.staging().has_staged())
3721 .map(|entry| entry.repo_path)
3722 .collect();
3723 self.unstage_entries(to_unstage, cx)
3724 }
3725
3726 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3727 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3728
3729 self.stash_entries(to_stash, cx)
3730 }
3731
3732 pub fn stash_entries(
3733 &mut self,
3734 entries: Vec<RepoPath>,
3735 cx: &mut Context<Self>,
3736 ) -> Task<anyhow::Result<()>> {
3737 let id = self.id;
3738
3739 cx.spawn(async move |this, cx| {
3740 this.update(cx, |this, _| {
3741 this.send_job(None, move |git_repo, _cx| async move {
3742 match git_repo {
3743 RepositoryState::Local {
3744 backend,
3745 environment,
3746 ..
3747 } => backend.stash_paths(entries, environment).await,
3748 RepositoryState::Remote { project_id, client } => {
3749 client
3750 .request(proto::Stash {
3751 project_id: project_id.0,
3752 repository_id: id.to_proto(),
3753 paths: entries
3754 .into_iter()
3755 .map(|repo_path| repo_path.as_ref().to_proto())
3756 .collect(),
3757 })
3758 .await
3759 .context("sending stash request")?;
3760 Ok(())
3761 }
3762 }
3763 })
3764 })?
3765 .await??;
3766 Ok(())
3767 })
3768 }
3769
3770 pub fn stash_pop(
3771 &mut self,
3772 index: Option<usize>,
3773 cx: &mut Context<Self>,
3774 ) -> Task<anyhow::Result<()>> {
3775 let id = self.id;
3776 cx.spawn(async move |this, cx| {
3777 this.update(cx, |this, _| {
3778 this.send_job(None, move |git_repo, _cx| async move {
3779 match git_repo {
3780 RepositoryState::Local {
3781 backend,
3782 environment,
3783 ..
3784 } => backend.stash_pop(index, environment).await,
3785 RepositoryState::Remote { project_id, client } => {
3786 client
3787 .request(proto::StashPop {
3788 project_id: project_id.0,
3789 repository_id: id.to_proto(),
3790 stash_index: index.map(|i| i as u64),
3791 })
3792 .await
3793 .context("sending stash pop request")?;
3794 Ok(())
3795 }
3796 }
3797 })
3798 })?
3799 .await??;
3800 Ok(())
3801 })
3802 }
3803
3804 pub fn stash_apply(
3805 &mut self,
3806 index: Option<usize>,
3807 cx: &mut Context<Self>,
3808 ) -> Task<anyhow::Result<()>> {
3809 let id = self.id;
3810 cx.spawn(async move |this, cx| {
3811 this.update(cx, |this, _| {
3812 this.send_job(None, move |git_repo, _cx| async move {
3813 match git_repo {
3814 RepositoryState::Local {
3815 backend,
3816 environment,
3817 ..
3818 } => backend.stash_apply(index, environment).await,
3819 RepositoryState::Remote { project_id, client } => {
3820 client
3821 .request(proto::StashApply {
3822 project_id: project_id.0,
3823 repository_id: id.to_proto(),
3824 stash_index: index.map(|i| i as u64),
3825 })
3826 .await
3827 .context("sending stash apply request")?;
3828 Ok(())
3829 }
3830 }
3831 })
3832 })?
3833 .await??;
3834 Ok(())
3835 })
3836 }
3837
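    /// Drops the stash entry at `index`, or the most recent entry when `None`. For local
    /// repositories the cached stash list is reloaded afterwards and any downstream
    /// clients are sent the updated snapshot.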
3838 pub fn stash_drop(
3839 &mut self,
3840 index: Option<usize>,
3841 cx: &mut Context<Self>,
3842 ) -> oneshot::Receiver<anyhow::Result<()>> {
3843 let id = self.id;
3844 let updates_tx = self
3845 .git_store()
3846 .and_then(|git_store| match &git_store.read(cx).state {
3847 GitStoreState::Local { downstream, .. } => downstream
3848 .as_ref()
3849 .map(|downstream| downstream.updates_tx.clone()),
3850 _ => None,
3851 });
3852 let this = cx.weak_entity();
3853 self.send_job(None, move |git_repo, mut cx| async move {
3854 match git_repo {
3855 RepositoryState::Local {
3856 backend,
3857 environment,
3858 ..
3859 } => {
3860 let result = backend.stash_drop(index, environment).await;
3861 if result.is_ok()
3862 && let Ok(stash_entries) = backend.stash_entries().await
3863 {
3864 let snapshot = this.update(&mut cx, |this, cx| {
3865 this.snapshot.stash_entries = stash_entries;
3866 let snapshot = this.snapshot.clone();
3867 cx.emit(RepositoryEvent::Updated {
3868 full_scan: false,
3869 new_instance: false,
3870 });
3871 snapshot
3872 })?;
3873 if let Some(updates_tx) = updates_tx {
3874 updates_tx
3875 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3876 .ok();
3877 }
3878 }
3879
3880 result
3881 }
3882 RepositoryState::Remote { project_id, client } => {
3883 client
3884 .request(proto::StashDrop {
3885 project_id: project_id.0,
3886 repository_id: id.to_proto(),
3887 stash_index: index.map(|i| i as u64),
3888 })
3889 .await
3890                            .context("sending stash drop request")?;
3891 Ok(())
3892 }
3893 }
3894 })
3895 }
3896
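    /// Commits the currently staged changes with the given message, optionally overriding
    /// the author name and email.
    ///
    /// A minimal usage sketch (illustrative only; assumes a `repo: Entity<Repository>`
    /// handle and that `CommitOptions` exposes just the `amend`/`signoff` flags used below):
    ///
    /// ```ignore
    /// let _done = repo.update(cx, |repo, cx| {
    ///     repo.commit(
    ///         "Fix typo in README".into(),
    ///         None,
    ///         CommitOptions { amend: false, signoff: false },
    ///         cx,
    ///     )
    /// });
    /// ```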
3897 pub fn commit(
3898 &mut self,
3899 message: SharedString,
3900 name_and_email: Option<(SharedString, SharedString)>,
3901 options: CommitOptions,
3902 _cx: &mut App,
3903 ) -> oneshot::Receiver<Result<()>> {
3904 let id = self.id;
3905
3906 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3907 match git_repo {
3908 RepositoryState::Local {
3909 backend,
3910 environment,
3911 ..
3912 } => {
3913 backend
3914 .commit(message, name_and_email, options, environment)
3915 .await
3916 }
3917 RepositoryState::Remote { project_id, client } => {
3918 let (name, email) = name_and_email.unzip();
3919 client
3920 .request(proto::Commit {
3921 project_id: project_id.0,
3922 repository_id: id.to_proto(),
3923 message: String::from(message),
3924 name: name.map(String::from),
3925 email: email.map(String::from),
3926 options: Some(proto::commit::CommitOptions {
3927 amend: options.amend,
3928 signoff: options.signoff,
3929 }),
3930 })
3931 .await
3932 .context("sending commit request")?;
3933
3934 Ok(())
3935 }
3936 }
3937 })
3938 }
3939
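    /// Runs `git fetch` with the given options. For remote projects, the askpass delegate
    /// is registered under a fresh id so that credential prompts can be relayed back to
    /// this client while the request is in flight.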
3940 pub fn fetch(
3941 &mut self,
3942 fetch_options: FetchOptions,
3943 askpass: AskPassDelegate,
3944 _cx: &mut App,
3945 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3946 let askpass_delegates = self.askpass_delegates.clone();
3947 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3948 let id = self.id;
3949
3950 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3951 match git_repo {
3952 RepositoryState::Local {
3953 backend,
3954 environment,
3955 ..
3956 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3957 RepositoryState::Remote { project_id, client } => {
3958 askpass_delegates.lock().insert(askpass_id, askpass);
3959 let _defer = util::defer(|| {
3960 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3961 debug_assert!(askpass_delegate.is_some());
3962 });
3963
3964 let response = client
3965 .request(proto::Fetch {
3966 project_id: project_id.0,
3967 repository_id: id.to_proto(),
3968 askpass_id,
3969 remote: fetch_options.to_proto(),
3970 })
3971 .await
3972 .context("sending fetch request")?;
3973
3974 Ok(RemoteCommandOutput {
3975 stdout: response.stdout,
3976 stderr: response.stderr,
3977 })
3978 }
3979 }
3980 })
3981 }
3982
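    /// Pushes `branch` to `remote`. On success for a local repository, the head branch is
    /// re-read and an updated snapshot is sent to any downstream clients.
    ///
    /// A usage sketch (illustrative only; `askpass` is assumed to be an `AskPassDelegate`
    /// constructed elsewhere):
    ///
    /// ```ignore
    /// let _output = repo.update(cx, |repo, cx| {
    ///     repo.push("main".into(), "origin".into(), None, askpass, cx)
    /// });
    /// ```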
3983 pub fn push(
3984 &mut self,
3985 branch: SharedString,
3986 remote: SharedString,
3987 options: Option<PushOptions>,
3988 askpass: AskPassDelegate,
3989 cx: &mut Context<Self>,
3990 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3991 let askpass_delegates = self.askpass_delegates.clone();
3992 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3993 let id = self.id;
3994
3995 let args = options
3996 .map(|option| match option {
3997 PushOptions::SetUpstream => " --set-upstream",
3998 PushOptions::Force => " --force-with-lease",
3999 })
4000 .unwrap_or("");
4001
4002 let updates_tx = self
4003 .git_store()
4004 .and_then(|git_store| match &git_store.read(cx).state {
4005 GitStoreState::Local { downstream, .. } => downstream
4006 .as_ref()
4007 .map(|downstream| downstream.updates_tx.clone()),
4008 _ => None,
4009 });
4010
4011 let this = cx.weak_entity();
4012 self.send_job(
4013            Some(format!("git push{} {} {}", args, remote, branch).into()),
4014 move |git_repo, mut cx| async move {
4015 match git_repo {
4016 RepositoryState::Local {
4017 backend,
4018 environment,
4019 ..
4020 } => {
4021 let result = backend
4022 .push(
4023 branch.to_string(),
4024 remote.to_string(),
4025 options,
4026 askpass,
4027 environment.clone(),
4028 cx.clone(),
4029 )
4030 .await;
4031 if result.is_ok() {
4032 let branches = backend.branches().await?;
4033 let branch = branches.into_iter().find(|branch| branch.is_head);
4034 log::info!("head branch after scan is {branch:?}");
4035 let snapshot = this.update(&mut cx, |this, cx| {
4036 this.snapshot.branch = branch;
4037 let snapshot = this.snapshot.clone();
4038 cx.emit(RepositoryEvent::Updated {
4039 full_scan: false,
4040 new_instance: false,
4041 });
4042 snapshot
4043 })?;
4044 if let Some(updates_tx) = updates_tx {
4045 updates_tx
4046 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4047 .ok();
4048 }
4049 }
4050 result
4051 }
4052 RepositoryState::Remote { project_id, client } => {
4053 askpass_delegates.lock().insert(askpass_id, askpass);
4054 let _defer = util::defer(|| {
4055 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4056 debug_assert!(askpass_delegate.is_some());
4057 });
4058 let response = client
4059 .request(proto::Push {
4060 project_id: project_id.0,
4061 repository_id: id.to_proto(),
4062 askpass_id,
4063 branch_name: branch.to_string(),
4064 remote_name: remote.to_string(),
4065 options: options.map(|options| match options {
4066 PushOptions::Force => proto::push::PushOptions::Force,
4067 PushOptions::SetUpstream => {
4068 proto::push::PushOptions::SetUpstream
4069 }
4070 }
4071 as i32),
4072 })
4073 .await
4074 .context("sending push request")?;
4075
4076 Ok(RemoteCommandOutput {
4077 stdout: response.stdout,
4078 stderr: response.stderr,
4079 })
4080 }
4081 }
4082 },
4083 )
4084 }
4085
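    /// Pulls `branch` from `remote`, relaying credential prompts through the askpass
    /// delegate in the same way as [`Self::fetch`] and [`Self::push`].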
4086 pub fn pull(
4087 &mut self,
4088 branch: SharedString,
4089 remote: SharedString,
4090 askpass: AskPassDelegate,
4091 _cx: &mut App,
4092 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4093 let askpass_delegates = self.askpass_delegates.clone();
4094 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4095 let id = self.id;
4096
4097 self.send_job(
4098 Some(format!("git pull {} {}", remote, branch).into()),
4099 move |git_repo, cx| async move {
4100 match git_repo {
4101 RepositoryState::Local {
4102 backend,
4103 environment,
4104 ..
4105 } => {
4106 backend
4107 .pull(
4108 branch.to_string(),
4109 remote.to_string(),
4110 askpass,
4111 environment.clone(),
4112 cx,
4113 )
4114 .await
4115 }
4116 RepositoryState::Remote { project_id, client } => {
4117 askpass_delegates.lock().insert(askpass_id, askpass);
4118 let _defer = util::defer(|| {
4119 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4120 debug_assert!(askpass_delegate.is_some());
4121 });
4122 let response = client
4123 .request(proto::Pull {
4124 project_id: project_id.0,
4125 repository_id: id.to_proto(),
4126 askpass_id,
4127 branch_name: branch.to_string(),
4128 remote_name: remote.to_string(),
4129 })
4130 .await
4131 .context("sending pull request")?;
4132
4133 Ok(RemoteCommandOutput {
4134 stdout: response.stdout,
4135 stderr: response.stderr,
4136 })
4137 }
4138 }
4139 },
4140 )
4141 }
4142
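    /// Queues a write of `content` to the index entry for `path`. The job is keyed on the
    /// path so that rapid successive writes to the same entry are coalesced. When
    /// `hunk_staging_operation_count` is provided, it is recorded on the buffer's diff
    /// state after the write lands, so later index reads can be reconciled with any hunk
    /// staging operations performed in the meantime.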
4143 fn spawn_set_index_text_job(
4144 &mut self,
4145 path: RepoPath,
4146 content: Option<String>,
4147 hunk_staging_operation_count: Option<usize>,
4148 cx: &mut Context<Self>,
4149 ) -> oneshot::Receiver<anyhow::Result<()>> {
4150 let id = self.id;
4151 let this = cx.weak_entity();
4152 let git_store = self.git_store.clone();
4153 self.send_keyed_job(
4154 Some(GitJobKey::WriteIndex(path.clone())),
4155 None,
4156 move |git_repo, mut cx| async move {
4157 log::debug!("start updating index text for buffer {}", path.display());
4158 match git_repo {
4159 RepositoryState::Local {
4160 backend,
4161 environment,
4162 ..
4163 } => {
4164 backend
4165 .set_index_text(path.clone(), content, environment.clone())
4166 .await?;
4167 }
4168 RepositoryState::Remote { project_id, client } => {
4169 client
4170 .request(proto::SetIndexText {
4171 project_id: project_id.0,
4172 repository_id: id.to_proto(),
4173 path: path.as_ref().to_proto(),
4174 text: content,
4175 })
4176 .await?;
4177 }
4178 }
4179 log::debug!("finish updating index text for buffer {}", path.display());
4180
4181 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4182 let project_path = this
4183 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4184 .ok()
4185 .flatten();
4186 git_store.update(&mut cx, |git_store, cx| {
4187 let buffer_id = git_store
4188 .buffer_store
4189 .read(cx)
4190 .get_by_path(&project_path?)?
4191 .read(cx)
4192 .remote_id();
4193 let diff_state = git_store.diffs.get(&buffer_id)?;
4194 diff_state.update(cx, |diff_state, _| {
4195 diff_state.hunk_staging_operation_count_as_of_write =
4196 hunk_staging_operation_count;
4197 });
4198 Some(())
4199 })?;
4200 }
4201 Ok(())
4202 },
4203 )
4204 }
4205
4206 pub fn get_remotes(
4207 &mut self,
4208 branch_name: Option<String>,
4209 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4210 let id = self.id;
4211 self.send_job(None, move |repo, _cx| async move {
4212 match repo {
4213 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4214 RepositoryState::Remote { project_id, client } => {
4215 let response = client
4216 .request(proto::GetRemotes {
4217 project_id: project_id.0,
4218 repository_id: id.to_proto(),
4219 branch_name,
4220 })
4221 .await?;
4222
4223 let remotes = response
4224 .remotes
4225 .into_iter()
4226                        .map(|remote| git::repository::Remote {
4227                            name: remote.name.into(),
4228                        })
4229 .collect();
4230
4231 Ok(remotes)
4232 }
4233 }
4234 })
4235 }
4236
4237 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4238 let id = self.id;
4239 self.send_job(None, move |repo, _| async move {
4240 match repo {
4241 RepositoryState::Local { backend, .. } => backend.branches().await,
4242 RepositoryState::Remote { project_id, client } => {
4243 let response = client
4244 .request(proto::GitGetBranches {
4245 project_id: project_id.0,
4246 repository_id: id.to_proto(),
4247 })
4248 .await?;
4249
4250 let branches = response
4251 .branches
4252 .into_iter()
4253 .map(|branch| proto_to_branch(&branch))
4254 .collect();
4255
4256 Ok(branches)
4257 }
4258 }
4259 })
4260 }
4261
4262 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4263 let id = self.id;
4264 self.send_job(None, move |repo, _| async move {
4265 match repo {
4266 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4267 RepositoryState::Remote { project_id, client } => {
4268 let response = client
4269 .request(proto::GetDefaultBranch {
4270 project_id: project_id.0,
4271 repository_id: id.to_proto(),
4272 })
4273 .await?;
4274
4275 anyhow::Ok(response.branch.map(SharedString::from))
4276 }
4277 }
4278 })
4279 }
4280
4281 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4282 let id = self.id;
4283 self.send_job(None, move |repo, _cx| async move {
4284 match repo {
4285 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4286 RepositoryState::Remote { project_id, client } => {
4287 let response = client
4288 .request(proto::GitDiff {
4289 project_id: project_id.0,
4290 repository_id: id.to_proto(),
4291 diff_type: match diff_type {
4292 DiffType::HeadToIndex => {
4293 proto::git_diff::DiffType::HeadToIndex.into()
4294 }
4295 DiffType::HeadToWorktree => {
4296 proto::git_diff::DiffType::HeadToWorktree.into()
4297 }
4298 },
4299 })
4300 .await?;
4301
4302 Ok(response.diff)
4303 }
4304 }
4305 })
4306 }
4307
4308 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4309 let id = self.id;
4310 self.send_job(
4311 Some(format!("git switch -c {branch_name}").into()),
4312 move |repo, _cx| async move {
4313 match repo {
4314 RepositoryState::Local { backend, .. } => {
4315 backend.create_branch(branch_name).await
4316 }
4317 RepositoryState::Remote { project_id, client } => {
4318 client
4319 .request(proto::GitCreateBranch {
4320 project_id: project_id.0,
4321 repository_id: id.to_proto(),
4322 branch_name,
4323 })
4324 .await?;
4325
4326 Ok(())
4327 }
4328 }
4329 },
4330 )
4331 }
4332
4333 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4334 let id = self.id;
4335 self.send_job(
4336 Some(format!("git switch {branch_name}").into()),
4337 move |repo, _cx| async move {
4338 match repo {
4339 RepositoryState::Local { backend, .. } => {
4340 backend.change_branch(branch_name).await
4341 }
4342 RepositoryState::Remote { project_id, client } => {
4343 client
4344 .request(proto::GitChangeBranch {
4345 project_id: project_id.0,
4346 repository_id: id.to_proto(),
4347 branch_name,
4348 })
4349 .await?;
4350
4351 Ok(())
4352 }
4353 }
4354 },
4355 )
4356 }
4357
4358 pub fn rename_branch(
4359 &mut self,
4360 branch: String,
4361 new_name: String,
4362 ) -> oneshot::Receiver<Result<()>> {
4363 let id = self.id;
4364 self.send_job(
4365 Some(format!("git branch -m {branch} {new_name}").into()),
4366 move |repo, _cx| async move {
4367 match repo {
4368 RepositoryState::Local { backend, .. } => {
4369 backend.rename_branch(branch, new_name).await
4370 }
4371 RepositoryState::Remote { project_id, client } => {
4372 client
4373 .request(proto::GitRenameBranch {
4374 project_id: project_id.0,
4375 repository_id: id.to_proto(),
4376 branch,
4377 new_name,
4378 })
4379 .await?;
4380
4381 Ok(())
4382 }
4383 }
4384 },
4385 )
4386 }
4387
4388 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4389 let id = self.id;
4390 self.send_job(None, move |repo, _cx| async move {
4391 match repo {
4392 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4393 RepositoryState::Remote { project_id, client } => {
4394 let response = client
4395 .request(proto::CheckForPushedCommits {
4396 project_id: project_id.0,
4397 repository_id: id.to_proto(),
4398 })
4399 .await?;
4400
4401 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4402
4403 Ok(branches)
4404 }
4405 }
4406 })
4407 }
4408
4409 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4410 self.send_job(None, |repo, _cx| async move {
4411 match repo {
4412 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4413 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4414 }
4415 })
4416 }
4417
4418 pub fn restore_checkpoint(
4419 &mut self,
4420 checkpoint: GitRepositoryCheckpoint,
4421 ) -> oneshot::Receiver<Result<()>> {
4422 self.send_job(None, move |repo, _cx| async move {
4423 match repo {
4424 RepositoryState::Local { backend, .. } => {
4425 backend.restore_checkpoint(checkpoint).await
4426 }
4427 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4428 }
4429 })
4430 }
4431
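    /// Applies a repository update received from the host, replacing the branch, head
    /// commit, merge state, stash entries, and per-path statuses in the local snapshot.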
4432 pub(crate) fn apply_remote_update(
4433 &mut self,
4434 update: proto::UpdateRepository,
4435 is_new: bool,
4436 cx: &mut Context<Self>,
4437 ) -> Result<()> {
4438 let conflicted_paths = TreeSet::from_ordered_entries(
4439 update
4440 .current_merge_conflicts
4441 .into_iter()
4442 .map(|path| RepoPath(Path::new(&path).into())),
4443 );
4444 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4445 self.snapshot.head_commit = update
4446 .head_commit_details
4447 .as_ref()
4448 .map(proto_to_commit_details);
4449
4450 self.snapshot.merge.conflicted_paths = conflicted_paths;
4451 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4452 self.snapshot.stash_entries = GitStash {
4453 entries: update
4454 .stash_entries
4455 .iter()
4456 .filter_map(|entry| proto_to_stash(entry).ok())
4457 .collect(),
4458 };
4459
4460 let edits = update
4461 .removed_statuses
4462 .into_iter()
4463 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
4464 .chain(
4465 update
4466 .updated_statuses
4467 .into_iter()
4468 .filter_map(|updated_status| {
4469 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4470 }),
4471 )
4472 .collect::<Vec<_>>();
4473 self.snapshot.statuses_by_path.edit(edits, &());
4474 if update.is_last_update {
4475 self.snapshot.scan_id = update.scan_id;
4476 }
4477 cx.emit(RepositoryEvent::Updated {
4478 full_scan: true,
4479 new_instance: is_new,
4480 });
4481 Ok(())
4482 }
4483
4484 pub fn compare_checkpoints(
4485 &mut self,
4486 left: GitRepositoryCheckpoint,
4487 right: GitRepositoryCheckpoint,
4488 ) -> oneshot::Receiver<Result<bool>> {
4489 self.send_job(None, move |repo, _cx| async move {
4490 match repo {
4491 RepositoryState::Local { backend, .. } => {
4492 backend.compare_checkpoints(left, right).await
4493 }
4494 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4495 }
4496 })
4497 }
4498
4499 pub fn diff_checkpoints(
4500 &mut self,
4501 base_checkpoint: GitRepositoryCheckpoint,
4502 target_checkpoint: GitRepositoryCheckpoint,
4503 ) -> oneshot::Receiver<Result<String>> {
4504 self.send_job(None, move |repo, _cx| async move {
4505 match repo {
4506 RepositoryState::Local { backend, .. } => {
4507 backend
4508 .diff_checkpoints(base_checkpoint, target_checkpoint)
4509 .await
4510 }
4511 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4512 }
4513 })
4514 }
4515
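    /// Schedules a full git status scan as a keyed job, so that repeated requests collapse
    /// into a single scan. The resulting snapshot is broadcast downstream when an update
    /// channel is provided.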
4516 fn schedule_scan(
4517 &mut self,
4518 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4519 cx: &mut Context<Self>,
4520 ) {
4521 let this = cx.weak_entity();
4522 let _ = self.send_keyed_job(
4523 Some(GitJobKey::ReloadGitState),
4524 None,
4525 |state, mut cx| async move {
4526 log::debug!("run scheduled git status scan");
4527
4528 let Some(this) = this.upgrade() else {
4529 return Ok(());
4530 };
4531 let RepositoryState::Local { backend, .. } = state else {
4532 bail!("not a local repository")
4533 };
4534 let (snapshot, events) = this
4535 .update(&mut cx, |this, _| {
4536 this.paths_needing_status_update.clear();
4537 compute_snapshot(
4538 this.id,
4539 this.work_directory_abs_path.clone(),
4540 this.snapshot.clone(),
4541 backend.clone(),
4542 )
4543 })?
4544 .await?;
4545 this.update(&mut cx, |this, cx| {
4546 this.snapshot = snapshot.clone();
4547 for event in events {
4548 cx.emit(event);
4549 }
4550 })?;
4551 if let Some(updates_tx) = updates_tx {
4552 updates_tx
4553 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4554 .ok();
4555 }
4556 Ok(())
4557 },
4558 );
4559 }
4560
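    /// Spawns the worker loop that executes git jobs against a local repository, opening
    /// the repository backend and resolving the working-directory environment up front.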
4561 fn spawn_local_git_worker(
4562 work_directory_abs_path: Arc<Path>,
4563 dot_git_abs_path: Arc<Path>,
4564 _repository_dir_abs_path: Arc<Path>,
4565 _common_dir_abs_path: Arc<Path>,
4566 project_environment: WeakEntity<ProjectEnvironment>,
4567 fs: Arc<dyn Fs>,
4568 cx: &mut Context<Self>,
4569 ) -> mpsc::UnboundedSender<GitJob> {
4570 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4571
4572 cx.spawn(async move |_, cx| {
4573 let environment = project_environment
4574 .upgrade()
4575 .context("missing project environment")?
4576 .update(cx, |project_environment, cx| {
4577 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4578 })?
4579 .await
4580 .unwrap_or_else(|| {
4581 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4582 HashMap::default()
4583 });
4584 let backend = cx
4585 .background_spawn(async move {
4586 fs.open_repo(&dot_git_abs_path)
4587 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4588 })
4589 .await?;
4590
4591 if let Some(git_hosting_provider_registry) =
4592 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4593 {
4594 git_hosting_providers::register_additional_providers(
4595 git_hosting_provider_registry,
4596 backend.clone(),
4597 );
4598 }
4599
4600 let state = RepositoryState::Local {
4601 backend,
4602 environment: Arc::new(environment),
4603 };
4604 let mut jobs = VecDeque::new();
4605 loop {
4606 while let Ok(Some(next_job)) = job_rx.try_next() {
4607 jobs.push_back(next_job);
4608 }
4609
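                // If a newer job with the same key is already queued, skip this one; only
                // the most recent keyed job needs to run.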
4610 if let Some(job) = jobs.pop_front() {
4611 if let Some(current_key) = &job.key
4612 && jobs
4613 .iter()
4614 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4615 {
4616 continue;
4617 }
4618 (job.job)(state.clone(), cx).await;
4619 } else if let Some(job) = job_rx.next().await {
4620 jobs.push_back(job);
4621 } else {
4622 break;
4623 }
4624 }
4625 anyhow::Ok(())
4626 })
4627 .detach_and_log_err(cx);
4628
4629 job_tx
4630 }
4631
4632 fn spawn_remote_git_worker(
4633 project_id: ProjectId,
4634 client: AnyProtoClient,
4635 cx: &mut Context<Self>,
4636 ) -> mpsc::UnboundedSender<GitJob> {
4637 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4638
4639 cx.spawn(async move |_, cx| {
4640 let state = RepositoryState::Remote { project_id, client };
4641 let mut jobs = VecDeque::new();
4642 loop {
4643 while let Ok(Some(next_job)) = job_rx.try_next() {
4644 jobs.push_back(next_job);
4645 }
4646
4647 if let Some(job) = jobs.pop_front() {
4648 if let Some(current_key) = &job.key
4649 && jobs
4650 .iter()
4651 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4652 {
4653 continue;
4654 }
4655 (job.job)(state.clone(), cx).await;
4656 } else if let Some(job) = job_rx.next().await {
4657 jobs.push_back(job);
4658 } else {
4659 break;
4660 }
4661 }
4662 anyhow::Ok(())
4663 })
4664 .detach_and_log_err(cx);
4665
4666 job_tx
4667 }
4668
4669 fn load_staged_text(
4670 &mut self,
4671 buffer_id: BufferId,
4672 repo_path: RepoPath,
4673 cx: &App,
4674 ) -> Task<Result<Option<String>>> {
4675 let rx = self.send_job(None, move |state, _| async move {
4676 match state {
4677 RepositoryState::Local { backend, .. } => {
4678 anyhow::Ok(backend.load_index_text(repo_path).await)
4679 }
4680 RepositoryState::Remote { project_id, client } => {
4681 let response = client
4682 .request(proto::OpenUnstagedDiff {
4683 project_id: project_id.to_proto(),
4684 buffer_id: buffer_id.to_proto(),
4685 })
4686 .await?;
4687 Ok(response.staged_text)
4688 }
4689 }
4690 });
4691 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4692 }
4693
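    /// Loads the committed (HEAD) and staged (index) texts for the buffer, collapsing them
    /// into a single value when they are identical so callers can share one diff base.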
4694 fn load_committed_text(
4695 &mut self,
4696 buffer_id: BufferId,
4697 repo_path: RepoPath,
4698 cx: &App,
4699 ) -> Task<Result<DiffBasesChange>> {
4700 let rx = self.send_job(None, move |state, _| async move {
4701 match state {
4702 RepositoryState::Local { backend, .. } => {
4703 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4704 let staged_text = backend.load_index_text(repo_path).await;
4705 let diff_bases_change = if committed_text == staged_text {
4706 DiffBasesChange::SetBoth(committed_text)
4707 } else {
4708 DiffBasesChange::SetEach {
4709 index: staged_text,
4710 head: committed_text,
4711 }
4712 };
4713 anyhow::Ok(diff_bases_change)
4714 }
4715 RepositoryState::Remote { project_id, client } => {
4716 use proto::open_uncommitted_diff_response::Mode;
4717
4718 let response = client
4719 .request(proto::OpenUncommittedDiff {
4720 project_id: project_id.to_proto(),
4721 buffer_id: buffer_id.to_proto(),
4722 })
4723 .await?;
4724 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4725 let bases = match mode {
4726 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4727 Mode::IndexAndHead => DiffBasesChange::SetEach {
4728 head: response.committed_text,
4729 index: response.staged_text,
4730 },
4731 };
4732 Ok(bases)
4733 }
4734 }
4735 });
4736
4737 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4738 }
4739
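    /// Recomputes git statuses for the given paths as a keyed job, folding the results into
    /// the snapshot and notifying downstream clients only when something actually changed.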
4740 fn paths_changed(
4741 &mut self,
4742 paths: Vec<RepoPath>,
4743 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4744 cx: &mut Context<Self>,
4745 ) {
4746 self.paths_needing_status_update.extend(paths);
4747
4748 let this = cx.weak_entity();
4749 let _ = self.send_keyed_job(
4750 Some(GitJobKey::RefreshStatuses),
4751 None,
4752 |state, mut cx| async move {
4753 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4754 (
4755 this.snapshot.clone(),
4756 mem::take(&mut this.paths_needing_status_update),
4757 )
4758 })?;
4759 let RepositoryState::Local { backend, .. } = state else {
4760 bail!("not a local repository")
4761 };
4762
4763 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4764 if paths.is_empty() {
4765 return Ok(());
4766 }
4767 let statuses = backend.status(&paths).await?;
4768 let stash_entries = backend.stash_entries().await?;
4769
4770 let changed_path_statuses = cx
4771 .background_spawn(async move {
4772 let mut changed_path_statuses = Vec::new();
4773 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4774 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4775
4776 for (repo_path, status) in &*statuses.entries {
4777 changed_paths.remove(repo_path);
4778 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4779 && cursor.item().is_some_and(|entry| entry.status == *status)
4780 {
4781 continue;
4782 }
4783
4784 changed_path_statuses.push(Edit::Insert(StatusEntry {
4785 repo_path: repo_path.clone(),
4786 status: *status,
4787 }));
4788 }
4789 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4790 for path in changed_paths.into_iter() {
4791 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4792 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4793 }
4794 }
4795 changed_path_statuses
4796 })
4797 .await;
4798
4799 this.update(&mut cx, |this, cx| {
4800 let needs_update = !changed_path_statuses.is_empty()
4801 || this.snapshot.stash_entries != stash_entries;
4802 this.snapshot.stash_entries = stash_entries;
4803 if !changed_path_statuses.is_empty() {
4804 this.snapshot
4805 .statuses_by_path
4806 .edit(changed_path_statuses, &());
4807 this.snapshot.scan_id += 1;
4808 }
4809
4810 if needs_update && let Some(updates_tx) = updates_tx {
4811 updates_tx
4812 .unbounded_send(DownstreamUpdate::UpdateRepository(
4813 this.snapshot.clone(),
4814 ))
4815 .ok();
4816 }
4817 cx.emit(RepositoryEvent::Updated {
4818 full_scan: false,
4819 new_instance: false,
4820 });
4821 })
4822 },
4823 );
4824 }
4825
4826    /// Returns the currently running git command and when it started, if any.
4827 pub fn current_job(&self) -> Option<JobInfo> {
4828 self.active_jobs.values().next().cloned()
4829 }
4830
4831 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4832 self.send_job(None, |_, _| async {})
4833 }
4834}
4835
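/// Builds a permalink to a file inside a crate checked out from the Cargo registry, using
/// the `.cargo_vcs_info.json` and `Cargo.toml` metadata published alongside the crate
/// sources.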
4836fn get_permalink_in_rust_registry_src(
4837 provider_registry: Arc<GitHostingProviderRegistry>,
4838 path: PathBuf,
4839 selection: Range<u32>,
4840) -> Result<url::Url> {
4841 #[derive(Deserialize)]
4842 struct CargoVcsGit {
4843 sha1: String,
4844 }
4845
4846 #[derive(Deserialize)]
4847 struct CargoVcsInfo {
4848 git: CargoVcsGit,
4849 path_in_vcs: String,
4850 }
4851
4852 #[derive(Deserialize)]
4853 struct CargoPackage {
4854 repository: String,
4855 }
4856
4857 #[derive(Deserialize)]
4858 struct CargoToml {
4859 package: CargoPackage,
4860 }
4861
4862 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4863 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4864 Some((dir, json))
4865 }) else {
4866 bail!("No .cargo_vcs_info.json found in parent directories")
4867 };
4868 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4869 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4870 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4871 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4872 .context("parsing package.repository field of manifest")?;
4873 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4874 let permalink = provider.build_permalink(
4875 remote,
4876 BuildPermalinkParams {
4877 sha: &cargo_vcs_info.git.sha1,
4878 path: &path.to_string_lossy(),
4879 selection: Some(selection),
4880 },
4881 );
4882 Ok(permalink)
4883}
4884
4885fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4886 let Some(blame) = blame else {
4887 return proto::BlameBufferResponse {
4888 blame_response: None,
4889 };
4890 };
4891
4892 let entries = blame
4893 .entries
4894 .into_iter()
4895 .map(|entry| proto::BlameEntry {
4896 sha: entry.sha.as_bytes().into(),
4897 start_line: entry.range.start,
4898 end_line: entry.range.end,
4899 original_line_number: entry.original_line_number,
4900 author: entry.author,
4901 author_mail: entry.author_mail,
4902 author_time: entry.author_time,
4903 author_tz: entry.author_tz,
4904 committer: entry.committer_name,
4905 committer_mail: entry.committer_email,
4906 committer_time: entry.committer_time,
4907 committer_tz: entry.committer_tz,
4908 summary: entry.summary,
4909 previous: entry.previous,
4910 filename: entry.filename,
4911 })
4912 .collect::<Vec<_>>();
4913
4914 let messages = blame
4915 .messages
4916 .into_iter()
4917 .map(|(oid, message)| proto::CommitMessage {
4918 oid: oid.as_bytes().into(),
4919 message,
4920 })
4921 .collect::<Vec<_>>();
4922
4923 proto::BlameBufferResponse {
4924 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4925 entries,
4926 messages,
4927 remote_url: blame.remote_url,
4928 }),
4929 }
4930}
4931
4932fn deserialize_blame_buffer_response(
4933 response: proto::BlameBufferResponse,
4934) -> Option<git::blame::Blame> {
4935 let response = response.blame_response?;
4936 let entries = response
4937 .entries
4938 .into_iter()
4939 .filter_map(|entry| {
4940 Some(git::blame::BlameEntry {
4941 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4942 range: entry.start_line..entry.end_line,
4943 original_line_number: entry.original_line_number,
4944 committer_name: entry.committer,
4945 committer_time: entry.committer_time,
4946 committer_tz: entry.committer_tz,
4947 committer_email: entry.committer_mail,
4948 author: entry.author,
4949 author_mail: entry.author_mail,
4950 author_time: entry.author_time,
4951 author_tz: entry.author_tz,
4952 summary: entry.summary,
4953 previous: entry.previous,
4954 filename: entry.filename,
4955 })
4956 })
4957 .collect::<Vec<_>>();
4958
4959 let messages = response
4960 .messages
4961 .into_iter()
4962 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4963 .collect::<HashMap<_, _>>();
4964
4965 Some(Blame {
4966 entries,
4967 messages,
4968 remote_url: response.remote_url,
4969 })
4970}
4971
4972fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4973 proto::Branch {
4974 is_head: branch.is_head,
4975 ref_name: branch.ref_name.to_string(),
4976 unix_timestamp: branch
4977 .most_recent_commit
4978 .as_ref()
4979 .map(|commit| commit.commit_timestamp as u64),
4980 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4981 ref_name: upstream.ref_name.to_string(),
4982 tracking: upstream
4983 .tracking
4984 .status()
4985 .map(|upstream| proto::UpstreamTracking {
4986 ahead: upstream.ahead as u64,
4987 behind: upstream.behind as u64,
4988 }),
4989 }),
4990 most_recent_commit: branch
4991 .most_recent_commit
4992 .as_ref()
4993 .map(|commit| proto::CommitSummary {
4994 sha: commit.sha.to_string(),
4995 subject: commit.subject.to_string(),
4996 commit_timestamp: commit.commit_timestamp,
4997 author_name: commit.author_name.to_string(),
4998 }),
4999 }
5000}
5001
5002fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5003 git::repository::Branch {
5004 is_head: proto.is_head,
5005 ref_name: proto.ref_name.clone().into(),
5006 upstream: proto
5007 .upstream
5008 .as_ref()
5009 .map(|upstream| git::repository::Upstream {
5010 ref_name: upstream.ref_name.to_string().into(),
5011 tracking: upstream
5012 .tracking
5013 .as_ref()
5014 .map(|tracking| {
5015 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5016 ahead: tracking.ahead as u32,
5017 behind: tracking.behind as u32,
5018 })
5019 })
5020 .unwrap_or(git::repository::UpstreamTracking::Gone),
5021 }),
5022 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5023 git::repository::CommitSummary {
5024 sha: commit.sha.to_string().into(),
5025 subject: commit.subject.to_string().into(),
5026 commit_timestamp: commit.commit_timestamp,
5027 author_name: commit.author_name.to_string().into(),
5028 has_parent: true,
5029 }
5030 }),
5031 }
5032}
5033
5034fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5035 proto::GitCommitDetails {
5036 sha: commit.sha.to_string(),
5037 message: commit.message.to_string(),
5038 commit_timestamp: commit.commit_timestamp,
5039 author_email: commit.author_email.to_string(),
5040 author_name: commit.author_name.to_string(),
5041 }
5042}
5043
5044fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5045 CommitDetails {
5046 sha: proto.sha.clone().into(),
5047 message: proto.message.clone().into(),
5048 commit_timestamp: proto.commit_timestamp,
5049 author_email: proto.author_email.clone().into(),
5050 author_name: proto.author_name.clone().into(),
5051 }
5052}
5053
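/// Computes a fresh snapshot of the repository's branch, head commit, statuses, merge
/// state, and stash, along with the events describing how it differs from `prev_snapshot`.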
5054async fn compute_snapshot(
5055 id: RepositoryId,
5056 work_directory_abs_path: Arc<Path>,
5057 prev_snapshot: RepositorySnapshot,
5058 backend: Arc<dyn GitRepository>,
5059) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5060 let mut events = Vec::new();
5061 let branches = backend.branches().await?;
5062 let branch = branches.into_iter().find(|branch| branch.is_head);
5063 let statuses = backend
5064 .status(std::slice::from_ref(&WORK_DIRECTORY_REPO_PATH))
5065 .await?;
5066 let stash_entries = backend.stash_entries().await?;
5067 let statuses_by_path = SumTree::from_iter(
5068 statuses
5069 .entries
5070 .iter()
5071 .map(|(repo_path, status)| StatusEntry {
5072 repo_path: repo_path.clone(),
5073 status: *status,
5074 }),
5075 &(),
5076 );
5077 let (merge_details, merge_heads_changed) =
5078 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5079 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5080
5081 if merge_heads_changed
5082 || branch != prev_snapshot.branch
5083 || statuses_by_path != prev_snapshot.statuses_by_path
5084 {
5085 events.push(RepositoryEvent::Updated {
5086 full_scan: true,
5087 new_instance: false,
5088 });
5089 }
5090
5091    // Merge conflict paths are cached so that staging or unstaging doesn't change them;
5092    // they are only refreshed when the merge heads change (at commit time, etc.).
5093 if merge_heads_changed {
5094 events.push(RepositoryEvent::MergeHeadsChanged);
5095 }
5096
5097    // Load the HEAD commit directly; useful when `branch` is `None`, as in a detached HEAD state.
5098 let head_commit = match backend.head_sha().await {
5099 Some(head_sha) => backend.show(head_sha).await.log_err(),
5100 None => None,
5101 };
5102
5103 // Used by edit prediction data collection
5104 let remote_origin_url = backend.remote_url("origin");
5105 let remote_upstream_url = backend.remote_url("upstream");
5106
5107 let snapshot = RepositorySnapshot {
5108 id,
5109 statuses_by_path,
5110 work_directory_abs_path,
5111 scan_id: prev_snapshot.scan_id + 1,
5112 branch,
5113 head_commit,
5114 merge: merge_details,
5115 remote_origin_url,
5116 remote_upstream_url,
5117 stash_entries,
5118 };
5119
5120 Ok((snapshot, events))
5121}
5122
5123fn status_from_proto(
5124 simple_status: i32,
5125 status: Option<proto::GitFileStatus>,
5126) -> anyhow::Result<FileStatus> {
5127 use proto::git_file_status::Variant;
5128
5129 let Some(variant) = status.and_then(|status| status.variant) else {
5130 let code = proto::GitStatus::from_i32(simple_status)
5131 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5132 let result = match code {
5133 proto::GitStatus::Added => TrackedStatus {
5134 worktree_status: StatusCode::Added,
5135 index_status: StatusCode::Unmodified,
5136 }
5137 .into(),
5138 proto::GitStatus::Modified => TrackedStatus {
5139 worktree_status: StatusCode::Modified,
5140 index_status: StatusCode::Unmodified,
5141 }
5142 .into(),
5143 proto::GitStatus::Conflict => UnmergedStatus {
5144 first_head: UnmergedStatusCode::Updated,
5145 second_head: UnmergedStatusCode::Updated,
5146 }
5147 .into(),
5148 proto::GitStatus::Deleted => TrackedStatus {
5149 worktree_status: StatusCode::Deleted,
5150 index_status: StatusCode::Unmodified,
5151 }
5152 .into(),
5153 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5154 };
5155 return Ok(result);
5156 };
5157
5158 let result = match variant {
5159 Variant::Untracked(_) => FileStatus::Untracked,
5160 Variant::Ignored(_) => FileStatus::Ignored,
5161 Variant::Unmerged(unmerged) => {
5162 let [first_head, second_head] =
5163 [unmerged.first_head, unmerged.second_head].map(|head| {
5164 let code = proto::GitStatus::from_i32(head)
5165 .with_context(|| format!("Invalid git status code: {head}"))?;
5166 let result = match code {
5167 proto::GitStatus::Added => UnmergedStatusCode::Added,
5168 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5169 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5170 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5171 };
5172 Ok(result)
5173 });
5174 let [first_head, second_head] = [first_head?, second_head?];
5175 UnmergedStatus {
5176 first_head,
5177 second_head,
5178 }
5179 .into()
5180 }
5181 Variant::Tracked(tracked) => {
5182 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5183 .map(|status| {
5184 let code = proto::GitStatus::from_i32(status)
5185 .with_context(|| format!("Invalid git status code: {status}"))?;
5186 let result = match code {
5187 proto::GitStatus::Modified => StatusCode::Modified,
5188 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5189 proto::GitStatus::Added => StatusCode::Added,
5190 proto::GitStatus::Deleted => StatusCode::Deleted,
5191 proto::GitStatus::Renamed => StatusCode::Renamed,
5192 proto::GitStatus::Copied => StatusCode::Copied,
5193 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5194 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5195 };
5196 Ok(result)
5197 });
5198 let [index_status, worktree_status] = [index_status?, worktree_status?];
5199 TrackedStatus {
5200 index_status,
5201 worktree_status,
5202 }
5203 .into()
5204 }
5205 };
5206 Ok(result)
5207}
5208
5209fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5210 use proto::git_file_status::{Tracked, Unmerged, Variant};
5211
5212 let variant = match status {
5213 FileStatus::Untracked => Variant::Untracked(Default::default()),
5214 FileStatus::Ignored => Variant::Ignored(Default::default()),
5215 FileStatus::Unmerged(UnmergedStatus {
5216 first_head,
5217 second_head,
5218 }) => Variant::Unmerged(Unmerged {
5219 first_head: unmerged_status_to_proto(first_head),
5220 second_head: unmerged_status_to_proto(second_head),
5221 }),
5222 FileStatus::Tracked(TrackedStatus {
5223 index_status,
5224 worktree_status,
5225 }) => Variant::Tracked(Tracked {
5226 index_status: tracked_status_to_proto(index_status),
5227 worktree_status: tracked_status_to_proto(worktree_status),
5228 }),
5229 };
5230 proto::GitFileStatus {
5231 variant: Some(variant),
5232 }
5233}
5234
5235fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5236 match code {
5237 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5238 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5239 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5240 }
5241}
5242
5243fn tracked_status_to_proto(code: StatusCode) -> i32 {
5244 match code {
5245 StatusCode::Added => proto::GitStatus::Added as _,
5246 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5247 StatusCode::Modified => proto::GitStatus::Modified as _,
5248 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5249 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5250 StatusCode::Copied => proto::GitStatus::Copied as _,
5251 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5252 }
5253}