1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::AskPassDelegate;
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, Oid, WORK_DIRECTORY_REPO_PATH,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 stash::{GitStash, StashEntry},
32 status::{
33 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
34 },
35};
36use gpui::{
37 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
38 WeakEntity,
39};
40use language::{
41 Buffer, BufferEvent, Language, LanguageRegistry,
42 proto::{deserialize_version, serialize_version},
43};
44use parking_lot::Mutex;
45use postage::stream::Stream as _;
46use rpc::{
47 AnyProtoClient, TypedEnvelope,
48 proto::{self, FromProto, ToProto, git_reset, split_repository_update},
49};
50use serde::Deserialize;
51use std::{
52 cmp::Ordering,
53 collections::{BTreeSet, VecDeque},
54 future::Future,
55 mem,
56 ops::Range,
57 path::{Path, PathBuf},
58 sync::{
59 Arc,
60 atomic::{self, AtomicU64},
61 },
62 time::Instant,
63};
64use sum_tree::{Edit, SumTree, TreeSet};
65use text::{Bias, BufferId};
66use util::{ResultExt, debug_panic, paths::SanitizedPath, post_inc};
67use worktree::{
68 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
69 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
70};
71
72pub struct GitStore {
73 state: GitStoreState,
74 buffer_store: Entity<BufferStore>,
75 worktree_store: Entity<WorktreeStore>,
76 repositories: HashMap<RepositoryId, Entity<Repository>>,
77 active_repo_id: Option<RepositoryId>,
78 #[allow(clippy::type_complexity)]
79 loading_diffs:
80 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
81 diffs: HashMap<BufferId, Entity<BufferGitState>>,
82 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
83 _subscriptions: Vec<Subscription>,
84}
85
86#[derive(Default)]
87struct SharedDiffs {
88 unstaged: Option<Entity<BufferDiff>>,
89 uncommitted: Option<Entity<BufferDiff>>,
90}
91
92struct BufferGitState {
93 unstaged_diff: Option<WeakEntity<BufferDiff>>,
94 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
95 conflict_set: Option<WeakEntity<ConflictSet>>,
96 recalculate_diff_task: Option<Task<Result<()>>>,
97 reparse_conflict_markers_task: Option<Task<Result<()>>>,
98 language: Option<Arc<Language>>,
99 language_registry: Option<Arc<LanguageRegistry>>,
100 conflict_updated_futures: Vec<oneshot::Sender<()>>,
101 recalculating_tx: postage::watch::Sender<bool>,
102
103 /// These operation counts are used to ensure that head and index text
104 /// values read from the git repository are up-to-date with any hunk staging
105 /// operations that have been performed on the BufferDiff.
106 ///
107 /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, once the new index text has been
    /// written to disk, `hunk_staging_operation_count_as_of_write` is updated
    /// to reflect the operation count that prompted the write (see the sketch
    /// following this struct).
111 hunk_staging_operation_count: usize,
112 hunk_staging_operation_count_as_of_write: usize,
113
114 head_text: Option<Arc<String>>,
115 index_text: Option<Arc<String>>,
116 head_changed: bool,
117 index_changed: bool,
118 language_changed: bool,
119}
120
121#[derive(Clone, Debug)]
122enum DiffBasesChange {
123 SetIndex(Option<String>),
124 SetHead(Option<String>),
125 SetEach {
126 index: Option<String>,
127 head: Option<String>,
128 },
129 SetBoth(Option<String>),
130}
131
132#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
133enum DiffKind {
134 Unstaged,
135 Uncommitted,
136}
137
138enum GitStoreState {
139 Local {
140 next_repository_id: Arc<AtomicU64>,
141 downstream: Option<LocalDownstreamState>,
142 project_environment: Entity<ProjectEnvironment>,
143 fs: Arc<dyn Fs>,
144 },
145 Remote {
146 upstream_client: AnyProtoClient,
147 upstream_project_id: u64,
148 downstream: Option<(AnyProtoClient, ProjectId)>,
149 },
150}
151
152enum DownstreamUpdate {
153 UpdateRepository(RepositorySnapshot),
154 RemoveRepository(RepositoryId),
155}
156
157struct LocalDownstreamState {
158 client: AnyProtoClient,
159 project_id: ProjectId,
160 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
161 _task: Task<Result<()>>,
162}
163
164#[derive(Clone, Debug)]
165pub struct GitStoreCheckpoint {
166 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
167}
168
169#[derive(Clone, Debug, PartialEq, Eq)]
170pub struct StatusEntry {
171 pub repo_path: RepoPath,
172 pub status: FileStatus,
173}
174
175impl StatusEntry {
176 fn to_proto(&self) -> proto::StatusEntry {
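        // Collapse the two-part tracked status into a single "simple" status,
        // preferring the worktree side whenever it differs from the index. For
        // example, a file that is unmodified in the index but modified in the
        // worktree is assumed to end up as `proto::GitStatus::Modified`.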
177 let simple_status = match self.status {
178 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
179 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
180 FileStatus::Tracked(TrackedStatus {
181 index_status,
182 worktree_status,
183 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
184 worktree_status
185 } else {
186 index_status
187 }),
188 };
189
190 proto::StatusEntry {
191 repo_path: self.repo_path.as_ref().to_proto(),
192 simple_status,
193 status: Some(status_to_proto(self.status)),
194 }
195 }
196}
197
198impl TryFrom<proto::StatusEntry> for StatusEntry {
199 type Error = anyhow::Error;
200
201 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
202 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
203 let status = status_from_proto(value.simple_status, value.status)?;
204 Ok(Self { repo_path, status })
205 }
206}
207
208impl sum_tree::Item for StatusEntry {
209 type Summary = PathSummary<GitSummary>;
210
211 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
212 PathSummary {
213 max_path: self.repo_path.0.clone(),
214 item_summary: self.status.summary(),
215 }
216 }
217}
218
219impl sum_tree::KeyedItem for StatusEntry {
220 type Key = PathKey;
221
222 fn key(&self) -> Self::Key {
223 PathKey(self.repo_path.0.clone())
224 }
225}
226
227#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
228pub struct RepositoryId(pub u64);
229
230#[derive(Clone, Debug, Default, PartialEq, Eq)]
231pub struct MergeDetails {
232 pub conflicted_paths: TreeSet<RepoPath>,
233 pub message: Option<SharedString>,
234 pub heads: Vec<Option<SharedString>>,
235}
236
237#[derive(Clone, Debug, PartialEq, Eq)]
238pub struct RepositorySnapshot {
239 pub id: RepositoryId,
240 pub statuses_by_path: SumTree<StatusEntry>,
241 pub work_directory_abs_path: Arc<Path>,
242 pub branch: Option<Branch>,
243 pub head_commit: Option<CommitDetails>,
244 pub scan_id: u64,
245 pub merge: MergeDetails,
246 pub remote_origin_url: Option<String>,
247 pub remote_upstream_url: Option<String>,
248 pub stash_entries: GitStash,
249}
250
251type JobId = u64;
252
253#[derive(Clone, Debug, PartialEq, Eq)]
254pub struct JobInfo {
255 pub start: Instant,
256 pub message: SharedString,
257}
258
259pub struct Repository {
260 this: WeakEntity<Self>,
261 snapshot: RepositorySnapshot,
262 commit_message_buffer: Option<Entity<Buffer>>,
263 git_store: WeakEntity<GitStore>,
264 // For a local repository, holds paths that have had worktree events since the last status scan completed,
265 // and that should be examined during the next status scan.
266 paths_needing_status_update: BTreeSet<RepoPath>,
267 job_sender: mpsc::UnboundedSender<GitJob>,
268 active_jobs: HashMap<JobId, JobInfo>,
269 job_id: JobId,
270 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
271 latest_askpass_id: u64,
272}
273
274impl std::ops::Deref for Repository {
275 type Target = RepositorySnapshot;
276
277 fn deref(&self) -> &Self::Target {
278 &self.snapshot
279 }
280}
281
282#[derive(Clone)]
283pub enum RepositoryState {
284 Local {
285 backend: Arc<dyn GitRepository>,
286 environment: Arc<HashMap<String, String>>,
287 },
288 Remote {
289 project_id: ProjectId,
290 client: AnyProtoClient,
291 },
292}
293
294#[derive(Clone, Debug)]
295pub enum RepositoryEvent {
296 Updated { full_scan: bool, new_instance: bool },
297 MergeHeadsChanged,
298}
299
300#[derive(Clone, Debug)]
301pub struct JobsUpdated;
302
303#[derive(Debug)]
304pub enum GitStoreEvent {
305 ActiveRepositoryChanged(Option<RepositoryId>),
306 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
307 RepositoryAdded(RepositoryId),
308 RepositoryRemoved(RepositoryId),
309 IndexWriteError(anyhow::Error),
310 JobsUpdated,
311 ConflictsUpdated,
312}
313
314impl EventEmitter<RepositoryEvent> for Repository {}
315impl EventEmitter<JobsUpdated> for Repository {}
316impl EventEmitter<GitStoreEvent> for GitStore {}
317
318pub struct GitJob {
319 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
320 key: Option<GitJobKey>,
321}
322
323#[derive(PartialEq, Eq)]
324enum GitJobKey {
325 WriteIndex(RepoPath),
326 ReloadBufferDiffBases,
327 RefreshStatuses,
328 ReloadGitState,
329}
330
331impl GitStore {
332 pub fn local(
333 worktree_store: &Entity<WorktreeStore>,
334 buffer_store: Entity<BufferStore>,
335 environment: Entity<ProjectEnvironment>,
336 fs: Arc<dyn Fs>,
337 cx: &mut Context<Self>,
338 ) -> Self {
339 Self::new(
340 worktree_store.clone(),
341 buffer_store,
342 GitStoreState::Local {
343 next_repository_id: Arc::new(AtomicU64::new(1)),
344 downstream: None,
345 project_environment: environment,
346 fs,
347 },
348 cx,
349 )
350 }
351
352 pub fn remote(
353 worktree_store: &Entity<WorktreeStore>,
354 buffer_store: Entity<BufferStore>,
355 upstream_client: AnyProtoClient,
356 project_id: u64,
357 cx: &mut Context<Self>,
358 ) -> Self {
359 Self::new(
360 worktree_store.clone(),
361 buffer_store,
362 GitStoreState::Remote {
363 upstream_client,
364 upstream_project_id: project_id,
365 downstream: None,
366 },
367 cx,
368 )
369 }
370
371 fn new(
372 worktree_store: Entity<WorktreeStore>,
373 buffer_store: Entity<BufferStore>,
374 state: GitStoreState,
375 cx: &mut Context<Self>,
376 ) -> Self {
377 let _subscriptions = vec![
378 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
379 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
380 ];
381
382 GitStore {
383 state,
384 buffer_store,
385 worktree_store,
386 repositories: HashMap::default(),
387 active_repo_id: None,
388 _subscriptions,
389 loading_diffs: HashMap::default(),
390 shared_diffs: HashMap::default(),
391 diffs: HashMap::default(),
392 }
393 }
394
395 pub fn init(client: &AnyProtoClient) {
396 client.add_entity_request_handler(Self::handle_get_remotes);
397 client.add_entity_request_handler(Self::handle_get_branches);
398 client.add_entity_request_handler(Self::handle_get_default_branch);
399 client.add_entity_request_handler(Self::handle_change_branch);
400 client.add_entity_request_handler(Self::handle_create_branch);
401 client.add_entity_request_handler(Self::handle_rename_branch);
402 client.add_entity_request_handler(Self::handle_git_init);
403 client.add_entity_request_handler(Self::handle_push);
404 client.add_entity_request_handler(Self::handle_pull);
405 client.add_entity_request_handler(Self::handle_fetch);
406 client.add_entity_request_handler(Self::handle_stage);
407 client.add_entity_request_handler(Self::handle_unstage);
408 client.add_entity_request_handler(Self::handle_stash);
409 client.add_entity_request_handler(Self::handle_stash_pop);
410 client.add_entity_request_handler(Self::handle_stash_apply);
411 client.add_entity_request_handler(Self::handle_stash_drop);
412 client.add_entity_request_handler(Self::handle_commit);
413 client.add_entity_request_handler(Self::handle_reset);
414 client.add_entity_request_handler(Self::handle_show);
415 client.add_entity_request_handler(Self::handle_load_commit_diff);
416 client.add_entity_request_handler(Self::handle_checkout_files);
417 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
418 client.add_entity_request_handler(Self::handle_set_index_text);
419 client.add_entity_request_handler(Self::handle_askpass);
420 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
421 client.add_entity_request_handler(Self::handle_git_diff);
422 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
423 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
424 client.add_entity_message_handler(Self::handle_update_diff_bases);
425 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
426 client.add_entity_request_handler(Self::handle_blame_buffer);
427 client.add_entity_message_handler(Self::handle_update_repository);
428 client.add_entity_message_handler(Self::handle_remove_repository);
429 client.add_entity_request_handler(Self::handle_git_clone);
430 }
431
432 pub fn is_local(&self) -> bool {
433 matches!(self.state, GitStoreState::Local { .. })
434 }
435
436 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
437 match &mut self.state {
438 GitStoreState::Remote {
439 downstream: downstream_client,
440 ..
441 } => {
442 for repo in self.repositories.values() {
443 let update = repo.read(cx).snapshot.initial_update(project_id);
444 for update in split_repository_update(update) {
445 client.send(update).log_err();
446 }
447 }
448 *downstream_client = Some((client, ProjectId(project_id)));
449 }
450 GitStoreState::Local {
451 downstream: downstream_client,
452 ..
453 } => {
454 let mut snapshots = HashMap::default();
455 let (updates_tx, mut updates_rx) = mpsc::unbounded();
456 for repo in self.repositories.values() {
457 updates_tx
458 .unbounded_send(DownstreamUpdate::UpdateRepository(
459 repo.read(cx).snapshot.clone(),
460 ))
461 .ok();
462 }
463 *downstream_client = Some(LocalDownstreamState {
464 client: client.clone(),
465 project_id: ProjectId(project_id),
466 updates_tx,
467 _task: cx.spawn(async move |this, cx| {
468 cx.background_spawn(async move {
469 while let Some(update) = updates_rx.next().await {
470 match update {
471 DownstreamUpdate::UpdateRepository(snapshot) => {
472 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
473 {
474 let update =
475 snapshot.build_update(old_snapshot, project_id);
476 *old_snapshot = snapshot;
477 for update in split_repository_update(update) {
478 client.send(update)?;
479 }
480 } else {
481 let update = snapshot.initial_update(project_id);
482 for update in split_repository_update(update) {
483 client.send(update)?;
484 }
485 snapshots.insert(snapshot.id, snapshot);
486 }
487 }
488 DownstreamUpdate::RemoveRepository(id) => {
489 client.send(proto::RemoveRepository {
490 project_id,
491 id: id.to_proto(),
492 })?;
493 }
494 }
495 }
496 anyhow::Ok(())
497 })
498 .await
499 .ok();
500 this.update(cx, |this, _| {
501 if let GitStoreState::Local {
502 downstream: downstream_client,
503 ..
504 } = &mut this.state
505 {
506 downstream_client.take();
507 } else {
508 unreachable!("unshared called on remote store");
509 }
510 })
511 }),
512 });
513 }
514 }
515 }
516
517 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
518 match &mut self.state {
519 GitStoreState::Local {
520 downstream: downstream_client,
521 ..
522 } => {
523 downstream_client.take();
524 }
525 GitStoreState::Remote {
526 downstream: downstream_client,
527 ..
528 } => {
529 downstream_client.take();
530 }
531 }
532 self.shared_diffs.clear();
533 }
534
535 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
536 self.shared_diffs.remove(peer_id);
537 }
538
539 pub fn active_repository(&self) -> Option<Entity<Repository>> {
540 self.active_repo_id
541 .as_ref()
542 .map(|id| self.repositories[id].clone())
543 }
544
545 pub fn open_unstaged_diff(
546 &mut self,
547 buffer: Entity<Buffer>,
548 cx: &mut Context<Self>,
549 ) -> Task<Result<Entity<BufferDiff>>> {
550 let buffer_id = buffer.read(cx).remote_id();
551 if let Some(diff_state) = self.diffs.get(&buffer_id)
552 && let Some(unstaged_diff) = diff_state
553 .read(cx)
554 .unstaged_diff
555 .as_ref()
556 .and_then(|weak| weak.upgrade())
557 {
558 if let Some(task) =
559 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
560 {
561 return cx.background_executor().spawn(async move {
562 task.await;
563 Ok(unstaged_diff)
564 });
565 }
566 return Task::ready(Ok(unstaged_diff));
567 }
568
569 let Some((repo, repo_path)) =
570 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
571 else {
572 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
573 };
574
575 let task = self
576 .loading_diffs
577 .entry((buffer_id, DiffKind::Unstaged))
578 .or_insert_with(|| {
579 let staged_text = repo.update(cx, |repo, cx| {
580 repo.load_staged_text(buffer_id, repo_path, cx)
581 });
582 cx.spawn(async move |this, cx| {
583 Self::open_diff_internal(
584 this,
585 DiffKind::Unstaged,
586 staged_text.await.map(DiffBasesChange::SetIndex),
587 buffer,
588 cx,
589 )
590 .await
591 .map_err(Arc::new)
592 })
593 .shared()
594 })
595 .clone();
596
597 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
598 }
599
600 pub fn open_uncommitted_diff(
601 &mut self,
602 buffer: Entity<Buffer>,
603 cx: &mut Context<Self>,
604 ) -> Task<Result<Entity<BufferDiff>>> {
605 let buffer_id = buffer.read(cx).remote_id();
606
607 if let Some(diff_state) = self.diffs.get(&buffer_id)
608 && let Some(uncommitted_diff) = diff_state
609 .read(cx)
610 .uncommitted_diff
611 .as_ref()
612 .and_then(|weak| weak.upgrade())
613 {
614 if let Some(task) =
615 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
616 {
617 return cx.background_executor().spawn(async move {
618 task.await;
619 Ok(uncommitted_diff)
620 });
621 }
622 return Task::ready(Ok(uncommitted_diff));
623 }
624
625 let Some((repo, repo_path)) =
626 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
627 else {
628 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
629 };
630
631 let task = self
632 .loading_diffs
633 .entry((buffer_id, DiffKind::Uncommitted))
634 .or_insert_with(|| {
635 let changes = repo.update(cx, |repo, cx| {
636 repo.load_committed_text(buffer_id, repo_path, cx)
637 });
638
639 cx.spawn(async move |this, cx| {
640 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
641 .await
642 .map_err(Arc::new)
643 })
644 .shared()
645 })
646 .clone();
647
648 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
649 }
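
    // Hedged usage sketch (the surrounding bindings are assumptions, not code
    // from this crate): a caller holding `git_store: Entity<GitStore>` and an
    // open `buffer` can request a diff and await it from an async context:
    //
    //     let diff_task = git_store.update(cx, |store, cx| {
    //         store.open_uncommitted_diff(buffer.clone(), cx)
    //     });
    //     let uncommitted: Entity<BufferDiff> = diff_task.await?;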
650
651 async fn open_diff_internal(
652 this: WeakEntity<Self>,
653 kind: DiffKind,
654 texts: Result<DiffBasesChange>,
655 buffer_entity: Entity<Buffer>,
656 cx: &mut AsyncApp,
657 ) -> Result<Entity<BufferDiff>> {
658 let diff_bases_change = match texts {
659 Err(e) => {
660 this.update(cx, |this, cx| {
661 let buffer = buffer_entity.read(cx);
662 let buffer_id = buffer.remote_id();
663 this.loading_diffs.remove(&(buffer_id, kind));
664 })?;
665 return Err(e);
666 }
667 Ok(change) => change,
668 };
669
670 this.update(cx, |this, cx| {
671 let buffer = buffer_entity.read(cx);
672 let buffer_id = buffer.remote_id();
673 let language = buffer.language().cloned();
674 let language_registry = buffer.language_registry();
675 let text_snapshot = buffer.text_snapshot();
676 this.loading_diffs.remove(&(buffer_id, kind));
677
678 let git_store = cx.weak_entity();
679 let diff_state = this
680 .diffs
681 .entry(buffer_id)
682 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
683
684 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
685
686 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
687 diff_state.update(cx, |diff_state, cx| {
688 diff_state.language = language;
689 diff_state.language_registry = language_registry;
690
691 match kind {
692 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
693 DiffKind::Uncommitted => {
694 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
695 diff
696 } else {
697 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
698 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
699 unstaged_diff
700 };
701
702 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
703 diff_state.uncommitted_diff = Some(diff.downgrade())
704 }
705 }
706
707 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
708 let rx = diff_state.wait_for_recalculation();
709
710 anyhow::Ok(async move {
711 if let Some(rx) = rx {
712 rx.await;
713 }
714 Ok(diff)
715 })
716 })
717 })??
718 .await
719 }
720
721 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
722 let diff_state = self.diffs.get(&buffer_id)?;
723 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
724 }
725
726 pub fn get_uncommitted_diff(
727 &self,
728 buffer_id: BufferId,
729 cx: &App,
730 ) -> Option<Entity<BufferDiff>> {
731 let diff_state = self.diffs.get(&buffer_id)?;
732 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
733 }
734
735 pub fn open_conflict_set(
736 &mut self,
737 buffer: Entity<Buffer>,
738 cx: &mut Context<Self>,
739 ) -> Entity<ConflictSet> {
740 log::debug!("open conflict set");
741 let buffer_id = buffer.read(cx).remote_id();
742
743 if let Some(git_state) = self.diffs.get(&buffer_id)
744 && let Some(conflict_set) = git_state
745 .read(cx)
746 .conflict_set
747 .as_ref()
748 .and_then(|weak| weak.upgrade())
749 {
751 let buffer_snapshot = buffer.read(cx).text_snapshot();
752
753 git_state.update(cx, |state, cx| {
754 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
755 });
756
757 return conflict_set;
758 }
759
760 let is_unmerged = self
761 .repository_and_path_for_buffer_id(buffer_id, cx)
762 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
763 let git_store = cx.weak_entity();
764 let buffer_git_state = self
765 .diffs
766 .entry(buffer_id)
767 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
768 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
769
770 self._subscriptions
771 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
772 cx.emit(GitStoreEvent::ConflictsUpdated);
773 }));
774
775 buffer_git_state.update(cx, |state, cx| {
776 state.conflict_set = Some(conflict_set.downgrade());
777 let buffer_snapshot = buffer.read(cx).text_snapshot();
778 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
779 });
780
781 conflict_set
782 }
783
784 pub fn project_path_git_status(
785 &self,
786 project_path: &ProjectPath,
787 cx: &App,
788 ) -> Option<FileStatus> {
789 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
790 Some(repo.read(cx).status_for_path(&repo_path)?.status)
791 }
792
793 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
794 let mut work_directory_abs_paths = Vec::new();
795 let mut checkpoints = Vec::new();
796 for repository in self.repositories.values() {
797 repository.update(cx, |repository, _| {
798 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
799 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
800 });
801 }
802
803 cx.background_executor().spawn(async move {
804 let checkpoints = future::try_join_all(checkpoints).await?;
805 Ok(GitStoreCheckpoint {
806 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
807 .into_iter()
808 .zip(checkpoints)
809 .collect(),
810 })
811 })
812 }
813
814 pub fn restore_checkpoint(
815 &self,
816 checkpoint: GitStoreCheckpoint,
817 cx: &mut App,
818 ) -> Task<Result<()>> {
819 let repositories_by_work_dir_abs_path = self
820 .repositories
821 .values()
822 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
823 .collect::<HashMap<_, _>>();
824
825 let mut tasks = Vec::new();
826 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
827 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
828 let restore = repository.update(cx, |repository, _| {
829 repository.restore_checkpoint(checkpoint)
830 });
831 tasks.push(async move { restore.await? });
832 }
833 }
834 cx.background_spawn(async move {
835 future::try_join_all(tasks).await?;
836 Ok(())
837 })
838 }
839
840 /// Compares two checkpoints, returning true if they are equal.
841 pub fn compare_checkpoints(
842 &self,
843 left: GitStoreCheckpoint,
844 mut right: GitStoreCheckpoint,
845 cx: &mut App,
846 ) -> Task<Result<bool>> {
847 let repositories_by_work_dir_abs_path = self
848 .repositories
849 .values()
850 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
851 .collect::<HashMap<_, _>>();
852
853 let mut tasks = Vec::new();
854 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
855 if let Some(right_checkpoint) = right
856 .checkpoints_by_work_dir_abs_path
857 .remove(&work_dir_abs_path)
858 {
859 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
860 {
861 let compare = repository.update(cx, |repository, _| {
862 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
863 });
864
865 tasks.push(async move { compare.await? });
866 }
867 } else {
868 return Task::ready(Ok(false));
869 }
870 }
871 cx.background_spawn(async move {
872 Ok(future::try_join_all(tasks)
873 .await?
874 .into_iter()
875 .all(|result| result))
876 })
877 }
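
    // Hedged usage sketch of the checkpoint API (bindings are assumptions, not
    // code from this crate):
    //
    //     let before = store.checkpoint(cx).await?;
    //     // ... perform edits or git operations ...
    //     let after = store.checkpoint(cx).await?;
    //     if !store.compare_checkpoints(before.clone(), after, cx).await? {
    //         store.restore_checkpoint(before, cx).await?;
    //     }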
878
879 /// Blames a buffer.
880 pub fn blame_buffer(
881 &self,
882 buffer: &Entity<Buffer>,
883 version: Option<clock::Global>,
884 cx: &mut App,
885 ) -> Task<Result<Option<Blame>>> {
886 let buffer = buffer.read(cx);
887 let Some((repo, repo_path)) =
888 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
889 else {
890 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
891 };
892 let content = match &version {
893 Some(version) => buffer.rope_for_version(version),
894 None => buffer.as_rope().clone(),
895 };
896 let version = version.unwrap_or(buffer.version());
897 let buffer_id = buffer.remote_id();
898
899 let rx = repo.update(cx, |repo, _| {
900 repo.send_job(None, move |state, _| async move {
901 match state {
902 RepositoryState::Local { backend, .. } => backend
903 .blame(repo_path.clone(), content)
904 .await
905 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
906 .map(Some),
907 RepositoryState::Remote { project_id, client } => {
908 let response = client
909 .request(proto::BlameBuffer {
910 project_id: project_id.to_proto(),
911 buffer_id: buffer_id.into(),
912 version: serialize_version(&version),
913 })
914 .await?;
915 Ok(deserialize_blame_buffer_response(response))
916 }
917 }
918 })
919 });
920
921 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
922 }
923
924 pub fn get_permalink_to_line(
925 &self,
926 buffer: &Entity<Buffer>,
927 selection: Range<u32>,
928 cx: &mut App,
929 ) -> Task<Result<url::Url>> {
930 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
931 return Task::ready(Err(anyhow!("buffer has no file")));
932 };
933
934 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
935 &(file.worktree.read(cx).id(), file.path.clone()).into(),
936 cx,
937 ) else {
938 // If we're not in a Git repo, check whether this is a Rust source
939 // file in the Cargo registry (presumably opened with go-to-definition
940 // from a normal Rust file). If so, we can put together a permalink
941 // using crate metadata.
942 if buffer
943 .read(cx)
944 .language()
945 .is_none_or(|lang| lang.name() != "Rust".into())
946 {
947 return Task::ready(Err(anyhow!("no permalink available")));
948 }
949 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
950 return Task::ready(Err(anyhow!("no permalink available")));
951 };
952 return cx.spawn(async move |cx| {
953 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
954 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
955 .context("no permalink available")
956 });
957
958 // TODO remote case
959 };
960
961 let buffer_id = buffer.read(cx).remote_id();
962 let branch = repo.read(cx).branch.clone();
963 let remote = branch
964 .as_ref()
965 .and_then(|b| b.upstream.as_ref())
966 .and_then(|b| b.remote_name())
967 .unwrap_or("origin")
968 .to_string();
969
970 let rx = repo.update(cx, |repo, _| {
971 repo.send_job(None, move |state, cx| async move {
972 match state {
973 RepositoryState::Local { backend, .. } => {
974 let origin_url = backend
975 .remote_url(&remote)
976 .with_context(|| format!("remote \"{remote}\" not found"))?;
977
978 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
979
980 let provider_registry =
981 cx.update(GitHostingProviderRegistry::default_global)?;
982
983 let (provider, remote) =
984 parse_git_remote_url(provider_registry, &origin_url)
985 .context("parsing Git remote URL")?;
986
987 let path = repo_path.to_str().with_context(|| {
988 format!("converting repo path {repo_path:?} to string")
989 })?;
990
991 Ok(provider.build_permalink(
992 remote,
993 BuildPermalinkParams {
994 sha: &sha,
995 path,
996 selection: Some(selection),
997 },
998 ))
999 }
1000 RepositoryState::Remote { project_id, client } => {
1001 let response = client
1002 .request(proto::GetPermalinkToLine {
1003 project_id: project_id.to_proto(),
1004 buffer_id: buffer_id.into(),
1005 selection: Some(proto::Range {
1006 start: selection.start as u64,
1007 end: selection.end as u64,
1008 }),
1009 })
1010 .await?;
1011
1012 url::Url::parse(&response.permalink).context("failed to parse permalink")
1013 }
1014 }
1015 })
1016 });
1017 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1018 }
1019
1020 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1021 match &self.state {
1022 GitStoreState::Local {
1023 downstream: downstream_client,
1024 ..
1025 } => downstream_client
1026 .as_ref()
1027 .map(|state| (state.client.clone(), state.project_id)),
1028 GitStoreState::Remote {
1029 downstream: downstream_client,
1030 ..
1031 } => downstream_client.clone(),
1032 }
1033 }
1034
1035 fn upstream_client(&self) -> Option<AnyProtoClient> {
1036 match &self.state {
1037 GitStoreState::Local { .. } => None,
1038 GitStoreState::Remote {
1039 upstream_client, ..
1040 } => Some(upstream_client.clone()),
1041 }
1042 }
1043
1044 fn on_worktree_store_event(
1045 &mut self,
1046 worktree_store: Entity<WorktreeStore>,
1047 event: &WorktreeStoreEvent,
1048 cx: &mut Context<Self>,
1049 ) {
1050 let GitStoreState::Local {
1051 project_environment,
1052 downstream,
1053 next_repository_id,
1054 fs,
1055 } = &self.state
1056 else {
1057 return;
1058 };
1059
1060 match event {
1061 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1062 if let Some(worktree) = self
1063 .worktree_store
1064 .read(cx)
1065 .worktree_for_id(*worktree_id, cx)
1066 {
1067 let paths_by_git_repo =
1068 self.process_updated_entries(&worktree, updated_entries, cx);
1069 let downstream = downstream
1070 .as_ref()
1071 .map(|downstream| downstream.updates_tx.clone());
1072 cx.spawn(async move |_, cx| {
1073 let paths_by_git_repo = paths_by_git_repo.await;
1074 for (repo, paths) in paths_by_git_repo {
1075 repo.update(cx, |repo, cx| {
1076 repo.paths_changed(paths, downstream.clone(), cx);
1077 })
1078 .ok();
1079 }
1080 })
1081 .detach();
1082 }
1083 }
1084 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1085 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1086 else {
1087 return;
1088 };
1089 if !worktree.read(cx).is_visible() {
1090 log::debug!(
1091 "not adding repositories for local worktree {:?} because it's not visible",
1092 worktree.read(cx).abs_path()
1093 );
1094 return;
1095 }
1096 self.update_repositories_from_worktree(
1097 project_environment.clone(),
1098 next_repository_id.clone(),
1099 downstream
1100 .as_ref()
1101 .map(|downstream| downstream.updates_tx.clone()),
1102 changed_repos.clone(),
1103 fs.clone(),
1104 cx,
1105 );
1106 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1107 }
1108 _ => {}
1109 }
1110 }
1111
1112 fn on_repository_event(
1113 &mut self,
1114 repo: Entity<Repository>,
1115 event: &RepositoryEvent,
1116 cx: &mut Context<Self>,
1117 ) {
1118 let id = repo.read(cx).id;
1119 let repo_snapshot = repo.read(cx).snapshot.clone();
1120 for (buffer_id, diff) in self.diffs.iter() {
1121 if let Some((buffer_repo, repo_path)) =
1122 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1123 && buffer_repo == repo
1124 {
1125 diff.update(cx, |diff, cx| {
1126 if let Some(conflict_set) = &diff.conflict_set {
1127 let conflict_status_changed =
1128 conflict_set.update(cx, |conflict_set, cx| {
1129 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1130 conflict_set.set_has_conflict(has_conflict, cx)
1131 })?;
1132 if conflict_status_changed {
1133 let buffer_store = self.buffer_store.read(cx);
1134 if let Some(buffer) = buffer_store.get(*buffer_id) {
1135 let _ = diff
1136 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1137 }
1138 }
1139 }
1140 anyhow::Ok(())
1141 })
1142 .ok();
1143 }
1144 }
1145 cx.emit(GitStoreEvent::RepositoryUpdated(
1146 id,
1147 event.clone(),
1148 self.active_repo_id == Some(id),
1149 ))
1150 }
1151
1152 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1153 cx.emit(GitStoreEvent::JobsUpdated)
1154 }
1155
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1157 fn update_repositories_from_worktree(
1158 &mut self,
1159 project_environment: Entity<ProjectEnvironment>,
1160 next_repository_id: Arc<AtomicU64>,
1161 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1162 updated_git_repositories: UpdatedGitRepositoriesSet,
1163 fs: Arc<dyn Fs>,
1164 cx: &mut Context<Self>,
1165 ) {
1166 let mut removed_ids = Vec::new();
1167 for update in updated_git_repositories.iter() {
1168 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1169 let existing_work_directory_abs_path =
1170 repo.read(cx).work_directory_abs_path.clone();
1171 Some(&existing_work_directory_abs_path)
1172 == update.old_work_directory_abs_path.as_ref()
1173 || Some(&existing_work_directory_abs_path)
1174 == update.new_work_directory_abs_path.as_ref()
1175 }) {
1176 if let Some(new_work_directory_abs_path) =
1177 update.new_work_directory_abs_path.clone()
1178 {
1179 existing.update(cx, |existing, cx| {
1180 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1181 existing.schedule_scan(updates_tx.clone(), cx);
1182 });
1183 } else {
1184 removed_ids.push(*id);
1185 }
1186 } else if let UpdatedGitRepository {
1187 new_work_directory_abs_path: Some(work_directory_abs_path),
1188 dot_git_abs_path: Some(dot_git_abs_path),
1189 repository_dir_abs_path: Some(repository_dir_abs_path),
1190 common_dir_abs_path: Some(common_dir_abs_path),
1191 ..
1192 } = update
1193 {
1194 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1195 let git_store = cx.weak_entity();
1196 let repo = cx.new(|cx| {
1197 let mut repo = Repository::local(
1198 id,
1199 work_directory_abs_path.clone(),
1200 dot_git_abs_path.clone(),
1201 repository_dir_abs_path.clone(),
1202 common_dir_abs_path.clone(),
1203 project_environment.downgrade(),
1204 fs.clone(),
1205 git_store,
1206 cx,
1207 );
1208 repo.schedule_scan(updates_tx.clone(), cx);
1209 repo
1210 });
1211 self._subscriptions
1212 .push(cx.subscribe(&repo, Self::on_repository_event));
1213 self._subscriptions
1214 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1215 self.repositories.insert(id, repo);
1216 cx.emit(GitStoreEvent::RepositoryAdded(id));
1217 self.active_repo_id.get_or_insert_with(|| {
1218 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1219 id
1220 });
1221 }
1222 }
1223
1224 for id in removed_ids {
1225 if self.active_repo_id == Some(id) {
1226 self.active_repo_id = None;
1227 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1228 }
1229 self.repositories.remove(&id);
1230 if let Some(updates_tx) = updates_tx.as_ref() {
1231 updates_tx
1232 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1233 .ok();
1234 }
1235 }
1236 }
1237
1238 fn on_buffer_store_event(
1239 &mut self,
1240 _: Entity<BufferStore>,
1241 event: &BufferStoreEvent,
1242 cx: &mut Context<Self>,
1243 ) {
1244 match event {
1245 BufferStoreEvent::BufferAdded(buffer) => {
1246 cx.subscribe(buffer, |this, buffer, event, cx| {
1247 if let BufferEvent::LanguageChanged = event {
1248 let buffer_id = buffer.read(cx).remote_id();
1249 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1250 diff_state.update(cx, |diff_state, cx| {
1251 diff_state.buffer_language_changed(buffer, cx);
1252 });
1253 }
1254 }
1255 })
1256 .detach();
1257 }
1258 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1259 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1260 diffs.remove(buffer_id);
1261 }
1262 }
1263 BufferStoreEvent::BufferDropped(buffer_id) => {
1264 self.diffs.remove(buffer_id);
1265 for diffs in self.shared_diffs.values_mut() {
1266 diffs.remove(buffer_id);
1267 }
1268 }
1269
1270 _ => {}
1271 }
1272 }
1273
1274 pub fn recalculate_buffer_diffs(
1275 &mut self,
1276 buffers: Vec<Entity<Buffer>>,
1277 cx: &mut Context<Self>,
1278 ) -> impl Future<Output = ()> + use<> {
1279 let mut futures = Vec::new();
1280 for buffer in buffers {
1281 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1282 let buffer = buffer.read(cx).text_snapshot();
1283 diff_state.update(cx, |diff_state, cx| {
1284 diff_state.recalculate_diffs(buffer.clone(), cx);
1285 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1286 });
1287 futures.push(diff_state.update(cx, |diff_state, cx| {
1288 diff_state
1289 .reparse_conflict_markers(buffer, cx)
1290 .map(|_| {})
1291 .boxed()
1292 }));
1293 }
1294 }
1295 async move {
1296 futures::future::join_all(futures).await;
1297 }
1298 }
1299
1300 fn on_buffer_diff_event(
1301 &mut self,
1302 diff: Entity<buffer_diff::BufferDiff>,
1303 event: &BufferDiffEvent,
1304 cx: &mut Context<Self>,
1305 ) {
1306 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1307 let buffer_id = diff.read(cx).buffer_id;
1308 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1309 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1310 diff_state.hunk_staging_operation_count += 1;
1311 diff_state.hunk_staging_operation_count
1312 });
1313 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1314 let recv = repo.update(cx, |repo, cx| {
1315 log::debug!("hunks changed for {}", path.display());
1316 repo.spawn_set_index_text_job(
1317 path,
1318 new_index_text.as_ref().map(|rope| rope.to_string()),
1319 Some(hunk_staging_operation_count),
1320 cx,
1321 )
1322 });
1323 let diff = diff.downgrade();
1324 cx.spawn(async move |this, cx| {
1325 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1326 diff.update(cx, |diff, cx| {
1327 diff.clear_pending_hunks(cx);
1328 })
1329 .ok();
1330 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1331 .ok();
1332 }
1333 })
1334 .detach();
1335 }
1336 }
1337 }
1338 }
1339
1340 fn local_worktree_git_repos_changed(
1341 &mut self,
1342 worktree: Entity<Worktree>,
1343 changed_repos: &UpdatedGitRepositoriesSet,
1344 cx: &mut Context<Self>,
1345 ) {
1346 log::debug!("local worktree repos changed");
1347 debug_assert!(worktree.read(cx).is_local());
1348
1349 for repository in self.repositories.values() {
1350 repository.update(cx, |repository, cx| {
1351 let repo_abs_path = &repository.work_directory_abs_path;
1352 if changed_repos.iter().any(|update| {
1353 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1354 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1355 }) {
1356 repository.reload_buffer_diff_bases(cx);
1357 }
1358 });
1359 }
1360 }
1361
1362 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1363 &self.repositories
1364 }
1365
1366 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1367 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1368 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1369 Some(status.status)
1370 }
1371
1372 pub fn repository_and_path_for_buffer_id(
1373 &self,
1374 buffer_id: BufferId,
1375 cx: &App,
1376 ) -> Option<(Entity<Repository>, RepoPath)> {
1377 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1378 let project_path = buffer.read(cx).project_path(cx)?;
1379 self.repository_and_path_for_project_path(&project_path, cx)
1380 }
1381
1382 pub fn repository_and_path_for_project_path(
1383 &self,
1384 path: &ProjectPath,
1385 cx: &App,
1386 ) -> Option<(Entity<Repository>, RepoPath)> {
1387 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1388 self.repositories
1389 .values()
1390 .filter_map(|repo| {
1391 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1392 Some((repo.clone(), repo_path))
1393 })
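            // When nested repositories both contain `abs_path`, the innermost
            // repository wins: its work directory path is the longest, and
            // therefore the greatest under `Path` ordering.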
1394 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1395 }
1396
1397 pub fn git_init(
1398 &self,
1399 path: Arc<Path>,
1400 fallback_branch_name: String,
1401 cx: &App,
1402 ) -> Task<Result<()>> {
1403 match &self.state {
1404 GitStoreState::Local { fs, .. } => {
1405 let fs = fs.clone();
1406 cx.background_executor()
1407 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1408 }
1409 GitStoreState::Remote {
1410 upstream_client,
1411 upstream_project_id: project_id,
1412 ..
1413 } => {
1414 let client = upstream_client.clone();
1415 let project_id = *project_id;
1416 cx.background_executor().spawn(async move {
1417 client
1418 .request(proto::GitInit {
                            project_id,
1420 abs_path: path.to_string_lossy().to_string(),
1421 fallback_branch_name,
1422 })
1423 .await?;
1424 Ok(())
1425 })
1426 }
1427 }
1428 }
1429
1430 pub fn git_clone(
1431 &self,
1432 repo: String,
1433 path: impl Into<Arc<std::path::Path>>,
1434 cx: &App,
1435 ) -> Task<Result<()>> {
1436 let path = path.into();
1437 match &self.state {
1438 GitStoreState::Local { fs, .. } => {
1439 let fs = fs.clone();
1440 cx.background_executor()
1441 .spawn(async move { fs.git_clone(&repo, &path).await })
1442 }
1443 GitStoreState::Remote {
1444 upstream_client,
1445 upstream_project_id,
1446 ..
1447 } => {
1448 if upstream_client.is_via_collab() {
1449 return Task::ready(Err(anyhow!(
1450 "Git Clone isn't supported for project guests"
1451 )));
1452 }
1453 let request = upstream_client.request(proto::GitClone {
1454 project_id: *upstream_project_id,
1455 abs_path: path.to_string_lossy().to_string(),
1456 remote_repo: repo,
1457 });
1458
1459 cx.background_spawn(async move {
1460 let result = request.await?;
1461
1462 match result.success {
1463 true => Ok(()),
1464 false => Err(anyhow!("Git Clone failed")),
1465 }
1466 })
1467 }
1468 }
1469 }
1470
1471 async fn handle_update_repository(
1472 this: Entity<Self>,
1473 envelope: TypedEnvelope<proto::UpdateRepository>,
1474 mut cx: AsyncApp,
1475 ) -> Result<()> {
1476 this.update(&mut cx, |this, cx| {
1477 let mut update = envelope.payload;
1478
1479 let id = RepositoryId::from_proto(update.id);
1480 let client = this.upstream_client().context("no upstream client")?;
1481
1482 let mut is_new = false;
1483 let repo = this.repositories.entry(id).or_insert_with(|| {
1484 is_new = true;
1485 let git_store = cx.weak_entity();
1486 cx.new(|cx| {
1487 Repository::remote(
1488 id,
1489 Path::new(&update.abs_path).into(),
1490 ProjectId(update.project_id),
1491 client,
1492 git_store,
1493 cx,
1494 )
1495 })
1496 });
1497 if is_new {
1498 this._subscriptions
1499 .push(cx.subscribe(repo, Self::on_repository_event))
1500 }
1501
1502 repo.update(cx, {
1503 let update = update.clone();
1504 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1505 })?;
1506
1507 this.active_repo_id.get_or_insert_with(|| {
1508 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1509 id
1510 });
1511
1512 if let Some((client, project_id)) = this.downstream_client() {
1513 update.project_id = project_id.to_proto();
1514 client.send(update).log_err();
1515 }
1516 Ok(())
1517 })?
1518 }
1519
1520 async fn handle_remove_repository(
1521 this: Entity<Self>,
1522 envelope: TypedEnvelope<proto::RemoveRepository>,
1523 mut cx: AsyncApp,
1524 ) -> Result<()> {
1525 this.update(&mut cx, |this, cx| {
1526 let mut update = envelope.payload;
1527 let id = RepositoryId::from_proto(update.id);
1528 this.repositories.remove(&id);
1529 if let Some((client, project_id)) = this.downstream_client() {
1530 update.project_id = project_id.to_proto();
1531 client.send(update).log_err();
1532 }
1533 if this.active_repo_id == Some(id) {
1534 this.active_repo_id = None;
1535 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1536 }
1537 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1538 })
1539 }
1540
1541 async fn handle_git_init(
1542 this: Entity<Self>,
1543 envelope: TypedEnvelope<proto::GitInit>,
1544 cx: AsyncApp,
1545 ) -> Result<proto::Ack> {
1546 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1547 let name = envelope.payload.fallback_branch_name;
1548 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1549 .await?;
1550
1551 Ok(proto::Ack {})
1552 }
1553
1554 async fn handle_git_clone(
1555 this: Entity<Self>,
1556 envelope: TypedEnvelope<proto::GitClone>,
1557 cx: AsyncApp,
1558 ) -> Result<proto::GitCloneResponse> {
1559 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1560 let repo_name = envelope.payload.remote_repo;
1561 let result = cx
1562 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1563 .await;
1564
1565 Ok(proto::GitCloneResponse {
1566 success: result.is_ok(),
1567 })
1568 }
1569
1570 async fn handle_fetch(
1571 this: Entity<Self>,
1572 envelope: TypedEnvelope<proto::Fetch>,
1573 mut cx: AsyncApp,
1574 ) -> Result<proto::RemoteMessageResponse> {
1575 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1576 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1577 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1578 let askpass_id = envelope.payload.askpass_id;
1579
1580 let askpass = make_remote_delegate(
1581 this,
1582 envelope.payload.project_id,
1583 repository_id,
1584 askpass_id,
1585 &mut cx,
1586 );
1587
1588 let remote_output = repository_handle
1589 .update(&mut cx, |repository_handle, cx| {
1590 repository_handle.fetch(fetch_options, askpass, cx)
1591 })?
1592 .await??;
1593
1594 Ok(proto::RemoteMessageResponse {
1595 stdout: remote_output.stdout,
1596 stderr: remote_output.stderr,
1597 })
1598 }
1599
1600 async fn handle_push(
1601 this: Entity<Self>,
1602 envelope: TypedEnvelope<proto::Push>,
1603 mut cx: AsyncApp,
1604 ) -> Result<proto::RemoteMessageResponse> {
1605 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1606 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1607
1608 let askpass_id = envelope.payload.askpass_id;
1609 let askpass = make_remote_delegate(
1610 this,
1611 envelope.payload.project_id,
1612 repository_id,
1613 askpass_id,
1614 &mut cx,
1615 );
1616
1617 let options = envelope
1618 .payload
1619 .options
1620 .as_ref()
1621 .map(|_| match envelope.payload.options() {
1622 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1623 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1624 });
1625
1626 let branch_name = envelope.payload.branch_name.into();
1627 let remote_name = envelope.payload.remote_name.into();
1628
1629 let remote_output = repository_handle
1630 .update(&mut cx, |repository_handle, cx| {
1631 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1632 })?
1633 .await??;
1634 Ok(proto::RemoteMessageResponse {
1635 stdout: remote_output.stdout,
1636 stderr: remote_output.stderr,
1637 })
1638 }
1639
1640 async fn handle_pull(
1641 this: Entity<Self>,
1642 envelope: TypedEnvelope<proto::Pull>,
1643 mut cx: AsyncApp,
1644 ) -> Result<proto::RemoteMessageResponse> {
1645 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1646 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1647 let askpass_id = envelope.payload.askpass_id;
1648 let askpass = make_remote_delegate(
1649 this,
1650 envelope.payload.project_id,
1651 repository_id,
1652 askpass_id,
1653 &mut cx,
1654 );
1655
1656 let branch_name = envelope.payload.branch_name.into();
1657 let remote_name = envelope.payload.remote_name.into();
1658
1659 let remote_message = repository_handle
1660 .update(&mut cx, |repository_handle, cx| {
1661 repository_handle.pull(branch_name, remote_name, askpass, cx)
1662 })?
1663 .await??;
1664
1665 Ok(proto::RemoteMessageResponse {
1666 stdout: remote_message.stdout,
1667 stderr: remote_message.stderr,
1668 })
1669 }
1670
1671 async fn handle_stage(
1672 this: Entity<Self>,
1673 envelope: TypedEnvelope<proto::Stage>,
1674 mut cx: AsyncApp,
1675 ) -> Result<proto::Ack> {
1676 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1677 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1678
1679 let entries = envelope
1680 .payload
1681 .paths
1682 .into_iter()
1683 .map(PathBuf::from)
1684 .map(RepoPath::new)
1685 .collect();
1686
1687 repository_handle
1688 .update(&mut cx, |repository_handle, cx| {
1689 repository_handle.stage_entries(entries, cx)
1690 })?
1691 .await?;
1692 Ok(proto::Ack {})
1693 }
1694
1695 async fn handle_unstage(
1696 this: Entity<Self>,
1697 envelope: TypedEnvelope<proto::Unstage>,
1698 mut cx: AsyncApp,
1699 ) -> Result<proto::Ack> {
1700 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1701 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1702
1703 let entries = envelope
1704 .payload
1705 .paths
1706 .into_iter()
1707 .map(PathBuf::from)
1708 .map(RepoPath::new)
1709 .collect();
1710
1711 repository_handle
1712 .update(&mut cx, |repository_handle, cx| {
1713 repository_handle.unstage_entries(entries, cx)
1714 })?
1715 .await?;
1716
1717 Ok(proto::Ack {})
1718 }
1719
1720 async fn handle_stash(
1721 this: Entity<Self>,
1722 envelope: TypedEnvelope<proto::Stash>,
1723 mut cx: AsyncApp,
1724 ) -> Result<proto::Ack> {
1725 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1726 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1727
1728 let entries = envelope
1729 .payload
1730 .paths
1731 .into_iter()
1732 .map(PathBuf::from)
1733 .map(RepoPath::new)
1734 .collect();
1735
1736 repository_handle
1737 .update(&mut cx, |repository_handle, cx| {
1738 repository_handle.stash_entries(entries, cx)
1739 })?
1740 .await?;
1741
1742 Ok(proto::Ack {})
1743 }
1744
1745 async fn handle_stash_pop(
1746 this: Entity<Self>,
1747 envelope: TypedEnvelope<proto::StashPop>,
1748 mut cx: AsyncApp,
1749 ) -> Result<proto::Ack> {
1750 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1751 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1752 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1753
1754 repository_handle
1755 .update(&mut cx, |repository_handle, cx| {
1756 repository_handle.stash_pop(stash_index, cx)
1757 })?
1758 .await?;
1759
1760 Ok(proto::Ack {})
1761 }
1762
1763 async fn handle_stash_apply(
1764 this: Entity<Self>,
1765 envelope: TypedEnvelope<proto::StashApply>,
1766 mut cx: AsyncApp,
1767 ) -> Result<proto::Ack> {
1768 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1769 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1770 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1771
1772 repository_handle
1773 .update(&mut cx, |repository_handle, cx| {
1774 repository_handle.stash_apply(stash_index, cx)
1775 })?
1776 .await?;
1777
1778 Ok(proto::Ack {})
1779 }
1780
1781 async fn handle_stash_drop(
1782 this: Entity<Self>,
1783 envelope: TypedEnvelope<proto::StashDrop>,
1784 mut cx: AsyncApp,
1785 ) -> Result<proto::Ack> {
1786 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1787 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1788 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1789
1790 repository_handle
1791 .update(&mut cx, |repository_handle, cx| {
1792 repository_handle.stash_drop(stash_index, cx)
1793 })?
1794 .await??;
1795
1796 Ok(proto::Ack {})
1797 }
1798
1799 async fn handle_set_index_text(
1800 this: Entity<Self>,
1801 envelope: TypedEnvelope<proto::SetIndexText>,
1802 mut cx: AsyncApp,
1803 ) -> Result<proto::Ack> {
1804 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1805 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1806 let repo_path = RepoPath::from_str(&envelope.payload.path);
1807
1808 repository_handle
1809 .update(&mut cx, |repository_handle, cx| {
1810 repository_handle.spawn_set_index_text_job(
1811 repo_path,
1812 envelope.payload.text,
1813 None,
1814 cx,
1815 )
1816 })?
1817 .await??;
1818 Ok(proto::Ack {})
1819 }
1820
1821 async fn handle_commit(
1822 this: Entity<Self>,
1823 envelope: TypedEnvelope<proto::Commit>,
1824 mut cx: AsyncApp,
1825 ) -> Result<proto::Ack> {
1826 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1827 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1828
1829 let message = SharedString::from(envelope.payload.message);
1830 let name = envelope.payload.name.map(SharedString::from);
1831 let email = envelope.payload.email.map(SharedString::from);
1832 let options = envelope.payload.options.unwrap_or_default();
1833
1834 repository_handle
1835 .update(&mut cx, |repository_handle, cx| {
1836 repository_handle.commit(
1837 message,
1838 name.zip(email),
1839 CommitOptions {
1840 amend: options.amend,
1841 signoff: options.signoff,
1842 },
1843 cx,
1844 )
1845 })?
1846 .await??;
1847 Ok(proto::Ack {})
1848 }
1849
1850 async fn handle_get_remotes(
1851 this: Entity<Self>,
1852 envelope: TypedEnvelope<proto::GetRemotes>,
1853 mut cx: AsyncApp,
1854 ) -> Result<proto::GetRemotesResponse> {
1855 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1856 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1857
1858 let branch_name = envelope.payload.branch_name;
1859
1860 let remotes = repository_handle
1861 .update(&mut cx, |repository_handle, _| {
1862 repository_handle.get_remotes(branch_name)
1863 })?
1864 .await??;
1865
1866 Ok(proto::GetRemotesResponse {
1867 remotes: remotes
1868 .into_iter()
                .map(|remote| proto::get_remotes_response::Remote {
                    name: remote.name.to_string(),
1871 })
1872 .collect::<Vec<_>>(),
1873 })
1874 }
1875
1876 async fn handle_get_branches(
1877 this: Entity<Self>,
1878 envelope: TypedEnvelope<proto::GitGetBranches>,
1879 mut cx: AsyncApp,
1880 ) -> Result<proto::GitBranchesResponse> {
1881 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1882 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1883
1884 let branches = repository_handle
1885 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1886 .await??;
1887
1888 Ok(proto::GitBranchesResponse {
1889 branches: branches
1890 .into_iter()
1891 .map(|branch| branch_to_proto(&branch))
1892 .collect::<Vec<_>>(),
1893 })
1894 }
1895 async fn handle_get_default_branch(
1896 this: Entity<Self>,
1897 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1898 mut cx: AsyncApp,
1899 ) -> Result<proto::GetDefaultBranchResponse> {
1900 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1901 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1902
1903 let branch = repository_handle
1904 .update(&mut cx, |repository_handle, _| {
1905 repository_handle.default_branch()
1906 })?
1907 .await??
1908 .map(Into::into);
1909
1910 Ok(proto::GetDefaultBranchResponse { branch })
1911 }

1912 async fn handle_create_branch(
1913 this: Entity<Self>,
1914 envelope: TypedEnvelope<proto::GitCreateBranch>,
1915 mut cx: AsyncApp,
1916 ) -> Result<proto::Ack> {
1917 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1918 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1919 let branch_name = envelope.payload.branch_name;
1920
1921 repository_handle
1922 .update(&mut cx, |repository_handle, _| {
1923 repository_handle.create_branch(branch_name)
1924 })?
1925 .await??;
1926
1927 Ok(proto::Ack {})
1928 }
1929
1930 async fn handle_change_branch(
1931 this: Entity<Self>,
1932 envelope: TypedEnvelope<proto::GitChangeBranch>,
1933 mut cx: AsyncApp,
1934 ) -> Result<proto::Ack> {
1935 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1936 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1937 let branch_name = envelope.payload.branch_name;
1938
1939 repository_handle
1940 .update(&mut cx, |repository_handle, _| {
1941 repository_handle.change_branch(branch_name)
1942 })?
1943 .await??;
1944
1945 Ok(proto::Ack {})
1946 }
1947
1948 async fn handle_rename_branch(
1949 this: Entity<Self>,
1950 envelope: TypedEnvelope<proto::GitRenameBranch>,
1951 mut cx: AsyncApp,
1952 ) -> Result<proto::Ack> {
1953 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1954 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1955 let branch = envelope.payload.branch;
1956 let new_name = envelope.payload.new_name;
1957
1958 repository_handle
1959 .update(&mut cx, |repository_handle, _| {
1960 repository_handle.rename_branch(branch, new_name)
1961 })?
1962 .await??;
1963
1964 Ok(proto::Ack {})
1965 }
1966
1967 async fn handle_show(
1968 this: Entity<Self>,
1969 envelope: TypedEnvelope<proto::GitShow>,
1970 mut cx: AsyncApp,
1971 ) -> Result<proto::GitCommitDetails> {
1972 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1973 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1974
1975 let commit = repository_handle
1976 .update(&mut cx, |repository_handle, _| {
1977 repository_handle.show(envelope.payload.commit)
1978 })?
1979 .await??;
1980 Ok(proto::GitCommitDetails {
1981 sha: commit.sha.into(),
1982 message: commit.message.into(),
1983 commit_timestamp: commit.commit_timestamp,
1984 author_email: commit.author_email.into(),
1985 author_name: commit.author_name.into(),
1986 })
1987 }
1988
1989 async fn handle_load_commit_diff(
1990 this: Entity<Self>,
1991 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1992 mut cx: AsyncApp,
1993 ) -> Result<proto::LoadCommitDiffResponse> {
1994 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1995 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1996
1997 let commit_diff = repository_handle
1998 .update(&mut cx, |repository_handle, _| {
1999 repository_handle.load_commit_diff(envelope.payload.commit)
2000 })?
2001 .await??;
2002 Ok(proto::LoadCommitDiffResponse {
2003 files: commit_diff
2004 .files
2005 .into_iter()
2006 .map(|file| proto::CommitFile {
2007 path: file.path.to_string(),
2008 old_text: file.old_text,
2009 new_text: file.new_text,
2010 })
2011 .collect(),
2012 })
2013 }
2014
2015 async fn handle_reset(
2016 this: Entity<Self>,
2017 envelope: TypedEnvelope<proto::GitReset>,
2018 mut cx: AsyncApp,
2019 ) -> Result<proto::Ack> {
2020 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2021 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2022
2023 let mode = match envelope.payload.mode() {
2024 git_reset::ResetMode::Soft => ResetMode::Soft,
2025 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2026 };
2027
2028 repository_handle
2029 .update(&mut cx, |repository_handle, cx| {
2030 repository_handle.reset(envelope.payload.commit, mode, cx)
2031 })?
2032 .await??;
2033 Ok(proto::Ack {})
2034 }
2035
2036 async fn handle_checkout_files(
2037 this: Entity<Self>,
2038 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2039 mut cx: AsyncApp,
2040 ) -> Result<proto::Ack> {
2041 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2042 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2043 let paths = envelope
2044 .payload
2045 .paths
2046 .iter()
2047 .map(|s| RepoPath::from_str(s))
2048 .collect();
2049
2050 repository_handle
2051 .update(&mut cx, |repository_handle, cx| {
2052 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2053 })?
2054 .await??;
2055 Ok(proto::Ack {})
2056 }
2057
2058 async fn handle_open_commit_message_buffer(
2059 this: Entity<Self>,
2060 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2061 mut cx: AsyncApp,
2062 ) -> Result<proto::OpenBufferResponse> {
2063 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2064 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2065 let buffer = repository
2066 .update(&mut cx, |repository, cx| {
2067 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2068 })?
2069 .await?;
2070
2071 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2072 this.update(&mut cx, |this, cx| {
2073 this.buffer_store.update(cx, |buffer_store, cx| {
2074 buffer_store
2075 .create_buffer_for_peer(
2076 &buffer,
2077 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2078 cx,
2079 )
2080 .detach_and_log_err(cx);
2081 })
2082 })?;
2083
2084 Ok(proto::OpenBufferResponse {
2085 buffer_id: buffer_id.to_proto(),
2086 })
2087 }
2088
2089 async fn handle_askpass(
2090 this: Entity<Self>,
2091 envelope: TypedEnvelope<proto::AskPassRequest>,
2092 mut cx: AsyncApp,
2093 ) -> Result<proto::AskPassResponse> {
2094 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2095 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2096
2097 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2098 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2099 debug_panic!("no askpass found");
2100 anyhow::bail!("no askpass found");
2101 };
2102
2103 let response = askpass.ask_password(envelope.payload.prompt).await?;
2104
2105 delegates
2106 .lock()
2107 .insert(envelope.payload.askpass_id, askpass);
2108
2109 Ok(proto::AskPassResponse { response })
2110 }
2111
2112 async fn handle_check_for_pushed_commits(
2113 this: Entity<Self>,
2114 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2115 mut cx: AsyncApp,
2116 ) -> Result<proto::CheckForPushedCommitsResponse> {
2117 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2118 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2119
2120 let branches = repository_handle
2121 .update(&mut cx, |repository_handle, _| {
2122 repository_handle.check_for_pushed_commits()
2123 })?
2124 .await??;
2125 Ok(proto::CheckForPushedCommitsResponse {
2126 pushed_to: branches
2127 .into_iter()
2128 .map(|commit| commit.to_string())
2129 .collect(),
2130 })
2131 }
2132
2133 async fn handle_git_diff(
2134 this: Entity<Self>,
2135 envelope: TypedEnvelope<proto::GitDiff>,
2136 mut cx: AsyncApp,
2137 ) -> Result<proto::GitDiffResponse> {
2138 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2139 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2140 let diff_type = match envelope.payload.diff_type() {
2141 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2142 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2143 };
2144
2145 let mut diff = repository_handle
2146 .update(&mut cx, |repository_handle, cx| {
2147 repository_handle.diff(diff_type, cx)
2148 })?
2149 .await??;
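        // Cap the diff sent over the wire at roughly one megabyte so a huge diff
        // can't blow up the RPC payload; the count below is in characters, which
        // is close enough to bytes for this purpose.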
2150 const ONE_MB: usize = 1_000_000;
2151 if diff.len() > ONE_MB {
2152 diff = diff.chars().take(ONE_MB).collect()
2153 }
2154
2155 Ok(proto::GitDiffResponse { diff })
2156 }
2157
2158 async fn handle_open_unstaged_diff(
2159 this: Entity<Self>,
2160 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2161 mut cx: AsyncApp,
2162 ) -> Result<proto::OpenUnstagedDiffResponse> {
2163 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2164 let diff = this
2165 .update(&mut cx, |this, cx| {
2166 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2167 Some(this.open_unstaged_diff(buffer, cx))
2168 })?
2169 .context("missing buffer")?
2170 .await?;
2171 this.update(&mut cx, |this, _| {
2172 let shared_diffs = this
2173 .shared_diffs
2174 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2175 .or_default();
2176 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2177 })?;
2178 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2179 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2180 }
2181
2182 async fn handle_open_uncommitted_diff(
2183 this: Entity<Self>,
2184 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2185 mut cx: AsyncApp,
2186 ) -> Result<proto::OpenUncommittedDiffResponse> {
2187 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2188 let diff = this
2189 .update(&mut cx, |this, cx| {
2190 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2191 Some(this.open_uncommitted_diff(buffer, cx))
2192 })?
2193 .context("missing buffer")?
2194 .await?;
2195 this.update(&mut cx, |this, _| {
2196 let shared_diffs = this
2197 .shared_diffs
2198 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2199 .or_default();
2200 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2201 })?;
2202 diff.read_with(&cx, |diff, cx| {
2203 use proto::open_uncommitted_diff_response::Mode;
2204
2205 let unstaged_diff = diff.secondary_diff();
2206 let index_snapshot = unstaged_diff.and_then(|diff| {
2207 let diff = diff.read(cx);
2208 diff.base_text_exists().then(|| diff.base_text())
2209 });
2210
2211 let mode;
2212 let staged_text;
2213 let committed_text;
2214 if diff.base_text_exists() {
2215 let committed_snapshot = diff.base_text();
2216 committed_text = Some(committed_snapshot.text());
2217 if let Some(index_text) = index_snapshot {
2218 if index_text.remote_id() == committed_snapshot.remote_id() {
2219 mode = Mode::IndexMatchesHead;
2220 staged_text = None;
2221 } else {
2222 mode = Mode::IndexAndHead;
2223 staged_text = Some(index_text.text());
2224 }
2225 } else {
2226 mode = Mode::IndexAndHead;
2227 staged_text = None;
2228 }
2229 } else {
2230 mode = Mode::IndexAndHead;
2231 committed_text = None;
2232 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2233 }
2234
2235 proto::OpenUncommittedDiffResponse {
2236 committed_text,
2237 staged_text,
2238 mode: mode.into(),
2239 }
2240 })
2241 }
2242
2243 async fn handle_update_diff_bases(
2244 this: Entity<Self>,
2245 request: TypedEnvelope<proto::UpdateDiffBases>,
2246 mut cx: AsyncApp,
2247 ) -> Result<()> {
2248 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2249 this.update(&mut cx, |this, cx| {
2250 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2251 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2252 {
2253 let buffer = buffer.read(cx).text_snapshot();
2254 diff_state.update(cx, |diff_state, cx| {
2255 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2256 })
2257 }
2258 })
2259 }
2260
2261 async fn handle_blame_buffer(
2262 this: Entity<Self>,
2263 envelope: TypedEnvelope<proto::BlameBuffer>,
2264 mut cx: AsyncApp,
2265 ) -> Result<proto::BlameBufferResponse> {
2266 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2267 let version = deserialize_version(&envelope.payload.version);
2268 let buffer = this.read_with(&cx, |this, cx| {
2269 this.buffer_store.read(cx).get_existing(buffer_id)
2270 })??;
2271 buffer
2272 .update(&mut cx, |buffer, _| {
2273 buffer.wait_for_version(version.clone())
2274 })?
2275 .await?;
2276 let blame = this
2277 .update(&mut cx, |this, cx| {
2278 this.blame_buffer(&buffer, Some(version), cx)
2279 })?
2280 .await?;
2281 Ok(serialize_blame_buffer_response(blame))
2282 }
2283
2284 async fn handle_get_permalink_to_line(
2285 this: Entity<Self>,
2286 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2287 mut cx: AsyncApp,
2288 ) -> Result<proto::GetPermalinkToLineResponse> {
2289 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2290 // let version = deserialize_version(&envelope.payload.version);
2291 let selection = {
2292 let proto_selection = envelope
2293 .payload
2294 .selection
2295 .context("no selection to get permalink for defined")?;
2296 proto_selection.start as u32..proto_selection.end as u32
2297 };
2298 let buffer = this.read_with(&cx, |this, cx| {
2299 this.buffer_store.read(cx).get_existing(buffer_id)
2300 })??;
2301 let permalink = this
2302 .update(&mut cx, |this, cx| {
2303 this.get_permalink_to_line(&buffer, selection, cx)
2304 })?
2305 .await?;
2306 Ok(proto::GetPermalinkToLineResponse {
2307 permalink: permalink.to_string(),
2308 })
2309 }
2310
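    /// Resolves the repository referenced by an incoming request, failing if
    /// the id is no longer known to this store.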
2311 fn repository_for_request(
2312 this: &Entity<Self>,
2313 id: RepositoryId,
2314 cx: &mut AsyncApp,
2315 ) -> Result<Entity<Repository>> {
2316 this.read_with(cx, |this, _| {
2317 this.repositories
2318 .get(&id)
2319 .context("missing repository handle")
2320 .cloned()
2321 })?
2322 }
2323
2324 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2325 self.repositories
2326 .iter()
2327 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2328 .collect()
2329 }
2330
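    /// Groups the updated worktree entries by the repository whose work
    /// directory contains them, assigning each path to its innermost
    /// repository. The path matching runs on the background executor.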
2331 fn process_updated_entries(
2332 &self,
2333 worktree: &Entity<Worktree>,
2334 updated_entries: &[(Arc<Path>, ProjectEntryId, PathChange)],
2335 cx: &mut App,
2336 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2337 let mut repo_paths = self
2338 .repositories
2339 .values()
2340 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2341 .collect::<Vec<_>>();
2342 let mut entries: Vec<_> = updated_entries
2343 .iter()
2344 .map(|(path, _, _)| path.clone())
2345 .collect();
2346 entries.sort();
2347 let worktree = worktree.read(cx);
2348
2349 let entries = entries
2350 .into_iter()
2351 .filter_map(|path| worktree.absolutize(&path).ok())
2352 .collect::<Arc<[_]>>();
2353
2354 let executor = cx.background_executor().clone();
2355 cx.background_executor().spawn(async move {
2356 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2357 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2358 let mut tasks = FuturesOrdered::new();
2359 for (repo_path, repo) in repo_paths.into_iter().rev() {
2360 let entries = entries.clone();
2361 let task = executor.spawn(async move {
2362 // Find all repository paths that belong to this repo
2363 let mut ix = entries.partition_point(|path| path < &*repo_path);
2364 if ix == entries.len() {
2365 return None;
2366 };
2367
2368 let mut paths = Vec::new();
2369 // All paths prefixed by a given repo's work directory form a contiguous range.
2370 while let Some(path) = entries.get(ix)
2371 && let Some(repo_path) =
2372 RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, path)
2373 {
2374 paths.push((repo_path, ix));
2375 ix += 1;
2376 }
2377 if paths.is_empty() {
2378 None
2379 } else {
2380 Some((repo, paths))
2381 }
2382 });
2383 tasks.push_back(task);
2384 }
2385
2386 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2387 let mut path_was_used = vec![false; entries.len()];
2388 let tasks = tasks.collect::<Vec<_>>().await;
2389 // The tasks were queued with the deepest work directories first, so more-specific repositories claim their paths before their ancestors do.
2390 // We always want to assign a path to its innermost repository.
2391 for t in tasks {
2392 let Some((repo, paths)) = t else {
2393 continue;
2394 };
2395 let entry = paths_by_git_repo.entry(repo).or_default();
2396 for (repo_path, ix) in paths {
2397 if path_was_used[ix] {
2398 continue;
2399 }
2400 path_was_used[ix] = true;
2401 entry.push(repo_path);
2402 }
2403 }
2404
2405 paths_by_git_repo
2406 })
2407 }
2408}
2409
2410impl BufferGitState {
2411 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2412 Self {
2413 unstaged_diff: Default::default(),
2414 uncommitted_diff: Default::default(),
2415 recalculate_diff_task: Default::default(),
2416 language: Default::default(),
2417 language_registry: Default::default(),
2418 recalculating_tx: postage::watch::channel_with(false).0,
2419 hunk_staging_operation_count: 0,
2420 hunk_staging_operation_count_as_of_write: 0,
2421 head_text: Default::default(),
2422 index_text: Default::default(),
2423 head_changed: Default::default(),
2424 index_changed: Default::default(),
2425 language_changed: Default::default(),
2426 conflict_updated_futures: Default::default(),
2427 conflict_set: Default::default(),
2428 reparse_conflict_markers_task: Default::default(),
2429 }
2430 }
2431
2432 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2433 self.language = buffer.read(cx).language().cloned();
2434 self.language_changed = true;
2435 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2436 }
2437
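    /// Re-parses conflict markers from the given buffer snapshot on a
    /// background task and applies the result to the associated `ConflictSet`.
    /// The returned receiver fires once the conflict set has been updated; if
    /// there is no live conflict set, or it currently has no conflicts, the
    /// sender is dropped without firing.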
2438 fn reparse_conflict_markers(
2439 &mut self,
2440 buffer: text::BufferSnapshot,
2441 cx: &mut Context<Self>,
2442 ) -> oneshot::Receiver<()> {
2443 let (tx, rx) = oneshot::channel();
2444
2445 let Some(conflict_set) = self
2446 .conflict_set
2447 .as_ref()
2448 .and_then(|conflict_set| conflict_set.upgrade())
2449 else {
2450 return rx;
2451 };
2452
2453 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2454 if conflict_set.has_conflict {
2455 Some(conflict_set.snapshot())
2456 } else {
2457 None
2458 }
2459 });
2460
2461 if let Some(old_snapshot) = old_snapshot {
2462 self.conflict_updated_futures.push(tx);
2463 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2464 let (snapshot, changed_range) = cx
2465 .background_spawn(async move {
2466 let new_snapshot = ConflictSet::parse(&buffer);
2467 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2468 (new_snapshot, changed_range)
2469 })
2470 .await;
2471 this.update(cx, |this, cx| {
2472 if let Some(conflict_set) = &this.conflict_set {
2473 conflict_set
2474 .update(cx, |conflict_set, cx| {
2475 conflict_set.set_snapshot(snapshot, changed_range, cx);
2476 })
2477 .ok();
2478 }
2479 let futures = std::mem::take(&mut this.conflict_updated_futures);
2480 for tx in futures {
2481 tx.send(()).ok();
2482 }
2483 })
2484 }))
2485 }
2486
2487 rx
2488 }
2489
2490 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2491 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2492 }
2493
2494 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2495 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2496 }
2497
2498 fn handle_base_texts_updated(
2499 &mut self,
2500 buffer: text::BufferSnapshot,
2501 message: proto::UpdateDiffBases,
2502 cx: &mut Context<Self>,
2503 ) {
2504 use proto::update_diff_bases::Mode;
2505
2506 let Some(mode) = Mode::from_i32(message.mode) else {
2507 return;
2508 };
2509
2510 let diff_bases_change = match mode {
2511 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2512 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2513 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2514 Mode::IndexAndHead => DiffBasesChange::SetEach {
2515 index: message.staged_text,
2516 head: message.committed_text,
2517 },
2518 };
2519
2520 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2521 }
2522
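    /// If a diff recalculation is currently in flight, returns a future that
    /// resolves once recalculation has settled; otherwise returns `None`.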
2523 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2524 if *self.recalculating_tx.borrow() {
2525 let mut rx = self.recalculating_tx.subscribe();
2526 Some(async move {
2527 loop {
2528 let is_recalculating = rx.recv().await;
2529 if is_recalculating != Some(true) {
2530 break;
2531 }
2532 }
2533 })
2534 } else {
2535 None
2536 }
2537 }
2538
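    /// Applies a change to the cached HEAD and/or index base texts, normalizing
    /// their line endings, and then kicks off a diff recalculation against the
    /// given buffer snapshot.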
2539 fn diff_bases_changed(
2540 &mut self,
2541 buffer: text::BufferSnapshot,
2542 diff_bases_change: Option<DiffBasesChange>,
2543 cx: &mut Context<Self>,
2544 ) {
2545 match diff_bases_change {
2546 Some(DiffBasesChange::SetIndex(index)) => {
2547 self.index_text = index.map(|mut index| {
2548 text::LineEnding::normalize(&mut index);
2549 Arc::new(index)
2550 });
2551 self.index_changed = true;
2552 }
2553 Some(DiffBasesChange::SetHead(head)) => {
2554 self.head_text = head.map(|mut head| {
2555 text::LineEnding::normalize(&mut head);
2556 Arc::new(head)
2557 });
2558 self.head_changed = true;
2559 }
2560 Some(DiffBasesChange::SetBoth(text)) => {
2561 let text = text.map(|mut text| {
2562 text::LineEnding::normalize(&mut text);
2563 Arc::new(text)
2564 });
2565 self.head_text = text.clone();
2566 self.index_text = text;
2567 self.head_changed = true;
2568 self.index_changed = true;
2569 }
2570 Some(DiffBasesChange::SetEach { index, head }) => {
2571 self.index_text = index.map(|mut index| {
2572 text::LineEnding::normalize(&mut index);
2573 Arc::new(index)
2574 });
2575 self.index_changed = true;
2576 self.head_text = head.map(|mut head| {
2577 text::LineEnding::normalize(&mut head);
2578 Arc::new(head)
2579 });
2580 self.head_changed = true;
2581 }
2582 None => {}
2583 }
2584
2585 self.recalculate_diffs(buffer, cx)
2586 }
2587
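    /// Recomputes the unstaged and uncommitted diffs on a background task,
    /// sharing one diff when the index matches HEAD. The update is abandoned if
    /// new hunk staging operations arrive while it is in flight, since a later
    /// recalculation will run once the index has settled.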
2588 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2589 *self.recalculating_tx.borrow_mut() = true;
2590
2591 let language = self.language.clone();
2592 let language_registry = self.language_registry.clone();
2593 let unstaged_diff = self.unstaged_diff();
2594 let uncommitted_diff = self.uncommitted_diff();
2595 let head = self.head_text.clone();
2596 let index = self.index_text.clone();
2597 let index_changed = self.index_changed;
2598 let head_changed = self.head_changed;
2599 let language_changed = self.language_changed;
2600 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2601 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2602 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2603 (None, None) => true,
2604 _ => false,
2605 };
2606 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2607 log::debug!(
2608 "start recalculating diffs for buffer {}",
2609 buffer.remote_id()
2610 );
2611
2612 let mut new_unstaged_diff = None;
2613 if let Some(unstaged_diff) = &unstaged_diff {
2614 new_unstaged_diff = Some(
2615 BufferDiff::update_diff(
2616 unstaged_diff.clone(),
2617 buffer.clone(),
2618 index,
2619 index_changed,
2620 language_changed,
2621 language.clone(),
2622 language_registry.clone(),
2623 cx,
2624 )
2625 .await?,
2626 );
2627 }
2628
2629 let mut new_uncommitted_diff = None;
2630 if let Some(uncommitted_diff) = &uncommitted_diff {
2631 new_uncommitted_diff = if index_matches_head {
2632 new_unstaged_diff.clone()
2633 } else {
2634 Some(
2635 BufferDiff::update_diff(
2636 uncommitted_diff.clone(),
2637 buffer.clone(),
2638 head,
2639 head_changed,
2640 language_changed,
2641 language.clone(),
2642 language_registry.clone(),
2643 cx,
2644 )
2645 .await?,
2646 )
2647 }
2648 }
2649
2650 let cancel = this.update(cx, |this, _| {
2651 // This checks whether all pending stage/unstage operations
2652 // have quiesced (i.e. both the corresponding write and the
2653 // read of that write have completed). If not, then we cancel
2654 // this recalculation attempt to avoid invalidating pending
2655 // state too quickly; another recalculation will come along
2656 // later and clear the pending state once the state of the index has settled.
2657 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2658 *this.recalculating_tx.borrow_mut() = false;
2659 true
2660 } else {
2661 false
2662 }
2663 })?;
2664 if cancel {
2665 log::debug!(
2666 concat!(
2667 "aborting recalculating diffs for buffer {} ",
2668 "due to subsequent hunk operations",
2669 ),
2670 buffer.remote_id()
2671 );
2672 return Ok(());
2673 }
2674
2675 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2676 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2677 {
2678 unstaged_diff.update(cx, |diff, cx| {
2679 if language_changed {
2680 diff.language_changed(cx);
2681 }
2682 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2683 })?
2684 } else {
2685 None
2686 };
2687
2688 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2689 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2690 {
2691 uncommitted_diff.update(cx, |diff, cx| {
2692 if language_changed {
2693 diff.language_changed(cx);
2694 }
2695 diff.set_snapshot_with_secondary(
2696 new_uncommitted_diff,
2697 &buffer,
2698 unstaged_changed_range,
2699 true,
2700 cx,
2701 );
2702 })?;
2703 }
2704
2705 log::debug!(
2706 "finished recalculating diffs for buffer {}",
2707 buffer.remote_id()
2708 );
2709
2710 if let Some(this) = this.upgrade() {
2711 this.update(cx, |this, _| {
2712 this.index_changed = false;
2713 this.head_changed = false;
2714 this.language_changed = false;
2715 *this.recalculating_tx.borrow_mut() = false;
2716 })?;
2717 }
2718
2719 Ok(())
2720 }));
2721 }
2722}
2723
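/// Builds an askpass delegate that forwards credential prompts to the
/// downstream client over RPC and relays the response back to the local
/// askpass session.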
2724fn make_remote_delegate(
2725 this: Entity<GitStore>,
2726 project_id: u64,
2727 repository_id: RepositoryId,
2728 askpass_id: u64,
2729 cx: &mut AsyncApp,
2730) -> AskPassDelegate {
2731 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2732 this.update(cx, |this, cx| {
2733 let Some((client, _)) = this.downstream_client() else {
2734 return;
2735 };
2736 let response = client.request(proto::AskPassRequest {
2737 project_id,
2738 repository_id: repository_id.to_proto(),
2739 askpass_id,
2740 prompt,
2741 });
2742 cx.spawn(async move |_, _| {
2743 tx.send(response.await?.response).ok();
2744 anyhow::Ok(())
2745 })
2746 .detach_and_log_err(cx);
2747 })
2748 .log_err();
2749 })
2750}
2751
2752impl RepositoryId {
2753 pub fn to_proto(self) -> u64 {
2754 self.0
2755 }
2756
2757 pub fn from_proto(id: u64) -> Self {
2758 RepositoryId(id)
2759 }
2760}
2761
2762impl RepositorySnapshot {
2763 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2764 Self {
2765 id,
2766 statuses_by_path: Default::default(),
2767 work_directory_abs_path,
2768 branch: None,
2769 head_commit: None,
2770 scan_id: 0,
2771 merge: Default::default(),
2772 remote_origin_url: None,
2773 remote_upstream_url: None,
2774 stash_entries: Default::default(),
2775 }
2776 }
2777
2778 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2779 proto::UpdateRepository {
2780 branch_summary: self.branch.as_ref().map(branch_to_proto),
2781 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2782 updated_statuses: self
2783 .statuses_by_path
2784 .iter()
2785 .map(|entry| entry.to_proto())
2786 .collect(),
2787 removed_statuses: Default::default(),
2788 current_merge_conflicts: self
2789 .merge
2790 .conflicted_paths
2791 .iter()
2792 .map(|repo_path| repo_path.to_proto())
2793 .collect(),
2794 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2795 project_id,
2796 id: self.id.to_proto(),
2797 abs_path: self.work_directory_abs_path.to_proto(),
2798 entry_ids: vec![self.id.to_proto()],
2799 scan_id: self.scan_id,
2800 is_last_update: true,
2801 stash_entries: self
2802 .stash_entries
2803 .entries
2804 .iter()
2805 .map(stash_to_proto)
2806 .collect(),
2807 }
2808 }
2809
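    /// Walks the old and new status trees in lockstep (both are ordered by repo
    /// path) to produce a minimal delta of updated and removed status entries
    /// for downstream clients.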
2810 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2811 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2812 let mut removed_statuses: Vec<String> = Vec::new();
2813
2814 let mut new_statuses = self.statuses_by_path.iter().peekable();
2815 let mut old_statuses = old.statuses_by_path.iter().peekable();
2816
2817 let mut current_new_entry = new_statuses.next();
2818 let mut current_old_entry = old_statuses.next();
2819 loop {
2820 match (current_new_entry, current_old_entry) {
2821 (Some(new_entry), Some(old_entry)) => {
2822 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2823 Ordering::Less => {
2824 updated_statuses.push(new_entry.to_proto());
2825 current_new_entry = new_statuses.next();
2826 }
2827 Ordering::Equal => {
2828 if new_entry.status != old_entry.status {
2829 updated_statuses.push(new_entry.to_proto());
2830 }
2831 current_old_entry = old_statuses.next();
2832 current_new_entry = new_statuses.next();
2833 }
2834 Ordering::Greater => {
2835 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2836 current_old_entry = old_statuses.next();
2837 }
2838 }
2839 }
2840 (None, Some(old_entry)) => {
2841 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2842 current_old_entry = old_statuses.next();
2843 }
2844 (Some(new_entry), None) => {
2845 updated_statuses.push(new_entry.to_proto());
2846 current_new_entry = new_statuses.next();
2847 }
2848 (None, None) => break,
2849 }
2850 }
2851
2852 proto::UpdateRepository {
2853 branch_summary: self.branch.as_ref().map(branch_to_proto),
2854 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2855 updated_statuses,
2856 removed_statuses,
2857 current_merge_conflicts: self
2858 .merge
2859 .conflicted_paths
2860 .iter()
2861 .map(|path| path.as_ref().to_proto())
2862 .collect(),
2863 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2864 project_id,
2865 id: self.id.to_proto(),
2866 abs_path: self.work_directory_abs_path.to_proto(),
2867 entry_ids: vec![],
2868 scan_id: self.scan_id,
2869 is_last_update: true,
2870 stash_entries: self
2871 .stash_entries
2872 .entries
2873 .iter()
2874 .map(stash_to_proto)
2875 .collect(),
2876 }
2877 }
2878
2879 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2880 self.statuses_by_path.iter().cloned()
2881 }
2882
2883 pub fn status_summary(&self) -> GitSummary {
2884 self.statuses_by_path.summary().item_summary
2885 }
2886
2887 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2888 self.statuses_by_path
2889 .get(&PathKey(path.0.clone()), &())
2890 .cloned()
2891 }
2892
2893 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2894 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path)
2895 }
2896
2897 #[inline]
2898 fn abs_path_to_repo_path_inner(
2899 work_directory_abs_path: &Path,
2900 abs_path: &Path,
2901 ) -> Option<RepoPath> {
2902 abs_path
2903 .strip_prefix(&work_directory_abs_path)
2904 .map(RepoPath::from)
2905 .ok()
2906 }
2907
2908 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2909 self.merge.conflicted_paths.contains(repo_path)
2910 }
2911
2912 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2913 let had_conflict_on_last_merge_head_change =
2914 self.merge.conflicted_paths.contains(repo_path);
2915 let has_conflict_currently = self
2916 .status_for_path(repo_path)
2917 .is_some_and(|entry| entry.status.is_conflicted());
2918 had_conflict_on_last_merge_head_change || has_conflict_currently
2919 }
2920
2921 /// This is the name that will be displayed in the repository selector for this repository.
2922 pub fn display_name(&self) -> SharedString {
2923 self.work_directory_abs_path
2924 .file_name()
2925 .unwrap_or_default()
2926 .to_string_lossy()
2927 .to_string()
2928 .into()
2929 }
2930}
2931
2932pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
2933 proto::StashEntry {
2934 oid: entry.oid.as_bytes().to_vec(),
2935 message: entry.message.clone(),
2936 branch: entry.branch.clone(),
2937 index: entry.index as u64,
2938 timestamp: entry.timestamp,
2939 }
2940}
2941
2942pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
2943 Ok(StashEntry {
2944 oid: Oid::from_bytes(&entry.oid)?,
2945 message: entry.message.clone(),
2946 index: entry.index as usize,
2947 branch: entry.branch.clone(),
2948 timestamp: entry.timestamp,
2949 })
2950}
2951
2952impl MergeDetails {
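    /// Loads the merge-related heads (MERGE_HEAD, CHERRY_PICK_HEAD, etc.) and,
    /// when they have changed, recomputes the set of conflicted paths from the
    /// status tree. Returns the new details along with whether the merge heads
    /// changed.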
2953 async fn load(
2954 backend: &Arc<dyn GitRepository>,
2955 status: &SumTree<StatusEntry>,
2956 prev_snapshot: &RepositorySnapshot,
2957 ) -> Result<(MergeDetails, bool)> {
2958 log::debug!("load merge details");
2959 let message = backend.merge_message().await;
2960 let heads = backend
2961 .revparse_batch(vec![
2962 "MERGE_HEAD".into(),
2963 "CHERRY_PICK_HEAD".into(),
2964 "REBASE_HEAD".into(),
2965 "REVERT_HEAD".into(),
2966 "APPLY_HEAD".into(),
2967 ])
2968 .await
2969 .log_err()
2970 .unwrap_or_default()
2971 .into_iter()
2972 .map(|opt| opt.map(SharedString::from))
2973 .collect::<Vec<_>>();
2974 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2975 let conflicted_paths = if merge_heads_changed {
2976 let current_conflicted_paths = TreeSet::from_ordered_entries(
2977 status
2978 .iter()
2979 .filter(|entry| entry.status.is_conflicted())
2980 .map(|entry| entry.repo_path.clone()),
2981 );
2982
2983 // It can happen that we run a scan while a lengthy merge is in progress
2984 // that will eventually result in conflicts, but before those conflicts
2985 // are reported by `git status`. Since for the moment we only care about
2986 // the merge heads state for the purposes of tracking conflicts, don't update
2987 // this state until we see some conflicts.
2988 if heads.iter().any(Option::is_some)
2989 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
2990 && current_conflicted_paths.is_empty()
2991 {
2992 log::debug!("not updating merge heads because no conflicts found");
2993 return Ok((
2994 MergeDetails {
2995 message: message.map(SharedString::from),
2996 ..prev_snapshot.merge.clone()
2997 },
2998 false,
2999 ));
3000 }
3001
3002 current_conflicted_paths
3003 } else {
3004 prev_snapshot.merge.conflicted_paths.clone()
3005 };
3006 let details = MergeDetails {
3007 conflicted_paths,
3008 message: message.map(SharedString::from),
3009 heads,
3010 };
3011 Ok((details, merge_heads_changed))
3012 }
3013}
3014
3015impl Repository {
3016 pub fn snapshot(&self) -> RepositorySnapshot {
3017 self.snapshot.clone()
3018 }
3019
3020 fn local(
3021 id: RepositoryId,
3022 work_directory_abs_path: Arc<Path>,
3023 dot_git_abs_path: Arc<Path>,
3024 repository_dir_abs_path: Arc<Path>,
3025 common_dir_abs_path: Arc<Path>,
3026 project_environment: WeakEntity<ProjectEnvironment>,
3027 fs: Arc<dyn Fs>,
3028 git_store: WeakEntity<GitStore>,
3029 cx: &mut Context<Self>,
3030 ) -> Self {
3031 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
3032 Repository {
3033 this: cx.weak_entity(),
3034 git_store,
3035 snapshot,
3036 commit_message_buffer: None,
3037 askpass_delegates: Default::default(),
3038 paths_needing_status_update: Default::default(),
3039 latest_askpass_id: 0,
3040 job_sender: Repository::spawn_local_git_worker(
3041 work_directory_abs_path,
3042 dot_git_abs_path,
3043 repository_dir_abs_path,
3044 common_dir_abs_path,
3045 project_environment,
3046 fs,
3047 cx,
3048 ),
3049 job_id: 0,
3050 active_jobs: Default::default(),
3051 }
3052 }
3053
3054 fn remote(
3055 id: RepositoryId,
3056 work_directory_abs_path: Arc<Path>,
3057 project_id: ProjectId,
3058 client: AnyProtoClient,
3059 git_store: WeakEntity<GitStore>,
3060 cx: &mut Context<Self>,
3061 ) -> Self {
3062 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
3063 Self {
3064 this: cx.weak_entity(),
3065 snapshot,
3066 commit_message_buffer: None,
3067 git_store,
3068 paths_needing_status_update: Default::default(),
3069 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3070 askpass_delegates: Default::default(),
3071 latest_askpass_id: 0,
3072 active_jobs: Default::default(),
3073 job_id: 0,
3074 }
3075 }
3076
3077 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3078 self.git_store.upgrade()
3079 }
3080
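    /// Reloads the index and HEAD texts for every open buffer belonging to this
    /// repository and feeds any changes to the per-buffer diff state, mirroring
    /// them to downstream clients. Only meaningful for local repositories.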
3081 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3082 let this = cx.weak_entity();
3083 let git_store = self.git_store.clone();
3084 let _ = self.send_keyed_job(
3085 Some(GitJobKey::ReloadBufferDiffBases),
3086 None,
3087 |state, mut cx| async move {
3088 let RepositoryState::Local { backend, .. } = state else {
3089 log::error!("tried to recompute diffs for a non-local repository");
3090 return Ok(());
3091 };
3092
3093 let Some(this) = this.upgrade() else {
3094 return Ok(());
3095 };
3096
3097 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3098 git_store.update(cx, |git_store, cx| {
3099 git_store
3100 .diffs
3101 .iter()
3102 .filter_map(|(buffer_id, diff_state)| {
3103 let buffer_store = git_store.buffer_store.read(cx);
3104 let buffer = buffer_store.get(*buffer_id)?;
3105 let file = File::from_dyn(buffer.read(cx).file())?;
3106 let abs_path =
3107 file.worktree.read(cx).absolutize(&file.path).ok()?;
3108 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3109 log::debug!(
3110 "start reload diff bases for repo path {}",
3111 repo_path.0.display()
3112 );
3113 diff_state.update(cx, |diff_state, _| {
3114 let has_unstaged_diff = diff_state
3115 .unstaged_diff
3116 .as_ref()
3117 .is_some_and(|diff| diff.is_upgradable());
3118 let has_uncommitted_diff = diff_state
3119 .uncommitted_diff
3120 .as_ref()
3121 .is_some_and(|set| set.is_upgradable());
3122
3123 Some((
3124 buffer,
3125 repo_path,
3126 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3127 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3128 ))
3129 })
3130 })
3131 .collect::<Vec<_>>()
3132 })
3133 })??;
3134
3135 let buffer_diff_base_changes = cx
3136 .background_spawn(async move {
3137 let mut changes = Vec::new();
3138 for (buffer, repo_path, current_index_text, current_head_text) in
3139 &repo_diff_state_updates
3140 {
3141 let index_text = if current_index_text.is_some() {
3142 backend.load_index_text(repo_path.clone()).await
3143 } else {
3144 None
3145 };
3146 let head_text = if current_head_text.is_some() {
3147 backend.load_committed_text(repo_path.clone()).await
3148 } else {
3149 None
3150 };
3151
3152 let change =
3153 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3154 (Some(current_index), Some(current_head)) => {
3155 let index_changed =
3156 index_text.as_ref() != current_index.as_deref();
3157 let head_changed =
3158 head_text.as_ref() != current_head.as_deref();
3159 if index_changed && head_changed {
3160 if index_text == head_text {
3161 Some(DiffBasesChange::SetBoth(head_text))
3162 } else {
3163 Some(DiffBasesChange::SetEach {
3164 index: index_text,
3165 head: head_text,
3166 })
3167 }
3168 } else if index_changed {
3169 Some(DiffBasesChange::SetIndex(index_text))
3170 } else if head_changed {
3171 Some(DiffBasesChange::SetHead(head_text))
3172 } else {
3173 None
3174 }
3175 }
3176 (Some(current_index), None) => {
3177 let index_changed =
3178 index_text.as_ref() != current_index.as_deref();
3179 index_changed
3180 .then_some(DiffBasesChange::SetIndex(index_text))
3181 }
3182 (None, Some(current_head)) => {
3183 let head_changed =
3184 head_text.as_ref() != current_head.as_deref();
3185 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3186 }
3187 (None, None) => None,
3188 };
3189
3190 changes.push((buffer.clone(), change))
3191 }
3192 changes
3193 })
3194 .await;
3195
3196 git_store.update(&mut cx, |git_store, cx| {
3197 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3198 let buffer_snapshot = buffer.read(cx).text_snapshot();
3199 let buffer_id = buffer_snapshot.remote_id();
3200 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3201 continue;
3202 };
3203
3204 let downstream_client = git_store.downstream_client();
3205 diff_state.update(cx, |diff_state, cx| {
3206 use proto::update_diff_bases::Mode;
3207
3208 if let Some((diff_bases_change, (client, project_id))) =
3209 diff_bases_change.clone().zip(downstream_client)
3210 {
3211 let (staged_text, committed_text, mode) = match diff_bases_change {
3212 DiffBasesChange::SetIndex(index) => {
3213 (index, None, Mode::IndexOnly)
3214 }
3215 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3216 DiffBasesChange::SetEach { index, head } => {
3217 (index, head, Mode::IndexAndHead)
3218 }
3219 DiffBasesChange::SetBoth(text) => {
3220 (None, text, Mode::IndexMatchesHead)
3221 }
3222 };
3223 client
3224 .send(proto::UpdateDiffBases {
3225 project_id: project_id.to_proto(),
3226 buffer_id: buffer_id.to_proto(),
3227 staged_text,
3228 committed_text,
3229 mode: mode as i32,
3230 })
3231 .log_err();
3232 }
3233
3234 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3235 });
3236 }
3237 })
3238 },
3239 );
3240 }
3241
3242 pub fn send_job<F, Fut, R>(
3243 &mut self,
3244 status: Option<SharedString>,
3245 job: F,
3246 ) -> oneshot::Receiver<R>
3247 where
3248 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3249 Fut: Future<Output = R> + 'static,
3250 R: Send + 'static,
3251 {
3252 self.send_keyed_job(None, status, job)
3253 }
3254
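    /// Enqueues a job on the repository's git worker. The optional key lets the
    /// worker identify related jobs (how keyed jobs are coalesced is handled by
    /// the worker loop, not shown here), and the optional status message is
    /// recorded in `active_jobs` while the job runs so the UI can display it.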
3255 fn send_keyed_job<F, Fut, R>(
3256 &mut self,
3257 key: Option<GitJobKey>,
3258 status: Option<SharedString>,
3259 job: F,
3260 ) -> oneshot::Receiver<R>
3261 where
3262 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3263 Fut: Future<Output = R> + 'static,
3264 R: Send + 'static,
3265 {
3266 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3267 let job_id = post_inc(&mut self.job_id);
3268 let this = self.this.clone();
3269 self.job_sender
3270 .unbounded_send(GitJob {
3271 key,
3272 job: Box::new(move |state, cx: &mut AsyncApp| {
3273 let job = job(state, cx.clone());
3274 cx.spawn(async move |cx| {
3275 if let Some(s) = status.clone() {
3276 this.update(cx, |this, cx| {
3277 this.active_jobs.insert(
3278 job_id,
3279 JobInfo {
3280 start: Instant::now(),
3281 message: s.clone(),
3282 },
3283 );
3284
3285 cx.notify();
3286 })
3287 .ok();
3288 }
3289 let result = job.await;
3290
3291 this.update(cx, |this, cx| {
3292 this.active_jobs.remove(&job_id);
3293 cx.notify();
3294 })
3295 .ok();
3296
3297 result_tx.send(result).ok();
3298 })
3299 }),
3300 })
3301 .ok();
3302 result_rx
3303 }
3304
3305 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3306 let Some(git_store) = self.git_store.upgrade() else {
3307 return;
3308 };
3309 let entity = cx.entity();
3310 git_store.update(cx, |git_store, cx| {
3311 let Some((&id, _)) = git_store
3312 .repositories
3313 .iter()
3314 .find(|(_, handle)| *handle == &entity)
3315 else {
3316 return;
3317 };
3318 git_store.active_repo_id = Some(id);
3319 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3320 });
3321 }
3322
3323 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3324 self.snapshot.status()
3325 }
3326
3327 pub fn cached_stash(&self) -> GitStash {
3328 self.snapshot.stash_entries.clone()
3329 }
3330
3331 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3332 let git_store = self.git_store.upgrade()?;
3333 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3334 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
3335 let abs_path = SanitizedPath::new(&abs_path);
3336 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3337 Some(ProjectPath {
3338 worktree_id: worktree.read(cx).id(),
3339 path: relative_path.into(),
3340 })
3341 }
3342
3343 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3344 let git_store = self.git_store.upgrade()?;
3345 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3346 let abs_path = worktree_store.absolutize(path, cx)?;
3347 self.snapshot.abs_path_to_repo_path(&abs_path)
3348 }
3349
3350 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3351 other
3352 .read(cx)
3353 .snapshot
3354 .work_directory_abs_path
3355 .starts_with(&self.snapshot.work_directory_abs_path)
3356 }
3357
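    /// Returns the shared commit message buffer for this repository, creating it
    /// on first use: locally via the buffer store, or by asking the remote host
    /// for one, and tagging it with the "Git Commit" language when a registry is
    /// provided.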
3358 pub fn open_commit_buffer(
3359 &mut self,
3360 languages: Option<Arc<LanguageRegistry>>,
3361 buffer_store: Entity<BufferStore>,
3362 cx: &mut Context<Self>,
3363 ) -> Task<Result<Entity<Buffer>>> {
3364 let id = self.id;
3365 if let Some(buffer) = self.commit_message_buffer.clone() {
3366 return Task::ready(Ok(buffer));
3367 }
3368 let this = cx.weak_entity();
3369
3370 let rx = self.send_job(None, move |state, mut cx| async move {
3371 let Some(this) = this.upgrade() else {
3372 bail!("git store was dropped");
3373 };
3374 match state {
3375 RepositoryState::Local { .. } => {
3376 this.update(&mut cx, |_, cx| {
3377 Self::open_local_commit_buffer(languages, buffer_store, cx)
3378 })?
3379 .await
3380 }
3381 RepositoryState::Remote { project_id, client } => {
3382 let request = client.request(proto::OpenCommitMessageBuffer {
3383 project_id: project_id.0,
3384 repository_id: id.to_proto(),
3385 });
3386 let response = request.await.context("requesting to open commit buffer")?;
3387 let buffer_id = BufferId::new(response.buffer_id)?;
3388 let buffer = buffer_store
3389 .update(&mut cx, |buffer_store, cx| {
3390 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3391 })?
3392 .await?;
3393 if let Some(language_registry) = languages {
3394 let git_commit_language =
3395 language_registry.language_for_name("Git Commit").await?;
3396 buffer.update(&mut cx, |buffer, cx| {
3397 buffer.set_language(Some(git_commit_language), cx);
3398 })?;
3399 }
3400 this.update(&mut cx, |this, _| {
3401 this.commit_message_buffer = Some(buffer.clone());
3402 })?;
3403 Ok(buffer)
3404 }
3405 }
3406 });
3407
3408 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3409 }
3410
3411 fn open_local_commit_buffer(
3412 language_registry: Option<Arc<LanguageRegistry>>,
3413 buffer_store: Entity<BufferStore>,
3414 cx: &mut Context<Self>,
3415 ) -> Task<Result<Entity<Buffer>>> {
3416 cx.spawn(async move |repository, cx| {
3417 let buffer = buffer_store
3418 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3419 .await?;
3420
3421 if let Some(language_registry) = language_registry {
3422 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3423 buffer.update(cx, |buffer, cx| {
3424 buffer.set_language(Some(git_commit_language), cx);
3425 })?;
3426 }
3427
3428 repository.update(cx, |repository, _| {
3429 repository.commit_message_buffer = Some(buffer.clone());
3430 })?;
3431 Ok(buffer)
3432 })
3433 }
3434
3435 pub fn checkout_files(
3436 &mut self,
3437 commit: &str,
3438 paths: Vec<RepoPath>,
3439 _cx: &mut App,
3440 ) -> oneshot::Receiver<Result<()>> {
3441 let commit = commit.to_string();
3442 let id = self.id;
3443
3444 self.send_job(
3445 Some(format!("git checkout {}", commit).into()),
3446 move |git_repo, _| async move {
3447 match git_repo {
3448 RepositoryState::Local {
3449 backend,
3450 environment,
3451 ..
3452 } => {
3453 backend
3454 .checkout_files(commit, paths, environment.clone())
3455 .await
3456 }
3457 RepositoryState::Remote { project_id, client } => {
3458 client
3459 .request(proto::GitCheckoutFiles {
3460 project_id: project_id.0,
3461 repository_id: id.to_proto(),
3462 commit,
3463 paths: paths
3464 .into_iter()
3465 .map(|p| p.to_string_lossy().to_string())
3466 .collect(),
3467 })
3468 .await?;
3469
3470 Ok(())
3471 }
3472 }
3473 },
3474 )
3475 }
3476
3477 pub fn reset(
3478 &mut self,
3479 commit: String,
3480 reset_mode: ResetMode,
3481 _cx: &mut App,
3482 ) -> oneshot::Receiver<Result<()>> {
3483 let id = self.id;
3484
3485 self.send_job(None, move |git_repo, _| async move {
3486 match git_repo {
3487 RepositoryState::Local {
3488 backend,
3489 environment,
3490 ..
3491 } => backend.reset(commit, reset_mode, environment).await,
3492 RepositoryState::Remote { project_id, client } => {
3493 client
3494 .request(proto::GitReset {
3495 project_id: project_id.0,
3496 repository_id: id.to_proto(),
3497 commit,
3498 mode: match reset_mode {
3499 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3500 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3501 },
3502 })
3503 .await?;
3504
3505 Ok(())
3506 }
3507 }
3508 })
3509 }
3510
3511 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3512 let id = self.id;
3513 self.send_job(None, move |git_repo, _cx| async move {
3514 match git_repo {
3515 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3516 RepositoryState::Remote { project_id, client } => {
3517 let resp = client
3518 .request(proto::GitShow {
3519 project_id: project_id.0,
3520 repository_id: id.to_proto(),
3521 commit,
3522 })
3523 .await?;
3524
3525 Ok(CommitDetails {
3526 sha: resp.sha.into(),
3527 message: resp.message.into(),
3528 commit_timestamp: resp.commit_timestamp,
3529 author_email: resp.author_email.into(),
3530 author_name: resp.author_name.into(),
3531 })
3532 }
3533 }
3534 })
3535 }
3536
3537 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3538 let id = self.id;
3539 self.send_job(None, move |git_repo, cx| async move {
3540 match git_repo {
3541 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3542 RepositoryState::Remote {
3543 client, project_id, ..
3544 } => {
3545 let response = client
3546 .request(proto::LoadCommitDiff {
3547 project_id: project_id.0,
3548 repository_id: id.to_proto(),
3549 commit,
3550 })
3551 .await?;
3552 Ok(CommitDiff {
3553 files: response
3554 .files
3555 .into_iter()
3556 .map(|file| CommitFile {
3557 path: Path::new(&file.path).into(),
3558 old_text: file.old_text,
3559 new_text: file.new_text,
3560 })
3561 .collect(),
3562 })
3563 }
3564 }
3565 })
3566 }
3567
3568 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3569 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3570 }
3571
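    /// Stages the given paths, first saving any open buffers for them so that
    /// the index picks up unsaved edits.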
3572 pub fn stage_entries(
3573 &self,
3574 entries: Vec<RepoPath>,
3575 cx: &mut Context<Self>,
3576 ) -> Task<anyhow::Result<()>> {
3577 if entries.is_empty() {
3578 return Task::ready(Ok(()));
3579 }
3580 let id = self.id;
3581
3582 let mut save_futures = Vec::new();
3583 if let Some(buffer_store) = self.buffer_store(cx) {
3584 buffer_store.update(cx, |buffer_store, cx| {
3585 for path in &entries {
3586 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3587 continue;
3588 };
3589 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3590 && buffer
3591 .read(cx)
3592 .file()
3593 .is_some_and(|file| file.disk_state().exists())
3594 {
3595 save_futures.push(buffer_store.save_buffer(buffer, cx));
3596 }
3597 }
3598 })
3599 }
3600
3601 cx.spawn(async move |this, cx| {
3602 for save_future in save_futures {
3603 save_future.await?;
3604 }
3605
3606 this.update(cx, |this, _| {
3607 this.send_job(None, move |git_repo, _cx| async move {
3608 match git_repo {
3609 RepositoryState::Local {
3610 backend,
3611 environment,
3612 ..
3613 } => backend.stage_paths(entries, environment.clone()).await,
3614 RepositoryState::Remote { project_id, client } => {
3615 client
3616 .request(proto::Stage {
3617 project_id: project_id.0,
3618 repository_id: id.to_proto(),
3619 paths: entries
3620 .into_iter()
3621 .map(|repo_path| repo_path.as_ref().to_proto())
3622 .collect(),
3623 })
3624 .await
3625 .context("sending stage request")?;
3626
3627 Ok(())
3628 }
3629 }
3630 })
3631 })?
3632 .await??;
3633
3634 Ok(())
3635 })
3636 }
3637
3638 pub fn unstage_entries(
3639 &self,
3640 entries: Vec<RepoPath>,
3641 cx: &mut Context<Self>,
3642 ) -> Task<anyhow::Result<()>> {
3643 if entries.is_empty() {
3644 return Task::ready(Ok(()));
3645 }
3646 let id = self.id;
3647
3648 let mut save_futures = Vec::new();
3649 if let Some(buffer_store) = self.buffer_store(cx) {
3650 buffer_store.update(cx, |buffer_store, cx| {
3651 for path in &entries {
3652 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3653 continue;
3654 };
3655 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3656 && buffer
3657 .read(cx)
3658 .file()
3659 .is_some_and(|file| file.disk_state().exists())
3660 {
3661 save_futures.push(buffer_store.save_buffer(buffer, cx));
3662 }
3663 }
3664 })
3665 }
3666
3667 cx.spawn(async move |this, cx| {
3668 for save_future in save_futures {
3669 save_future.await?;
3670 }
3671
3672 this.update(cx, |this, _| {
3673 this.send_job(None, move |git_repo, _cx| async move {
3674 match git_repo {
3675 RepositoryState::Local {
3676 backend,
3677 environment,
3678 ..
3679 } => backend.unstage_paths(entries, environment).await,
3680 RepositoryState::Remote { project_id, client } => {
3681 client
3682 .request(proto::Unstage {
3683 project_id: project_id.0,
3684 repository_id: id.to_proto(),
3685 paths: entries
3686 .into_iter()
3687 .map(|repo_path| repo_path.as_ref().to_proto())
3688 .collect(),
3689 })
3690 .await
3691 .context("sending unstage request")?;
3692
3693 Ok(())
3694 }
3695 }
3696 })
3697 })?
3698 .await??;
3699
3700 Ok(())
3701 })
3702 }
3703
3704 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3705 let to_stage = self
3706 .cached_status()
3707 .filter(|entry| !entry.status.staging().is_fully_staged())
3708 .map(|entry| entry.repo_path)
3709 .collect();
3710 self.stage_entries(to_stage, cx)
3711 }
3712
3713 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3714 let to_unstage = self
3715 .cached_status()
3716 .filter(|entry| entry.status.staging().has_staged())
3717 .map(|entry| entry.repo_path)
3718 .collect();
3719 self.unstage_entries(to_unstage, cx)
3720 }
3721
3722 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3723 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3724
3725 self.stash_entries(to_stash, cx)
3726 }
3727
3728 pub fn stash_entries(
3729 &mut self,
3730 entries: Vec<RepoPath>,
3731 cx: &mut Context<Self>,
3732 ) -> Task<anyhow::Result<()>> {
3733 let id = self.id;
3734
3735 cx.spawn(async move |this, cx| {
3736 this.update(cx, |this, _| {
3737 this.send_job(None, move |git_repo, _cx| async move {
3738 match git_repo {
3739 RepositoryState::Local {
3740 backend,
3741 environment,
3742 ..
3743 } => backend.stash_paths(entries, environment).await,
3744 RepositoryState::Remote { project_id, client } => {
3745 client
3746 .request(proto::Stash {
3747 project_id: project_id.0,
3748 repository_id: id.to_proto(),
3749 paths: entries
3750 .into_iter()
3751 .map(|repo_path| repo_path.as_ref().to_proto())
3752 .collect(),
3753 })
3754 .await
3755 .context("sending stash request")?;
3756 Ok(())
3757 }
3758 }
3759 })
3760 })?
3761 .await??;
3762 Ok(())
3763 })
3764 }
3765
3766 pub fn stash_pop(
3767 &mut self,
3768 index: Option<usize>,
3769 cx: &mut Context<Self>,
3770 ) -> Task<anyhow::Result<()>> {
3771 let id = self.id;
3772 cx.spawn(async move |this, cx| {
3773 this.update(cx, |this, _| {
3774 this.send_job(None, move |git_repo, _cx| async move {
3775 match git_repo {
3776 RepositoryState::Local {
3777 backend,
3778 environment,
3779 ..
3780 } => backend.stash_pop(index, environment).await,
3781 RepositoryState::Remote { project_id, client } => {
3782 client
3783 .request(proto::StashPop {
3784 project_id: project_id.0,
3785 repository_id: id.to_proto(),
3786 stash_index: index.map(|i| i as u64),
3787 })
3788 .await
3789 .context("sending stash pop request")?;
3790 Ok(())
3791 }
3792 }
3793 })
3794 })?
3795 .await??;
3796 Ok(())
3797 })
3798 }
3799
3800 pub fn stash_apply(
3801 &mut self,
3802 index: Option<usize>,
3803 cx: &mut Context<Self>,
3804 ) -> Task<anyhow::Result<()>> {
3805 let id = self.id;
3806 cx.spawn(async move |this, cx| {
3807 this.update(cx, |this, _| {
3808 this.send_job(None, move |git_repo, _cx| async move {
3809 match git_repo {
3810 RepositoryState::Local {
3811 backend,
3812 environment,
3813 ..
3814 } => backend.stash_apply(index, environment).await,
3815 RepositoryState::Remote { project_id, client } => {
3816 client
3817 .request(proto::StashApply {
3818 project_id: project_id.0,
3819 repository_id: id.to_proto(),
3820 stash_index: index.map(|i| i as u64),
3821 })
3822 .await
3823 .context("sending stash apply request")?;
3824 Ok(())
3825 }
3826 }
3827 })
3828 })?
3829 .await??;
3830 Ok(())
3831 })
3832 }
3833
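    /// Drops a stash entry, optionally selecting a specific stash index. On success for local
    /// repositories, the cached stash list is refreshed and any downstream clients are notified.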
3834 pub fn stash_drop(
3835 &mut self,
3836 index: Option<usize>,
3837 cx: &mut Context<Self>,
3838 ) -> oneshot::Receiver<anyhow::Result<()>> {
3839 let id = self.id;
3840 let updates_tx = self
3841 .git_store()
3842 .and_then(|git_store| match &git_store.read(cx).state {
3843 GitStoreState::Local { downstream, .. } => downstream
3844 .as_ref()
3845 .map(|downstream| downstream.updates_tx.clone()),
3846 _ => None,
3847 });
3848 let this = cx.weak_entity();
3849 self.send_job(None, move |git_repo, mut cx| async move {
3850 match git_repo {
3851 RepositoryState::Local {
3852 backend,
3853 environment,
3854 ..
3855 } => {
3856 let result = backend.stash_drop(index, environment).await;
3857 if result.is_ok()
3858 && let Ok(stash_entries) = backend.stash_entries().await
3859 {
3860 let snapshot = this.update(&mut cx, |this, cx| {
3861 this.snapshot.stash_entries = stash_entries;
3862 let snapshot = this.snapshot.clone();
3863 cx.emit(RepositoryEvent::Updated {
3864 full_scan: false,
3865 new_instance: false,
3866 });
3867 snapshot
3868 })?;
3869 if let Some(updates_tx) = updates_tx {
3870 updates_tx
3871 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3872 .ok();
3873 }
3874 }
3875
3876 result
3877 }
3878 RepositoryState::Remote { project_id, client } => {
3879 client
3880 .request(proto::StashDrop {
3881 project_id: project_id.0,
3882 repository_id: id.to_proto(),
3883 stash_index: index.map(|i| i as u64),
3884 })
3885 .await
                        .context("sending stash drop request")?;
3887 Ok(())
3888 }
3889 }
3890 })
3891 }
3892
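    /// Creates a commit with the given message and options, optionally overriding the author
    /// name and email.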
3893 pub fn commit(
3894 &mut self,
3895 message: SharedString,
3896 name_and_email: Option<(SharedString, SharedString)>,
3897 options: CommitOptions,
3898 _cx: &mut App,
3899 ) -> oneshot::Receiver<Result<()>> {
3900 let id = self.id;
3901
3902 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3903 match git_repo {
3904 RepositoryState::Local {
3905 backend,
3906 environment,
3907 ..
3908 } => {
3909 backend
3910 .commit(message, name_and_email, options, environment)
3911 .await
3912 }
3913 RepositoryState::Remote { project_id, client } => {
3914 let (name, email) = name_and_email.unzip();
3915 client
3916 .request(proto::Commit {
3917 project_id: project_id.0,
3918 repository_id: id.to_proto(),
3919 message: String::from(message),
3920 name: name.map(String::from),
3921 email: email.map(String::from),
3922 options: Some(proto::commit::CommitOptions {
3923 amend: options.amend,
3924 signoff: options.signoff,
3925 }),
3926 })
3927 .await
3928 .context("sending commit request")?;
3929
3930 Ok(())
3931 }
3932 }
3933 })
3934 }
3935
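    /// Runs `git fetch` with the given options, using `askpass` to answer any credential prompts.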
3936 pub fn fetch(
3937 &mut self,
3938 fetch_options: FetchOptions,
3939 askpass: AskPassDelegate,
3940 _cx: &mut App,
3941 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3942 let askpass_delegates = self.askpass_delegates.clone();
3943 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3944 let id = self.id;
3945
3946 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3947 match git_repo {
3948 RepositoryState::Local {
3949 backend,
3950 environment,
3951 ..
3952 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3953 RepositoryState::Remote { project_id, client } => {
3954 askpass_delegates.lock().insert(askpass_id, askpass);
3955 let _defer = util::defer(|| {
3956 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3957 debug_assert!(askpass_delegate.is_some());
3958 });
3959
3960 let response = client
3961 .request(proto::Fetch {
3962 project_id: project_id.0,
3963 repository_id: id.to_proto(),
3964 askpass_id,
3965 remote: fetch_options.to_proto(),
3966 })
3967 .await
3968 .context("sending fetch request")?;
3969
3970 Ok(RemoteCommandOutput {
3971 stdout: response.stdout,
3972 stderr: response.stderr,
3973 })
3974 }
3975 }
3976 })
3977 }
3978
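    /// Pushes `branch` to `remote`. On success for local repositories, the branch list is re-read
    /// so the snapshot picks up the head branch's new upstream state, and downstream clients are
    /// notified.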
3979 pub fn push(
3980 &mut self,
3981 branch: SharedString,
3982 remote: SharedString,
3983 options: Option<PushOptions>,
3984 askpass: AskPassDelegate,
3985 cx: &mut Context<Self>,
3986 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3987 let askpass_delegates = self.askpass_delegates.clone();
3988 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3989 let id = self.id;
3990
3991 let args = options
3992 .map(|option| match option {
3993 PushOptions::SetUpstream => " --set-upstream",
3994 PushOptions::Force => " --force-with-lease",
3995 })
3996 .unwrap_or("");
3997
3998 let updates_tx = self
3999 .git_store()
4000 .and_then(|git_store| match &git_store.read(cx).state {
4001 GitStoreState::Local { downstream, .. } => downstream
4002 .as_ref()
4003 .map(|downstream| downstream.updates_tx.clone()),
4004 _ => None,
4005 });
4006
4007 let this = cx.weak_entity();
4008 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
4010 move |git_repo, mut cx| async move {
4011 match git_repo {
4012 RepositoryState::Local {
4013 backend,
4014 environment,
4015 ..
4016 } => {
4017 let result = backend
4018 .push(
4019 branch.to_string(),
4020 remote.to_string(),
4021 options,
4022 askpass,
4023 environment.clone(),
4024 cx.clone(),
4025 )
4026 .await;
4027 if result.is_ok() {
4028 let branches = backend.branches().await?;
4029 let branch = branches.into_iter().find(|branch| branch.is_head);
                        log::info!("head branch after push is {branch:?}");
4031 let snapshot = this.update(&mut cx, |this, cx| {
4032 this.snapshot.branch = branch;
4033 let snapshot = this.snapshot.clone();
4034 cx.emit(RepositoryEvent::Updated {
4035 full_scan: false,
4036 new_instance: false,
4037 });
4038 snapshot
4039 })?;
4040 if let Some(updates_tx) = updates_tx {
4041 updates_tx
4042 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4043 .ok();
4044 }
4045 }
4046 result
4047 }
4048 RepositoryState::Remote { project_id, client } => {
4049 askpass_delegates.lock().insert(askpass_id, askpass);
4050 let _defer = util::defer(|| {
4051 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4052 debug_assert!(askpass_delegate.is_some());
4053 });
4054 let response = client
4055 .request(proto::Push {
4056 project_id: project_id.0,
4057 repository_id: id.to_proto(),
4058 askpass_id,
4059 branch_name: branch.to_string(),
4060 remote_name: remote.to_string(),
4061 options: options.map(|options| match options {
4062 PushOptions::Force => proto::push::PushOptions::Force,
4063 PushOptions::SetUpstream => {
4064 proto::push::PushOptions::SetUpstream
4065 }
4066 }
4067 as i32),
4068 })
4069 .await
4070 .context("sending push request")?;
4071
4072 Ok(RemoteCommandOutput {
4073 stdout: response.stdout,
4074 stderr: response.stderr,
4075 })
4076 }
4077 }
4078 },
4079 )
4080 }
4081
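    /// Runs `git pull` for the given remote and branch, using `askpass` to answer any credential
    /// prompts.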
4082 pub fn pull(
4083 &mut self,
4084 branch: SharedString,
4085 remote: SharedString,
4086 askpass: AskPassDelegate,
4087 _cx: &mut App,
4088 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4089 let askpass_delegates = self.askpass_delegates.clone();
4090 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4091 let id = self.id;
4092
4093 self.send_job(
4094 Some(format!("git pull {} {}", remote, branch).into()),
4095 move |git_repo, cx| async move {
4096 match git_repo {
4097 RepositoryState::Local {
4098 backend,
4099 environment,
4100 ..
4101 } => {
4102 backend
4103 .pull(
4104 branch.to_string(),
4105 remote.to_string(),
4106 askpass,
4107 environment.clone(),
4108 cx,
4109 )
4110 .await
4111 }
4112 RepositoryState::Remote { project_id, client } => {
4113 askpass_delegates.lock().insert(askpass_id, askpass);
4114 let _defer = util::defer(|| {
4115 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4116 debug_assert!(askpass_delegate.is_some());
4117 });
4118 let response = client
4119 .request(proto::Pull {
4120 project_id: project_id.0,
4121 repository_id: id.to_proto(),
4122 askpass_id,
4123 branch_name: branch.to_string(),
4124 remote_name: remote.to_string(),
4125 })
4126 .await
4127 .context("sending pull request")?;
4128
4129 Ok(RemoteCommandOutput {
4130 stdout: response.stdout,
4131 stderr: response.stderr,
4132 })
4133 }
4134 }
4135 },
4136 )
4137 }
4138
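    /// Queues a keyed job that updates the index text for `path`, so that redundant writes to the
    /// same path are coalesced. After the write completes, the hunk-staging operation count (if
    /// provided) is recorded on the buffer's diff state.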
4139 fn spawn_set_index_text_job(
4140 &mut self,
4141 path: RepoPath,
4142 content: Option<String>,
4143 hunk_staging_operation_count: Option<usize>,
4144 cx: &mut Context<Self>,
4145 ) -> oneshot::Receiver<anyhow::Result<()>> {
4146 let id = self.id;
4147 let this = cx.weak_entity();
4148 let git_store = self.git_store.clone();
4149 self.send_keyed_job(
4150 Some(GitJobKey::WriteIndex(path.clone())),
4151 None,
4152 move |git_repo, mut cx| async move {
4153 log::debug!("start updating index text for buffer {}", path.display());
4154 match git_repo {
4155 RepositoryState::Local {
4156 backend,
4157 environment,
4158 ..
4159 } => {
4160 backend
4161 .set_index_text(path.clone(), content, environment.clone())
4162 .await?;
4163 }
4164 RepositoryState::Remote { project_id, client } => {
4165 client
4166 .request(proto::SetIndexText {
4167 project_id: project_id.0,
4168 repository_id: id.to_proto(),
4169 path: path.as_ref().to_proto(),
4170 text: content,
4171 })
4172 .await?;
4173 }
4174 }
4175 log::debug!("finish updating index text for buffer {}", path.display());
4176
4177 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4178 let project_path = this
4179 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4180 .ok()
4181 .flatten();
4182 git_store.update(&mut cx, |git_store, cx| {
4183 let buffer_id = git_store
4184 .buffer_store
4185 .read(cx)
4186 .get_by_path(&project_path?)?
4187 .read(cx)
4188 .remote_id();
4189 let diff_state = git_store.diffs.get(&buffer_id)?;
4190 diff_state.update(cx, |diff_state, _| {
4191 diff_state.hunk_staging_operation_count_as_of_write =
4192 hunk_staging_operation_count;
4193 });
4194 Some(())
4195 })?;
4196 }
4197 Ok(())
4198 },
4199 )
4200 }
4201
4202 pub fn get_remotes(
4203 &mut self,
4204 branch_name: Option<String>,
4205 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4206 let id = self.id;
4207 self.send_job(None, move |repo, _cx| async move {
4208 match repo {
4209 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4210 RepositoryState::Remote { project_id, client } => {
4211 let response = client
4212 .request(proto::GetRemotes {
4213 project_id: project_id.0,
4214 repository_id: id.to_proto(),
4215 branch_name,
4216 })
4217 .await?;
4218
4219 let remotes = response
4220 .remotes
4221 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
4225 .collect();
4226
4227 Ok(remotes)
4228 }
4229 }
4230 })
4231 }
4232
4233 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4234 let id = self.id;
4235 self.send_job(None, move |repo, _| async move {
4236 match repo {
4237 RepositoryState::Local { backend, .. } => backend.branches().await,
4238 RepositoryState::Remote { project_id, client } => {
4239 let response = client
4240 .request(proto::GitGetBranches {
4241 project_id: project_id.0,
4242 repository_id: id.to_proto(),
4243 })
4244 .await?;
4245
4246 let branches = response
4247 .branches
4248 .into_iter()
4249 .map(|branch| proto_to_branch(&branch))
4250 .collect();
4251
4252 Ok(branches)
4253 }
4254 }
4255 })
4256 }
4257
4258 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4259 let id = self.id;
4260 self.send_job(None, move |repo, _| async move {
4261 match repo {
4262 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4263 RepositoryState::Remote { project_id, client } => {
4264 let response = client
4265 .request(proto::GetDefaultBranch {
4266 project_id: project_id.0,
4267 repository_id: id.to_proto(),
4268 })
4269 .await?;
4270
4271 anyhow::Ok(response.branch.map(SharedString::from))
4272 }
4273 }
4274 })
4275 }
4276
4277 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4278 let id = self.id;
4279 self.send_job(None, move |repo, _cx| async move {
4280 match repo {
4281 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4282 RepositoryState::Remote { project_id, client } => {
4283 let response = client
4284 .request(proto::GitDiff {
4285 project_id: project_id.0,
4286 repository_id: id.to_proto(),
4287 diff_type: match diff_type {
4288 DiffType::HeadToIndex => {
4289 proto::git_diff::DiffType::HeadToIndex.into()
4290 }
4291 DiffType::HeadToWorktree => {
4292 proto::git_diff::DiffType::HeadToWorktree.into()
4293 }
4294 },
4295 })
4296 .await?;
4297
4298 Ok(response.diff)
4299 }
4300 }
4301 })
4302 }
4303
4304 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4305 let id = self.id;
4306 self.send_job(
4307 Some(format!("git switch -c {branch_name}").into()),
4308 move |repo, _cx| async move {
4309 match repo {
4310 RepositoryState::Local { backend, .. } => {
4311 backend.create_branch(branch_name).await
4312 }
4313 RepositoryState::Remote { project_id, client } => {
4314 client
4315 .request(proto::GitCreateBranch {
4316 project_id: project_id.0,
4317 repository_id: id.to_proto(),
4318 branch_name,
4319 })
4320 .await?;
4321
4322 Ok(())
4323 }
4324 }
4325 },
4326 )
4327 }
4328
4329 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4330 let id = self.id;
4331 self.send_job(
4332 Some(format!("git switch {branch_name}").into()),
4333 move |repo, _cx| async move {
4334 match repo {
4335 RepositoryState::Local { backend, .. } => {
4336 backend.change_branch(branch_name).await
4337 }
4338 RepositoryState::Remote { project_id, client } => {
4339 client
4340 .request(proto::GitChangeBranch {
4341 project_id: project_id.0,
4342 repository_id: id.to_proto(),
4343 branch_name,
4344 })
4345 .await?;
4346
4347 Ok(())
4348 }
4349 }
4350 },
4351 )
4352 }
4353
4354 pub fn rename_branch(
4355 &mut self,
4356 branch: String,
4357 new_name: String,
4358 ) -> oneshot::Receiver<Result<()>> {
4359 let id = self.id;
4360 self.send_job(
4361 Some(format!("git branch -m {branch} {new_name}").into()),
4362 move |repo, _cx| async move {
4363 match repo {
4364 RepositoryState::Local { backend, .. } => {
4365 backend.rename_branch(branch, new_name).await
4366 }
4367 RepositoryState::Remote { project_id, client } => {
4368 client
4369 .request(proto::GitRenameBranch {
4370 project_id: project_id.0,
4371 repository_id: id.to_proto(),
4372 branch,
4373 new_name,
4374 })
4375 .await?;
4376
4377 Ok(())
4378 }
4379 }
4380 },
4381 )
4382 }
4383
4384 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4385 let id = self.id;
4386 self.send_job(None, move |repo, _cx| async move {
4387 match repo {
4388 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4389 RepositoryState::Remote { project_id, client } => {
4390 let response = client
4391 .request(proto::CheckForPushedCommits {
4392 project_id: project_id.0,
4393 repository_id: id.to_proto(),
4394 })
4395 .await?;
4396
4397 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4398
4399 Ok(branches)
4400 }
4401 }
4402 })
4403 }
4404
4405 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4406 self.send_job(None, |repo, _cx| async move {
4407 match repo {
4408 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4409 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4410 }
4411 })
4412 }
4413
4414 pub fn restore_checkpoint(
4415 &mut self,
4416 checkpoint: GitRepositoryCheckpoint,
4417 ) -> oneshot::Receiver<Result<()>> {
4418 self.send_job(None, move |repo, _cx| async move {
4419 match repo {
4420 RepositoryState::Local { backend, .. } => {
4421 backend.restore_checkpoint(checkpoint).await
4422 }
4423 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4424 }
4425 })
4426 }
4427
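    /// Applies a repository update received from the remote peer, replacing the branch, head
    /// commit, merge, and stash state and applying the status edits to the local snapshot.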
4428 pub(crate) fn apply_remote_update(
4429 &mut self,
4430 update: proto::UpdateRepository,
4431 is_new: bool,
4432 cx: &mut Context<Self>,
4433 ) -> Result<()> {
4434 let conflicted_paths = TreeSet::from_ordered_entries(
4435 update
4436 .current_merge_conflicts
4437 .into_iter()
4438 .map(|path| RepoPath(Path::new(&path).into())),
4439 );
4440 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4441 self.snapshot.head_commit = update
4442 .head_commit_details
4443 .as_ref()
4444 .map(proto_to_commit_details);
4445
4446 self.snapshot.merge.conflicted_paths = conflicted_paths;
4447 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4448 self.snapshot.stash_entries = GitStash {
4449 entries: update
4450 .stash_entries
4451 .iter()
4452 .filter_map(|entry| proto_to_stash(entry).ok())
4453 .collect(),
4454 };
4455
4456 let edits = update
4457 .removed_statuses
4458 .into_iter()
4459 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
4460 .chain(
4461 update
4462 .updated_statuses
4463 .into_iter()
4464 .filter_map(|updated_status| {
4465 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4466 }),
4467 )
4468 .collect::<Vec<_>>();
4469 self.snapshot.statuses_by_path.edit(edits, &());
4470 if update.is_last_update {
4471 self.snapshot.scan_id = update.scan_id;
4472 }
4473 cx.emit(RepositoryEvent::Updated {
4474 full_scan: true,
4475 new_instance: is_new,
4476 });
4477 Ok(())
4478 }
4479
4480 pub fn compare_checkpoints(
4481 &mut self,
4482 left: GitRepositoryCheckpoint,
4483 right: GitRepositoryCheckpoint,
4484 ) -> oneshot::Receiver<Result<bool>> {
4485 self.send_job(None, move |repo, _cx| async move {
4486 match repo {
4487 RepositoryState::Local { backend, .. } => {
4488 backend.compare_checkpoints(left, right).await
4489 }
4490 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4491 }
4492 })
4493 }
4494
4495 pub fn diff_checkpoints(
4496 &mut self,
4497 base_checkpoint: GitRepositoryCheckpoint,
4498 target_checkpoint: GitRepositoryCheckpoint,
4499 ) -> oneshot::Receiver<Result<String>> {
4500 self.send_job(None, move |repo, _cx| async move {
4501 match repo {
4502 RepositoryState::Local { backend, .. } => {
4503 backend
4504 .diff_checkpoints(base_checkpoint, target_checkpoint)
4505 .await
4506 }
4507 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4508 }
4509 })
4510 }
4511
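    /// Schedules a full git status scan as a keyed job so that redundant scans are coalesced;
    /// the resulting snapshot is stored and forwarded over `updates_tx` when one is provided.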
4512 fn schedule_scan(
4513 &mut self,
4514 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4515 cx: &mut Context<Self>,
4516 ) {
4517 let this = cx.weak_entity();
4518 let _ = self.send_keyed_job(
4519 Some(GitJobKey::ReloadGitState),
4520 None,
4521 |state, mut cx| async move {
4522 log::debug!("run scheduled git status scan");
4523
4524 let Some(this) = this.upgrade() else {
4525 return Ok(());
4526 };
4527 let RepositoryState::Local { backend, .. } = state else {
4528 bail!("not a local repository")
4529 };
4530 let (snapshot, events) = this
4531 .update(&mut cx, |this, _| {
4532 this.paths_needing_status_update.clear();
4533 compute_snapshot(
4534 this.id,
4535 this.work_directory_abs_path.clone(),
4536 this.snapshot.clone(),
4537 backend.clone(),
4538 )
4539 })?
4540 .await?;
4541 this.update(&mut cx, |this, cx| {
4542 this.snapshot = snapshot.clone();
4543 for event in events {
4544 cx.emit(event);
4545 }
4546 })?;
4547 if let Some(updates_tx) = updates_tx {
4548 updates_tx
4549 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4550 .ok();
4551 }
4552 Ok(())
4553 },
4554 );
4555 }
4556
4557 fn spawn_local_git_worker(
4558 work_directory_abs_path: Arc<Path>,
4559 dot_git_abs_path: Arc<Path>,
4560 _repository_dir_abs_path: Arc<Path>,
4561 _common_dir_abs_path: Arc<Path>,
4562 project_environment: WeakEntity<ProjectEnvironment>,
4563 fs: Arc<dyn Fs>,
4564 cx: &mut Context<Self>,
4565 ) -> mpsc::UnboundedSender<GitJob> {
4566 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4567
4568 cx.spawn(async move |_, cx| {
4569 let environment = project_environment
4570 .upgrade()
4571 .context("missing project environment")?
4572 .update(cx, |project_environment, cx| {
4573 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4574 })?
4575 .await
4576 .unwrap_or_else(|| {
4577 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4578 HashMap::default()
4579 });
4580 let backend = cx
4581 .background_spawn(async move {
4582 fs.open_repo(&dot_git_abs_path)
4583 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4584 })
4585 .await?;
4586
4587 if let Some(git_hosting_provider_registry) =
4588 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4589 {
4590 git_hosting_providers::register_additional_providers(
4591 git_hosting_provider_registry,
4592 backend.clone(),
4593 );
4594 }
4595
4596 let state = RepositoryState::Local {
4597 backend,
4598 environment: Arc::new(environment),
4599 };
4600 let mut jobs = VecDeque::new();
4601 loop {
4602 while let Ok(Some(next_job)) = job_rx.try_next() {
4603 jobs.push_back(next_job);
4604 }
4605
4606 if let Some(job) = jobs.pop_front() {
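                // If a newer job with the same key is already queued, skip this one;
                // only the most recently enqueued job for a given key needs to run.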
4607 if let Some(current_key) = &job.key
4608 && jobs
4609 .iter()
4610 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4611 {
4612 continue;
4613 }
4614 (job.job)(state.clone(), cx).await;
4615 } else if let Some(job) = job_rx.next().await {
4616 jobs.push_back(job);
4617 } else {
4618 break;
4619 }
4620 }
4621 anyhow::Ok(())
4622 })
4623 .detach_and_log_err(cx);
4624
4625 job_tx
4626 }
4627
4628 fn spawn_remote_git_worker(
4629 project_id: ProjectId,
4630 client: AnyProtoClient,
4631 cx: &mut Context<Self>,
4632 ) -> mpsc::UnboundedSender<GitJob> {
4633 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4634
4635 cx.spawn(async move |_, cx| {
4636 let state = RepositoryState::Remote { project_id, client };
4637 let mut jobs = VecDeque::new();
4638 loop {
4639 while let Ok(Some(next_job)) = job_rx.try_next() {
4640 jobs.push_back(next_job);
4641 }
4642
4643 if let Some(job) = jobs.pop_front() {
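                // If a newer job with the same key is already queued, skip this one;
                // only the most recently enqueued job for a given key needs to run.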
4644 if let Some(current_key) = &job.key
4645 && jobs
4646 .iter()
4647 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4648 {
4649 continue;
4650 }
4651 (job.job)(state.clone(), cx).await;
4652 } else if let Some(job) = job_rx.next().await {
4653 jobs.push_back(job);
4654 } else {
4655 break;
4656 }
4657 }
4658 anyhow::Ok(())
4659 })
4660 .detach_and_log_err(cx);
4661
4662 job_tx
4663 }
4664
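    /// Loads the staged (index) text for the given path, either from the local backend or via an
    /// `OpenUnstagedDiff` request to the remote peer.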
4665 fn load_staged_text(
4666 &mut self,
4667 buffer_id: BufferId,
4668 repo_path: RepoPath,
4669 cx: &App,
4670 ) -> Task<Result<Option<String>>> {
4671 let rx = self.send_job(None, move |state, _| async move {
4672 match state {
4673 RepositoryState::Local { backend, .. } => {
4674 anyhow::Ok(backend.load_index_text(repo_path).await)
4675 }
4676 RepositoryState::Remote { project_id, client } => {
4677 let response = client
4678 .request(proto::OpenUnstagedDiff {
4679 project_id: project_id.to_proto(),
4680 buffer_id: buffer_id.to_proto(),
4681 })
4682 .await?;
4683 Ok(response.staged_text)
4684 }
4685 }
4686 });
4687 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4688 }
4689
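    /// Loads both the committed (HEAD) text and the staged (index) text for the given path,
    /// collapsing them into a single value when they are identical.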
4690 fn load_committed_text(
4691 &mut self,
4692 buffer_id: BufferId,
4693 repo_path: RepoPath,
4694 cx: &App,
4695 ) -> Task<Result<DiffBasesChange>> {
4696 let rx = self.send_job(None, move |state, _| async move {
4697 match state {
4698 RepositoryState::Local { backend, .. } => {
4699 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4700 let staged_text = backend.load_index_text(repo_path).await;
4701 let diff_bases_change = if committed_text == staged_text {
4702 DiffBasesChange::SetBoth(committed_text)
4703 } else {
4704 DiffBasesChange::SetEach {
4705 index: staged_text,
4706 head: committed_text,
4707 }
4708 };
4709 anyhow::Ok(diff_bases_change)
4710 }
4711 RepositoryState::Remote { project_id, client } => {
4712 use proto::open_uncommitted_diff_response::Mode;
4713
4714 let response = client
4715 .request(proto::OpenUncommittedDiff {
4716 project_id: project_id.to_proto(),
4717 buffer_id: buffer_id.to_proto(),
4718 })
4719 .await?;
4720 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4721 let bases = match mode {
4722 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4723 Mode::IndexAndHead => DiffBasesChange::SetEach {
4724 head: response.committed_text,
4725 index: response.staged_text,
4726 },
4727 };
4728 Ok(bases)
4729 }
4730 }
4731 });
4732
4733 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4734 }
4735
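    /// Records that the given paths may have changed and queues a keyed job that re-reads their
    /// git statuses, applying only the entries that actually differ from the cached snapshot.
    /// The cached stash entries are refreshed as well.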
4736 fn paths_changed(
4737 &mut self,
4738 paths: Vec<RepoPath>,
4739 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4740 cx: &mut Context<Self>,
4741 ) {
4742 self.paths_needing_status_update.extend(paths);
4743
4744 let this = cx.weak_entity();
4745 let _ = self.send_keyed_job(
4746 Some(GitJobKey::RefreshStatuses),
4747 None,
4748 |state, mut cx| async move {
4749 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4750 (
4751 this.snapshot.clone(),
4752 mem::take(&mut this.paths_needing_status_update),
4753 )
4754 })?;
4755 let RepositoryState::Local { backend, .. } = state else {
4756 bail!("not a local repository")
4757 };
4758
4759 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4760 if paths.is_empty() {
4761 return Ok(());
4762 }
4763 let statuses = backend.status(&paths).await?;
4764 let stash_entries = backend.stash_entries().await?;
4765
4766 let changed_path_statuses = cx
4767 .background_spawn(async move {
4768 let mut changed_path_statuses = Vec::new();
4769 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4770 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4771
4772 for (repo_path, status) in &*statuses.entries {
4773 changed_paths.remove(repo_path);
4774 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4775 && cursor.item().is_some_and(|entry| entry.status == *status)
4776 {
4777 continue;
4778 }
4779
4780 changed_path_statuses.push(Edit::Insert(StatusEntry {
4781 repo_path: repo_path.clone(),
4782 status: *status,
4783 }));
4784 }
4785 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4786 for path in changed_paths.into_iter() {
4787 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4788 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4789 }
4790 }
4791 changed_path_statuses
4792 })
4793 .await;
4794
4795 this.update(&mut cx, |this, cx| {
4796 let needs_update = !changed_path_statuses.is_empty()
4797 || this.snapshot.stash_entries != stash_entries;
4798 this.snapshot.stash_entries = stash_entries;
4799 if !changed_path_statuses.is_empty() {
4800 this.snapshot
4801 .statuses_by_path
4802 .edit(changed_path_statuses, &());
4803 this.snapshot.scan_id += 1;
4804 }
4805
4806 if needs_update && let Some(updates_tx) = updates_tx {
4807 updates_tx
4808 .unbounded_send(DownstreamUpdate::UpdateRepository(
4809 this.snapshot.clone(),
4810 ))
4811 .ok();
4812 }
4813 cx.emit(RepositoryEvent::Updated {
4814 full_scan: false,
4815 new_instance: false,
4816 });
4817 })
4818 },
4819 );
4820 }
4821
    /// Returns the currently running git command, if any, along with the time it started.
4823 pub fn current_job(&self) -> Option<JobInfo> {
4824 self.active_jobs.values().next().cloned()
4825 }
4826
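    /// Enqueues a no-op job and returns a receiver that fires once every job queued before it
    /// has been processed.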
4827 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4828 self.send_job(None, |_, _| async {})
4829 }
4830}
4831
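/// Builds a permalink into a crate's upstream repository for a file that lives in the local Cargo
/// registry source cache, recovering the repository URL and commit SHA from the crate's
/// `.cargo_vcs_info.json` and `Cargo.toml`.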
4832fn get_permalink_in_rust_registry_src(
4833 provider_registry: Arc<GitHostingProviderRegistry>,
4834 path: PathBuf,
4835 selection: Range<u32>,
4836) -> Result<url::Url> {
4837 #[derive(Deserialize)]
4838 struct CargoVcsGit {
4839 sha1: String,
4840 }
4841
4842 #[derive(Deserialize)]
4843 struct CargoVcsInfo {
4844 git: CargoVcsGit,
4845 path_in_vcs: String,
4846 }
4847
4848 #[derive(Deserialize)]
4849 struct CargoPackage {
4850 repository: String,
4851 }
4852
4853 #[derive(Deserialize)]
4854 struct CargoToml {
4855 package: CargoPackage,
4856 }
4857
4858 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4859 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4860 Some((dir, json))
4861 }) else {
4862 bail!("No .cargo_vcs_info.json found in parent directories")
4863 };
4864 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4865 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4866 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4867 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4868 .context("parsing package.repository field of manifest")?;
4869 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4870 let permalink = provider.build_permalink(
4871 remote,
4872 BuildPermalinkParams {
4873 sha: &cargo_vcs_info.git.sha1,
4874 path: &path.to_string_lossy(),
4875 selection: Some(selection),
4876 },
4877 );
4878 Ok(permalink)
4879}
4880
4881fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4882 let Some(blame) = blame else {
4883 return proto::BlameBufferResponse {
4884 blame_response: None,
4885 };
4886 };
4887
4888 let entries = blame
4889 .entries
4890 .into_iter()
4891 .map(|entry| proto::BlameEntry {
4892 sha: entry.sha.as_bytes().into(),
4893 start_line: entry.range.start,
4894 end_line: entry.range.end,
4895 original_line_number: entry.original_line_number,
4896 author: entry.author,
4897 author_mail: entry.author_mail,
4898 author_time: entry.author_time,
4899 author_tz: entry.author_tz,
4900 committer: entry.committer_name,
4901 committer_mail: entry.committer_email,
4902 committer_time: entry.committer_time,
4903 committer_tz: entry.committer_tz,
4904 summary: entry.summary,
4905 previous: entry.previous,
4906 filename: entry.filename,
4907 })
4908 .collect::<Vec<_>>();
4909
4910 let messages = blame
4911 .messages
4912 .into_iter()
4913 .map(|(oid, message)| proto::CommitMessage {
4914 oid: oid.as_bytes().into(),
4915 message,
4916 })
4917 .collect::<Vec<_>>();
4918
4919 proto::BlameBufferResponse {
4920 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4921 entries,
4922 messages,
4923 remote_url: blame.remote_url,
4924 }),
4925 }
4926}
4927
4928fn deserialize_blame_buffer_response(
4929 response: proto::BlameBufferResponse,
4930) -> Option<git::blame::Blame> {
4931 let response = response.blame_response?;
4932 let entries = response
4933 .entries
4934 .into_iter()
4935 .filter_map(|entry| {
4936 Some(git::blame::BlameEntry {
4937 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4938 range: entry.start_line..entry.end_line,
4939 original_line_number: entry.original_line_number,
4940 committer_name: entry.committer,
4941 committer_time: entry.committer_time,
4942 committer_tz: entry.committer_tz,
4943 committer_email: entry.committer_mail,
4944 author: entry.author,
4945 author_mail: entry.author_mail,
4946 author_time: entry.author_time,
4947 author_tz: entry.author_tz,
4948 summary: entry.summary,
4949 previous: entry.previous,
4950 filename: entry.filename,
4951 })
4952 })
4953 .collect::<Vec<_>>();
4954
4955 let messages = response
4956 .messages
4957 .into_iter()
4958 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4959 .collect::<HashMap<_, _>>();
4960
4961 Some(Blame {
4962 entries,
4963 messages,
4964 remote_url: response.remote_url,
4965 })
4966}
4967
4968fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4969 proto::Branch {
4970 is_head: branch.is_head,
4971 ref_name: branch.ref_name.to_string(),
4972 unix_timestamp: branch
4973 .most_recent_commit
4974 .as_ref()
4975 .map(|commit| commit.commit_timestamp as u64),
4976 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4977 ref_name: upstream.ref_name.to_string(),
4978 tracking: upstream
4979 .tracking
4980 .status()
4981 .map(|upstream| proto::UpstreamTracking {
4982 ahead: upstream.ahead as u64,
4983 behind: upstream.behind as u64,
4984 }),
4985 }),
4986 most_recent_commit: branch
4987 .most_recent_commit
4988 .as_ref()
4989 .map(|commit| proto::CommitSummary {
4990 sha: commit.sha.to_string(),
4991 subject: commit.subject.to_string(),
4992 commit_timestamp: commit.commit_timestamp,
4993 author_name: commit.author_name.to_string(),
4994 }),
4995 }
4996}
4997
4998fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
4999 git::repository::Branch {
5000 is_head: proto.is_head,
5001 ref_name: proto.ref_name.clone().into(),
5002 upstream: proto
5003 .upstream
5004 .as_ref()
5005 .map(|upstream| git::repository::Upstream {
5006 ref_name: upstream.ref_name.to_string().into(),
5007 tracking: upstream
5008 .tracking
5009 .as_ref()
5010 .map(|tracking| {
5011 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5012 ahead: tracking.ahead as u32,
5013 behind: tracking.behind as u32,
5014 })
5015 })
5016 .unwrap_or(git::repository::UpstreamTracking::Gone),
5017 }),
5018 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5019 git::repository::CommitSummary {
5020 sha: commit.sha.to_string().into(),
5021 subject: commit.subject.to_string().into(),
5022 commit_timestamp: commit.commit_timestamp,
5023 author_name: commit.author_name.to_string().into(),
5024 has_parent: true,
5025 }
5026 }),
5027 }
5028}
5029
5030fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5031 proto::GitCommitDetails {
5032 sha: commit.sha.to_string(),
5033 message: commit.message.to_string(),
5034 commit_timestamp: commit.commit_timestamp,
5035 author_email: commit.author_email.to_string(),
5036 author_name: commit.author_name.to_string(),
5037 }
5038}
5039
5040fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5041 CommitDetails {
5042 sha: proto.sha.clone().into(),
5043 message: proto.message.clone().into(),
5044 commit_timestamp: proto.commit_timestamp,
5045 author_email: proto.author_email.clone().into(),
5046 author_name: proto.author_name.clone().into(),
5047 }
5048}
5049
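/// Recomputes a repository snapshot (branch, statuses, stash entries, merge details, head commit,
/// and remote URLs) and returns the events describing how it differs from `prev_snapshot`.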
5050async fn compute_snapshot(
5051 id: RepositoryId,
5052 work_directory_abs_path: Arc<Path>,
5053 prev_snapshot: RepositorySnapshot,
5054 backend: Arc<dyn GitRepository>,
5055) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5056 let mut events = Vec::new();
5057 let branches = backend.branches().await?;
5058 let branch = branches.into_iter().find(|branch| branch.is_head);
5059 let statuses = backend
5060 .status(std::slice::from_ref(&WORK_DIRECTORY_REPO_PATH))
5061 .await?;
5062 let stash_entries = backend.stash_entries().await?;
5063 let statuses_by_path = SumTree::from_iter(
5064 statuses
5065 .entries
5066 .iter()
5067 .map(|(repo_path, status)| StatusEntry {
5068 repo_path: repo_path.clone(),
5069 status: *status,
5070 }),
5071 &(),
5072 );
5073 let (merge_details, merge_heads_changed) =
5074 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5075 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5076
5077 if merge_heads_changed
5078 || branch != prev_snapshot.branch
5079 || statuses_by_path != prev_snapshot.statuses_by_path
5080 {
5081 events.push(RepositoryEvent::Updated {
5082 full_scan: true,
5083 new_instance: false,
5084 });
5085 }
5086
5087 // Cache merge conflict paths so they don't change from staging/unstaging,
5088 // until the merge heads change (at commit time, etc.).
5089 if merge_heads_changed {
5090 events.push(RepositoryEvent::MergeHeadsChanged);
5091 }
5092
    // Useful when `branch` is `None`, e.g. in a detached HEAD state.
5094 let head_commit = match backend.head_sha().await {
5095 Some(head_sha) => backend.show(head_sha).await.log_err(),
5096 None => None,
5097 };
5098
5099 // Used by edit prediction data collection
5100 let remote_origin_url = backend.remote_url("origin");
5101 let remote_upstream_url = backend.remote_url("upstream");
5102
5103 let snapshot = RepositorySnapshot {
5104 id,
5105 statuses_by_path,
5106 work_directory_abs_path,
5107 scan_id: prev_snapshot.scan_id + 1,
5108 branch,
5109 head_commit,
5110 merge: merge_details,
5111 remote_origin_url,
5112 remote_upstream_url,
5113 stash_entries,
5114 };
5115
5116 Ok((snapshot, events))
5117}
5118
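/// Converts a protobuf file status into a [`FileStatus`], falling back to the legacy simple
/// status code when no detailed variant is present.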
5119fn status_from_proto(
5120 simple_status: i32,
5121 status: Option<proto::GitFileStatus>,
5122) -> anyhow::Result<FileStatus> {
5123 use proto::git_file_status::Variant;
5124
5125 let Some(variant) = status.and_then(|status| status.variant) else {
5126 let code = proto::GitStatus::from_i32(simple_status)
5127 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5128 let result = match code {
5129 proto::GitStatus::Added => TrackedStatus {
5130 worktree_status: StatusCode::Added,
5131 index_status: StatusCode::Unmodified,
5132 }
5133 .into(),
5134 proto::GitStatus::Modified => TrackedStatus {
5135 worktree_status: StatusCode::Modified,
5136 index_status: StatusCode::Unmodified,
5137 }
5138 .into(),
5139 proto::GitStatus::Conflict => UnmergedStatus {
5140 first_head: UnmergedStatusCode::Updated,
5141 second_head: UnmergedStatusCode::Updated,
5142 }
5143 .into(),
5144 proto::GitStatus::Deleted => TrackedStatus {
5145 worktree_status: StatusCode::Deleted,
5146 index_status: StatusCode::Unmodified,
5147 }
5148 .into(),
5149 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5150 };
5151 return Ok(result);
5152 };
5153
5154 let result = match variant {
5155 Variant::Untracked(_) => FileStatus::Untracked,
5156 Variant::Ignored(_) => FileStatus::Ignored,
5157 Variant::Unmerged(unmerged) => {
5158 let [first_head, second_head] =
5159 [unmerged.first_head, unmerged.second_head].map(|head| {
5160 let code = proto::GitStatus::from_i32(head)
5161 .with_context(|| format!("Invalid git status code: {head}"))?;
5162 let result = match code {
5163 proto::GitStatus::Added => UnmergedStatusCode::Added,
5164 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5165 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5166 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5167 };
5168 Ok(result)
5169 });
5170 let [first_head, second_head] = [first_head?, second_head?];
5171 UnmergedStatus {
5172 first_head,
5173 second_head,
5174 }
5175 .into()
5176 }
5177 Variant::Tracked(tracked) => {
5178 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5179 .map(|status| {
5180 let code = proto::GitStatus::from_i32(status)
5181 .with_context(|| format!("Invalid git status code: {status}"))?;
5182 let result = match code {
5183 proto::GitStatus::Modified => StatusCode::Modified,
5184 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5185 proto::GitStatus::Added => StatusCode::Added,
5186 proto::GitStatus::Deleted => StatusCode::Deleted,
5187 proto::GitStatus::Renamed => StatusCode::Renamed,
5188 proto::GitStatus::Copied => StatusCode::Copied,
5189 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5190 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5191 };
5192 Ok(result)
5193 });
5194 let [index_status, worktree_status] = [index_status?, worktree_status?];
5195 TrackedStatus {
5196 index_status,
5197 worktree_status,
5198 }
5199 .into()
5200 }
5201 };
5202 Ok(result)
5203}
5204
5205fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5206 use proto::git_file_status::{Tracked, Unmerged, Variant};
5207
5208 let variant = match status {
5209 FileStatus::Untracked => Variant::Untracked(Default::default()),
5210 FileStatus::Ignored => Variant::Ignored(Default::default()),
5211 FileStatus::Unmerged(UnmergedStatus {
5212 first_head,
5213 second_head,
5214 }) => Variant::Unmerged(Unmerged {
5215 first_head: unmerged_status_to_proto(first_head),
5216 second_head: unmerged_status_to_proto(second_head),
5217 }),
5218 FileStatus::Tracked(TrackedStatus {
5219 index_status,
5220 worktree_status,
5221 }) => Variant::Tracked(Tracked {
5222 index_status: tracked_status_to_proto(index_status),
5223 worktree_status: tracked_status_to_proto(worktree_status),
5224 }),
5225 };
5226 proto::GitFileStatus {
5227 variant: Some(variant),
5228 }
5229}
5230
5231fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5232 match code {
5233 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5234 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5235 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5236 }
5237}
5238
5239fn tracked_status_to_proto(code: StatusCode) -> i32 {
5240 match code {
5241 StatusCode::Added => proto::GitStatus::Added as _,
5242 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5243 StatusCode::Modified => proto::GitStatus::Modified as _,
5244 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5245 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5246 StatusCode::Copied => proto::GitStatus::Copied as _,
5247 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5248 }
5249}