1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use std::{
60 cmp::Ordering,
61 collections::{BTreeSet, HashSet, VecDeque},
62 future::Future,
63 mem,
64 ops::Range,
65 path::{Path, PathBuf},
66 str::FromStr,
67 sync::{
68 Arc,
69 atomic::{self, AtomicU64},
70 },
71 time::Instant,
72};
73use sum_tree::{Edit, SumTree, TreeSet};
74use task::Shell;
75use text::{Bias, BufferId};
76use util::{
77 ResultExt, debug_panic,
78 paths::{PathStyle, SanitizedPath},
79 post_inc,
80 rel_path::RelPath,
81};
82use worktree::{
83 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
84 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
85};
86use zeroize::Zeroize;
87
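/// Central store for a project's git state.
///
/// Tracks every [`Repository`] discovered in the project's worktrees, the
/// per-buffer diff and conflict-marker state, and, when the project is
/// shared, the replication of repository snapshots to downstream clients.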
88pub struct GitStore {
89 state: GitStoreState,
90 buffer_store: Entity<BufferStore>,
91 worktree_store: Entity<WorktreeStore>,
92 repositories: HashMap<RepositoryId, Entity<Repository>>,
93 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
94 active_repo_id: Option<RepositoryId>,
95 #[allow(clippy::type_complexity)]
96 loading_diffs:
97 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
98 diffs: HashMap<BufferId, Entity<BufferGitState>>,
99 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
100 _subscriptions: Vec<Subscription>,
101}
102
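/// Diffs that have been shared with a remote peer for a single buffer.
/// Strong handles are kept here so the diffs stay alive while the peer is
/// still using them.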
103#[derive(Default)]
104struct SharedDiffs {
105 unstaged: Option<Entity<BufferDiff>>,
106 uncommitted: Option<Entity<BufferDiff>>,
107}
108
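/// Per-buffer git state: weak handles to the buffer's unstaged and
/// uncommitted diffs and its conflict set, plus the bookkeeping needed to
/// recalculate them when the index or HEAD text changes.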
109struct BufferGitState {
110 unstaged_diff: Option<WeakEntity<BufferDiff>>,
111 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
112 conflict_set: Option<WeakEntity<ConflictSet>>,
113 recalculate_diff_task: Option<Task<Result<()>>>,
114 reparse_conflict_markers_task: Option<Task<Result<()>>>,
115 language: Option<Arc<Language>>,
116 language_registry: Option<Arc<LanguageRegistry>>,
117 conflict_updated_futures: Vec<oneshot::Sender<()>>,
118 recalculating_tx: postage::watch::Sender<bool>,
119
120 /// These operation counts are used to ensure that head and index text
121 /// values read from the git repository are up-to-date with any hunk staging
122 /// operations that have been performed on the BufferDiff.
123 ///
124 /// The operation count is incremented immediately when the user initiates a
125 /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
127 /// the operation count that prompted the write.
128 hunk_staging_operation_count: usize,
129 hunk_staging_operation_count_as_of_write: usize,
130
131 head_text: Option<Arc<String>>,
132 index_text: Option<Arc<String>>,
133 head_changed: bool,
134 index_changed: bool,
135 language_changed: bool,
136}
137
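/// Describes which diff base texts changed for a buffer (the index, HEAD, or
/// both), so that only the affected diffs are recalculated.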
138#[derive(Clone, Debug)]
139enum DiffBasesChange {
140 SetIndex(Option<String>),
141 SetHead(Option<String>),
142 SetEach {
143 index: Option<String>,
144 head: Option<String>,
145 },
146 SetBoth(Option<String>),
147}
148
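/// The two kinds of diffs tracked per buffer: against the index (unstaged)
/// and against HEAD (uncommitted).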
149#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
150enum DiffKind {
151 Unstaged,
152 Uncommitted,
153}
154
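/// Whether this store owns local git repositories directly or mirrors
/// repositories from an upstream project, along with any downstream client
/// that updates are forwarded to.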
155enum GitStoreState {
156 Local {
157 next_repository_id: Arc<AtomicU64>,
158 downstream: Option<LocalDownstreamState>,
159 project_environment: Entity<ProjectEnvironment>,
160 fs: Arc<dyn Fs>,
161 },
162 Remote {
163 upstream_client: AnyProtoClient,
164 upstream_project_id: u64,
165 downstream: Option<(AnyProtoClient, ProjectId)>,
166 },
167}
168
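/// A change that needs to be forwarded to the downstream client of a shared
/// local project.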
169enum DownstreamUpdate {
170 UpdateRepository(RepositorySnapshot),
171 RemoveRepository(RepositoryId),
172}
173
174struct LocalDownstreamState {
175 client: AnyProtoClient,
176 project_id: ProjectId,
177 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
178 _task: Task<Result<()>>,
179}
180
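/// A checkpoint of every repository in the store, keyed by working-directory
/// path, used to capture, compare, and restore git state across the whole
/// project.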
181#[derive(Clone, Debug)]
182pub struct GitStoreCheckpoint {
183 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
184}
185
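/// The git status of a single path within a repository.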
186#[derive(Clone, Debug, PartialEq, Eq)]
187pub struct StatusEntry {
188 pub repo_path: RepoPath,
189 pub status: FileStatus,
190}
191
192impl StatusEntry {
193 fn to_proto(&self) -> proto::StatusEntry {
194 let simple_status = match self.status {
195 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
196 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
197 FileStatus::Tracked(TrackedStatus {
198 index_status,
199 worktree_status,
200 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
201 worktree_status
202 } else {
203 index_status
204 }),
205 };
206
207 proto::StatusEntry {
208 repo_path: self.repo_path.to_proto(),
209 simple_status,
210 status: Some(status_to_proto(self.status)),
211 }
212 }
213}
214
215impl TryFrom<proto::StatusEntry> for StatusEntry {
216 type Error = anyhow::Error;
217
218 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
219 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
220 let status = status_from_proto(value.simple_status, value.status)?;
221 Ok(Self { repo_path, status })
222 }
223}
224
225impl sum_tree::Item for StatusEntry {
226 type Summary = PathSummary<GitSummary>;
227
228 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
229 PathSummary {
230 max_path: self.repo_path.as_ref().clone(),
231 item_summary: self.status.summary(),
232 }
233 }
234}
235
236impl sum_tree::KeyedItem for StatusEntry {
237 type Key = PathKey;
238
239 fn key(&self) -> Self::Key {
240 PathKey(self.repo_path.as_ref().clone())
241 }
242}
243
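/// A stable identifier for a repository within a [`GitStore`], also used to
/// address the repository in RPC messages.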
244#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
245pub struct RepositoryId(pub u64);
246
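/// Details about an in-progress merge: the conflicted paths, the prepared
/// merge message (if any), and the merge heads.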
247#[derive(Clone, Debug, Default, PartialEq, Eq)]
248pub struct MergeDetails {
249 pub conflicted_paths: TreeSet<RepoPath>,
250 pub message: Option<SharedString>,
251 pub heads: Vec<Option<SharedString>>,
252}
253
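/// An immutable snapshot of a repository's state (statuses, branch, head
/// commit, merge and stash details) that can be cloned cheaply and diffed
/// against an older snapshot to produce updates for remote clients.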
254#[derive(Clone, Debug, PartialEq, Eq)]
255pub struct RepositorySnapshot {
256 pub id: RepositoryId,
257 pub statuses_by_path: SumTree<StatusEntry>,
258 pub work_directory_abs_path: Arc<Path>,
259 pub path_style: PathStyle,
260 pub branch: Option<Branch>,
261 pub head_commit: Option<CommitDetails>,
262 pub scan_id: u64,
263 pub merge: MergeDetails,
264 pub remote_origin_url: Option<String>,
265 pub remote_upstream_url: Option<String>,
266 pub stash_entries: GitStash,
267}
268
269type JobId = u64;
270
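/// Metadata about a git job that is currently running for a repository: when
/// it started and a human-readable description.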
271#[derive(Clone, Debug, PartialEq, Eq)]
272pub struct JobInfo {
273 pub start: Instant,
274 pub message: SharedString,
275}
276
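/// A single git repository known to the [`GitStore`]: its latest snapshot
/// plus the queue of git jobs (status scans, index writes, network
/// operations) that run against it.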
277pub struct Repository {
278 this: WeakEntity<Self>,
279 snapshot: RepositorySnapshot,
280 commit_message_buffer: Option<Entity<Buffer>>,
281 git_store: WeakEntity<GitStore>,
282 // For a local repository, holds paths that have had worktree events since the last status scan completed,
283 // and that should be examined during the next status scan.
284 paths_needing_status_update: BTreeSet<RepoPath>,
285 job_sender: mpsc::UnboundedSender<GitJob>,
286 active_jobs: HashMap<JobId, JobInfo>,
287 pending_ops: SumTree<PendingOps>,
288 job_id: JobId,
289 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
290 latest_askpass_id: u64,
291}
292
293impl std::ops::Deref for Repository {
294 type Target = RepositorySnapshot;
295
296 fn deref(&self) -> &Self::Target {
297 &self.snapshot
298 }
299}
300
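/// How git operations are executed for a repository: directly against a
/// local backend, or by forwarding requests to the remote project that owns
/// it.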
301#[derive(Clone)]
302pub enum RepositoryState {
303 Local {
304 backend: Arc<dyn GitRepository>,
305 environment: Arc<HashMap<String, String>>,
306 },
307 Remote {
308 project_id: ProjectId,
309 client: AnyProtoClient,
310 },
311}
312
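/// Events emitted by a [`Repository`] when parts of its snapshot change.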
313#[derive(Clone, Debug, PartialEq, Eq)]
314pub enum RepositoryEvent {
315 StatusesChanged,
316 MergeHeadsChanged,
317 BranchChanged,
318 StashEntriesChanged,
319 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
320}
321
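/// Emitted by a [`Repository`] whenever its set of active jobs changes.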
322#[derive(Clone, Debug)]
323pub struct JobsUpdated;
324
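/// Events emitted by the [`GitStore`] itself. The boolean in
/// `RepositoryUpdated` indicates whether the updated repository is the
/// active one.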
325#[derive(Debug)]
326pub enum GitStoreEvent {
327 ActiveRepositoryChanged(Option<RepositoryId>),
328 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
329 RepositoryAdded,
330 RepositoryRemoved(RepositoryId),
331 IndexWriteError(anyhow::Error),
332 JobsUpdated,
333 ConflictsUpdated,
334}
335
336impl EventEmitter<RepositoryEvent> for Repository {}
337impl EventEmitter<JobsUpdated> for Repository {}
338impl EventEmitter<GitStoreEvent> for GitStore {}
339
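/// A unit of work to run against a repository's [`RepositoryState`], with an
/// optional key identifying the kind of job.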
340pub struct GitJob {
341 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
342 key: Option<GitJobKey>,
343}
344
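/// Identifies the kind of a queued [`GitJob`].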
345#[derive(PartialEq, Eq)]
346enum GitJobKey {
347 WriteIndex(Vec<RepoPath>),
348 ReloadBufferDiffBases,
349 RefreshStatuses,
350 ReloadGitState,
351}
352
353impl GitStore {
354 pub fn local(
355 worktree_store: &Entity<WorktreeStore>,
356 buffer_store: Entity<BufferStore>,
357 environment: Entity<ProjectEnvironment>,
358 fs: Arc<dyn Fs>,
359 cx: &mut Context<Self>,
360 ) -> Self {
361 Self::new(
362 worktree_store.clone(),
363 buffer_store,
364 GitStoreState::Local {
365 next_repository_id: Arc::new(AtomicU64::new(1)),
366 downstream: None,
367 project_environment: environment,
368 fs,
369 },
370 cx,
371 )
372 }
373
374 pub fn remote(
375 worktree_store: &Entity<WorktreeStore>,
376 buffer_store: Entity<BufferStore>,
377 upstream_client: AnyProtoClient,
378 project_id: u64,
379 cx: &mut Context<Self>,
380 ) -> Self {
381 Self::new(
382 worktree_store.clone(),
383 buffer_store,
384 GitStoreState::Remote {
385 upstream_client,
386 upstream_project_id: project_id,
387 downstream: None,
388 },
389 cx,
390 )
391 }
392
393 fn new(
394 worktree_store: Entity<WorktreeStore>,
395 buffer_store: Entity<BufferStore>,
396 state: GitStoreState,
397 cx: &mut Context<Self>,
398 ) -> Self {
399 let _subscriptions = vec![
400 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
401 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
402 ];
403
404 GitStore {
405 state,
406 buffer_store,
407 worktree_store,
408 repositories: HashMap::default(),
409 worktree_ids: HashMap::default(),
410 active_repo_id: None,
411 _subscriptions,
412 loading_diffs: HashMap::default(),
413 shared_diffs: HashMap::default(),
414 diffs: HashMap::default(),
415 }
416 }
417
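    /// Registers the RPC message handlers that let remote peers drive git
    /// operations in this store.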
418 pub fn init(client: &AnyProtoClient) {
419 client.add_entity_request_handler(Self::handle_get_remotes);
420 client.add_entity_request_handler(Self::handle_get_branches);
421 client.add_entity_request_handler(Self::handle_get_default_branch);
422 client.add_entity_request_handler(Self::handle_change_branch);
423 client.add_entity_request_handler(Self::handle_create_branch);
424 client.add_entity_request_handler(Self::handle_rename_branch);
425 client.add_entity_request_handler(Self::handle_git_init);
426 client.add_entity_request_handler(Self::handle_push);
427 client.add_entity_request_handler(Self::handle_pull);
428 client.add_entity_request_handler(Self::handle_fetch);
429 client.add_entity_request_handler(Self::handle_stage);
430 client.add_entity_request_handler(Self::handle_unstage);
431 client.add_entity_request_handler(Self::handle_stash);
432 client.add_entity_request_handler(Self::handle_stash_pop);
433 client.add_entity_request_handler(Self::handle_stash_apply);
434 client.add_entity_request_handler(Self::handle_stash_drop);
435 client.add_entity_request_handler(Self::handle_commit);
436 client.add_entity_request_handler(Self::handle_reset);
437 client.add_entity_request_handler(Self::handle_show);
438 client.add_entity_request_handler(Self::handle_load_commit_diff);
439 client.add_entity_request_handler(Self::handle_checkout_files);
440 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
441 client.add_entity_request_handler(Self::handle_set_index_text);
442 client.add_entity_request_handler(Self::handle_askpass);
443 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
444 client.add_entity_request_handler(Self::handle_git_diff);
445 client.add_entity_request_handler(Self::handle_tree_diff);
446 client.add_entity_request_handler(Self::handle_get_blob_content);
447 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
448 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
449 client.add_entity_message_handler(Self::handle_update_diff_bases);
450 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
451 client.add_entity_request_handler(Self::handle_blame_buffer);
452 client.add_entity_message_handler(Self::handle_update_repository);
453 client.add_entity_message_handler(Self::handle_remove_repository);
454 client.add_entity_request_handler(Self::handle_git_clone);
455 client.add_entity_request_handler(Self::handle_get_worktrees);
456 client.add_entity_request_handler(Self::handle_create_worktree);
457 }
458
459 pub fn is_local(&self) -> bool {
460 matches!(self.state, GitStoreState::Local { .. })
461 }

    pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
463 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
464 let id = repo.read(cx).id;
465 if self.active_repo_id != Some(id) {
466 self.active_repo_id = Some(id);
467 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
468 }
469 }
470 }
471
472 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
473 match &mut self.state {
474 GitStoreState::Remote {
475 downstream: downstream_client,
476 ..
477 } => {
478 for repo in self.repositories.values() {
479 let update = repo.read(cx).snapshot.initial_update(project_id);
480 for update in split_repository_update(update) {
481 client.send(update).log_err();
482 }
483 }
484 *downstream_client = Some((client, ProjectId(project_id)));
485 }
486 GitStoreState::Local {
487 downstream: downstream_client,
488 ..
489 } => {
490 let mut snapshots = HashMap::default();
491 let (updates_tx, mut updates_rx) = mpsc::unbounded();
492 for repo in self.repositories.values() {
493 updates_tx
494 .unbounded_send(DownstreamUpdate::UpdateRepository(
495 repo.read(cx).snapshot.clone(),
496 ))
497 .ok();
498 }
499 *downstream_client = Some(LocalDownstreamState {
500 client: client.clone(),
501 project_id: ProjectId(project_id),
502 updates_tx,
503 _task: cx.spawn(async move |this, cx| {
504 cx.background_spawn(async move {
505 while let Some(update) = updates_rx.next().await {
506 match update {
507 DownstreamUpdate::UpdateRepository(snapshot) => {
508 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
509 {
510 let update =
511 snapshot.build_update(old_snapshot, project_id);
512 *old_snapshot = snapshot;
513 for update in split_repository_update(update) {
514 client.send(update)?;
515 }
516 } else {
517 let update = snapshot.initial_update(project_id);
518 for update in split_repository_update(update) {
519 client.send(update)?;
520 }
521 snapshots.insert(snapshot.id, snapshot);
522 }
523 }
524 DownstreamUpdate::RemoveRepository(id) => {
525 client.send(proto::RemoveRepository {
526 project_id,
527 id: id.to_proto(),
528 })?;
529 }
530 }
531 }
532 anyhow::Ok(())
533 })
534 .await
535 .ok();
536 this.update(cx, |this, _| {
537 if let GitStoreState::Local {
538 downstream: downstream_client,
539 ..
540 } = &mut this.state
541 {
542 downstream_client.take();
543 } else {
544 unreachable!("unshared called on remote store");
545 }
546 })
547 }),
548 });
549 }
550 }
551 }
552
553 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
554 match &mut self.state {
555 GitStoreState::Local {
556 downstream: downstream_client,
557 ..
558 } => {
559 downstream_client.take();
560 }
561 GitStoreState::Remote {
562 downstream: downstream_client,
563 ..
564 } => {
565 downstream_client.take();
566 }
567 }
568 self.shared_diffs.clear();
569 }
570
571 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
572 self.shared_diffs.remove(peer_id);
573 }
574
575 pub fn active_repository(&self) -> Option<Entity<Repository>> {
576 self.active_repo_id
577 .as_ref()
578 .map(|id| self.repositories[id].clone())
579 }
580
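    /// Returns a [`BufferDiff`] comparing the buffer's contents against the
    /// staged (index) text of its file, loading it on first use and reusing
    /// the cached diff afterwards.
    ///
    /// Illustrative sketch (not a doctest): assumes an `Entity<GitStore>`, an
    /// open `buffer`, and an async context with `cx: &mut AsyncApp` that
    /// returns `anyhow::Result`.
    ///
    /// ```rust,ignore
    /// let unstaged_diff = git_store
    ///     .update(cx, |store, cx| store.open_unstaged_diff(buffer.clone(), cx))?
    ///     .await?;
    /// // `unstaged_diff` now tracks hunks between the index text and the buffer.
    /// ```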
581 pub fn open_unstaged_diff(
582 &mut self,
583 buffer: Entity<Buffer>,
584 cx: &mut Context<Self>,
585 ) -> Task<Result<Entity<BufferDiff>>> {
586 let buffer_id = buffer.read(cx).remote_id();
587 if let Some(diff_state) = self.diffs.get(&buffer_id)
588 && let Some(unstaged_diff) = diff_state
589 .read(cx)
590 .unstaged_diff
591 .as_ref()
592 .and_then(|weak| weak.upgrade())
593 {
594 if let Some(task) =
595 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
596 {
597 return cx.background_executor().spawn(async move {
598 task.await;
599 Ok(unstaged_diff)
600 });
601 }
602 return Task::ready(Ok(unstaged_diff));
603 }
604
605 let Some((repo, repo_path)) =
606 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
607 else {
608 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
609 };
610
611 let task = self
612 .loading_diffs
613 .entry((buffer_id, DiffKind::Unstaged))
614 .or_insert_with(|| {
615 let staged_text = repo.update(cx, |repo, cx| {
616 repo.load_staged_text(buffer_id, repo_path, cx)
617 });
618 cx.spawn(async move |this, cx| {
619 Self::open_diff_internal(
620 this,
621 DiffKind::Unstaged,
622 staged_text.await.map(DiffBasesChange::SetIndex),
623 buffer,
624 cx,
625 )
626 .await
627 .map_err(Arc::new)
628 })
629 .shared()
630 })
631 .clone();
632
633 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
634 }
635
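    /// Builds a diff of the buffer against the content of the given git
    /// object, or with no base text when `oid` is `None`, attaching the
    /// buffer's unstaged diff as the secondary diff.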
636 pub fn open_diff_since(
637 &mut self,
638 oid: Option<git::Oid>,
639 buffer: Entity<Buffer>,
640 repo: Entity<Repository>,
641 languages: Arc<LanguageRegistry>,
642 cx: &mut Context<Self>,
643 ) -> Task<Result<Entity<BufferDiff>>> {
644 cx.spawn(async move |this, cx| {
645 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
646 let content = match oid {
647 None => None,
648 Some(oid) => Some(
649 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
650 .await?,
651 ),
652 };
653 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
654
655 buffer_diff
656 .update(cx, |buffer_diff, cx| {
657 buffer_diff.set_base_text(
658 content.map(Arc::new),
659 buffer_snapshot.language().cloned(),
660 Some(languages.clone()),
661 buffer_snapshot.text,
662 cx,
663 )
664 })?
665 .await?;
666 let unstaged_diff = this
667 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
668 .await?;
669 buffer_diff.update(cx, |buffer_diff, _| {
670 buffer_diff.set_secondary_diff(unstaged_diff);
671 })?;
672
673 this.update(cx, |_, cx| {
674 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
675 .detach();
676 })?;
677
678 Ok(buffer_diff)
679 })
680 }
681
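    /// Returns a [`BufferDiff`] comparing the buffer's contents against the
    /// text committed at HEAD, with the unstaged diff attached as its
    /// secondary diff.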
682 pub fn open_uncommitted_diff(
683 &mut self,
684 buffer: Entity<Buffer>,
685 cx: &mut Context<Self>,
686 ) -> Task<Result<Entity<BufferDiff>>> {
687 let buffer_id = buffer.read(cx).remote_id();
688
689 if let Some(diff_state) = self.diffs.get(&buffer_id)
690 && let Some(uncommitted_diff) = diff_state
691 .read(cx)
692 .uncommitted_diff
693 .as_ref()
694 .and_then(|weak| weak.upgrade())
695 {
696 if let Some(task) =
697 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
698 {
699 return cx.background_executor().spawn(async move {
700 task.await;
701 Ok(uncommitted_diff)
702 });
703 }
704 return Task::ready(Ok(uncommitted_diff));
705 }
706
707 let Some((repo, repo_path)) =
708 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
709 else {
710 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
711 };
712
713 let task = self
714 .loading_diffs
715 .entry((buffer_id, DiffKind::Uncommitted))
716 .or_insert_with(|| {
717 let changes = repo.update(cx, |repo, cx| {
718 repo.load_committed_text(buffer_id, repo_path, cx)
719 });
720
721 // todo(lw): hot foreground spawn
722 cx.spawn(async move |this, cx| {
723 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
724 .await
725 .map_err(Arc::new)
726 })
727 .shared()
728 })
729 .clone();
730
731 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
732 }
733
734 async fn open_diff_internal(
735 this: WeakEntity<Self>,
736 kind: DiffKind,
737 texts: Result<DiffBasesChange>,
738 buffer_entity: Entity<Buffer>,
739 cx: &mut AsyncApp,
740 ) -> Result<Entity<BufferDiff>> {
741 let diff_bases_change = match texts {
742 Err(e) => {
743 this.update(cx, |this, cx| {
744 let buffer = buffer_entity.read(cx);
745 let buffer_id = buffer.remote_id();
746 this.loading_diffs.remove(&(buffer_id, kind));
747 })?;
748 return Err(e);
749 }
750 Ok(change) => change,
751 };
752
753 this.update(cx, |this, cx| {
754 let buffer = buffer_entity.read(cx);
755 let buffer_id = buffer.remote_id();
756 let language = buffer.language().cloned();
757 let language_registry = buffer.language_registry();
758 let text_snapshot = buffer.text_snapshot();
759 this.loading_diffs.remove(&(buffer_id, kind));
760
761 let git_store = cx.weak_entity();
762 let diff_state = this
763 .diffs
764 .entry(buffer_id)
765 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
766
767 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
768
769 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
770 diff_state.update(cx, |diff_state, cx| {
771 diff_state.language = language;
772 diff_state.language_registry = language_registry;
773
774 match kind {
775 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
776 DiffKind::Uncommitted => {
777 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
778 diff
779 } else {
780 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
781 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
782 unstaged_diff
783 };
784
785 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
786 diff_state.uncommitted_diff = Some(diff.downgrade())
787 }
788 }
789
790 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
791 let rx = diff_state.wait_for_recalculation();
792
793 anyhow::Ok(async move {
794 if let Some(rx) = rx {
795 rx.await;
796 }
797 Ok(diff)
798 })
799 })
800 })??
801 .await
802 }
803
804 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
805 let diff_state = self.diffs.get(&buffer_id)?;
806 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
807 }
808
809 pub fn get_uncommitted_diff(
810 &self,
811 buffer_id: BufferId,
812 cx: &App,
813 ) -> Option<Entity<BufferDiff>> {
814 let diff_state = self.diffs.get(&buffer_id)?;
815 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
816 }
817
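    /// Returns the [`ConflictSet`] tracking merge conflict markers in the
    /// given buffer, creating it and scheduling a reparse if necessary.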
818 pub fn open_conflict_set(
819 &mut self,
820 buffer: Entity<Buffer>,
821 cx: &mut Context<Self>,
822 ) -> Entity<ConflictSet> {
823 log::debug!("open conflict set");
824 let buffer_id = buffer.read(cx).remote_id();
825
826 if let Some(git_state) = self.diffs.get(&buffer_id)
827 && let Some(conflict_set) = git_state
828 .read(cx)
829 .conflict_set
830 .as_ref()
831 .and_then(|weak| weak.upgrade())
832 {
833 let conflict_set = conflict_set;
834 let buffer_snapshot = buffer.read(cx).text_snapshot();
835
836 git_state.update(cx, |state, cx| {
837 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
838 });
839
840 return conflict_set;
841 }
842
843 let is_unmerged = self
844 .repository_and_path_for_buffer_id(buffer_id, cx)
845 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
846 let git_store = cx.weak_entity();
847 let buffer_git_state = self
848 .diffs
849 .entry(buffer_id)
850 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
851 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
852
853 self._subscriptions
854 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
855 cx.emit(GitStoreEvent::ConflictsUpdated);
856 }));
857
858 buffer_git_state.update(cx, |state, cx| {
859 state.conflict_set = Some(conflict_set.downgrade());
860 let buffer_snapshot = buffer.read(cx).text_snapshot();
861 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
862 });
863
864 conflict_set
865 }
866
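    /// Returns the git status of the file at the given project path, if it
    /// belongs to a known repository.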
867 pub fn project_path_git_status(
868 &self,
869 project_path: &ProjectPath,
870 cx: &App,
871 ) -> Option<FileStatus> {
872 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
873 Some(repo.read(cx).status_for_path(&repo_path)?.status)
874 }
875
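    /// Captures a [`GitStoreCheckpoint`] covering every repository in the
    /// store, which can later be compared against or restored.
    ///
    /// Illustrative sketch (not a doctest): assumes an `Entity<GitStore>` and
    /// an async context with `cx: &mut AsyncApp` that returns
    /// `anyhow::Result`.
    ///
    /// ```rust,ignore
    /// let checkpoint = git_store
    ///     .update(cx, |store, cx| store.checkpoint(cx))?
    ///     .await?;
    /// // ...mutate the working trees, then roll everything back:
    /// git_store
    ///     .update(cx, |store, cx| store.restore_checkpoint(checkpoint, cx))?
    ///     .await?;
    /// ```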
876 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
877 let mut work_directory_abs_paths = Vec::new();
878 let mut checkpoints = Vec::new();
879 for repository in self.repositories.values() {
880 repository.update(cx, |repository, _| {
881 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
882 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
883 });
884 }
885
886 cx.background_executor().spawn(async move {
887 let checkpoints = future::try_join_all(checkpoints).await?;
888 Ok(GitStoreCheckpoint {
889 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
890 .into_iter()
891 .zip(checkpoints)
892 .collect(),
893 })
894 })
895 }
896
897 pub fn restore_checkpoint(
898 &self,
899 checkpoint: GitStoreCheckpoint,
900 cx: &mut App,
901 ) -> Task<Result<()>> {
902 let repositories_by_work_dir_abs_path = self
903 .repositories
904 .values()
905 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
906 .collect::<HashMap<_, _>>();
907
908 let mut tasks = Vec::new();
909 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
910 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
911 let restore = repository.update(cx, |repository, _| {
912 repository.restore_checkpoint(checkpoint)
913 });
914 tasks.push(async move { restore.await? });
915 }
916 }
917 cx.background_spawn(async move {
918 future::try_join_all(tasks).await?;
919 Ok(())
920 })
921 }
922
923 /// Compares two checkpoints, returning true if they are equal.
924 pub fn compare_checkpoints(
925 &self,
926 left: GitStoreCheckpoint,
927 mut right: GitStoreCheckpoint,
928 cx: &mut App,
929 ) -> Task<Result<bool>> {
930 let repositories_by_work_dir_abs_path = self
931 .repositories
932 .values()
933 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
934 .collect::<HashMap<_, _>>();
935
936 let mut tasks = Vec::new();
937 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
938 if let Some(right_checkpoint) = right
939 .checkpoints_by_work_dir_abs_path
940 .remove(&work_dir_abs_path)
941 {
942 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
943 {
944 let compare = repository.update(cx, |repository, _| {
945 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
946 });
947
948 tasks.push(async move { compare.await? });
949 }
950 } else {
951 return Task::ready(Ok(false));
952 }
953 }
954 cx.background_spawn(async move {
955 Ok(future::try_join_all(tasks)
956 .await?
957 .into_iter()
958 .all(|result| result))
959 })
960 }
961
962 /// Blames a buffer.
963 pub fn blame_buffer(
964 &self,
965 buffer: &Entity<Buffer>,
966 version: Option<clock::Global>,
967 cx: &mut App,
968 ) -> Task<Result<Option<Blame>>> {
969 let buffer = buffer.read(cx);
970 let Some((repo, repo_path)) =
971 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
972 else {
973 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
974 };
975 let content = match &version {
976 Some(version) => buffer.rope_for_version(version),
977 None => buffer.as_rope().clone(),
978 };
979 let version = version.unwrap_or(buffer.version());
980 let buffer_id = buffer.remote_id();
981
982 let rx = repo.update(cx, |repo, _| {
983 repo.send_job(None, move |state, _| async move {
984 match state {
985 RepositoryState::Local { backend, .. } => backend
986 .blame(repo_path.clone(), content)
987 .await
988 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
989 .map(Some),
990 RepositoryState::Remote { project_id, client } => {
991 let response = client
992 .request(proto::BlameBuffer {
993 project_id: project_id.to_proto(),
994 buffer_id: buffer_id.into(),
995 version: serialize_version(&version),
996 })
997 .await?;
998 Ok(deserialize_blame_buffer_response(response))
999 }
1000 }
1001 })
1002 });
1003
1004 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1005 }
1006
1007 pub fn get_permalink_to_line(
1008 &self,
1009 buffer: &Entity<Buffer>,
1010 selection: Range<u32>,
1011 cx: &mut App,
1012 ) -> Task<Result<url::Url>> {
1013 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1014 return Task::ready(Err(anyhow!("buffer has no file")));
1015 };
1016
1017 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1018 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1019 cx,
1020 ) else {
1021 // If we're not in a Git repo, check whether this is a Rust source
1022 // file in the Cargo registry (presumably opened with go-to-definition
1023 // from a normal Rust file). If so, we can put together a permalink
1024 // using crate metadata.
1025 if buffer
1026 .read(cx)
1027 .language()
1028 .is_none_or(|lang| lang.name() != "Rust".into())
1029 {
1030 return Task::ready(Err(anyhow!("no permalink available")));
1031 }
1032 let file_path = file.worktree.read(cx).absolutize(&file.path);
1033 return cx.spawn(async move |cx| {
1034 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1035 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1036 .context("no permalink available")
1037 });
1038 };
1039
1040 let buffer_id = buffer.read(cx).remote_id();
1041 let branch = repo.read(cx).branch.clone();
1042 let remote = branch
1043 .as_ref()
1044 .and_then(|b| b.upstream.as_ref())
1045 .and_then(|b| b.remote_name())
1046 .unwrap_or("origin")
1047 .to_string();
1048
1049 let rx = repo.update(cx, |repo, _| {
1050 repo.send_job(None, move |state, cx| async move {
1051 match state {
1052 RepositoryState::Local { backend, .. } => {
1053 let origin_url = backend
1054 .remote_url(&remote)
1055 .with_context(|| format!("remote \"{remote}\" not found"))?;
1056
1057 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1058
1059 let provider_registry =
1060 cx.update(GitHostingProviderRegistry::default_global)?;
1061
1062 let (provider, remote) =
1063 parse_git_remote_url(provider_registry, &origin_url)
1064 .context("parsing Git remote URL")?;
1065
1066 Ok(provider.build_permalink(
1067 remote,
1068 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1069 ))
1070 }
1071 RepositoryState::Remote { project_id, client } => {
1072 let response = client
1073 .request(proto::GetPermalinkToLine {
1074 project_id: project_id.to_proto(),
1075 buffer_id: buffer_id.into(),
1076 selection: Some(proto::Range {
1077 start: selection.start as u64,
1078 end: selection.end as u64,
1079 }),
1080 })
1081 .await?;
1082
1083 url::Url::parse(&response.permalink).context("failed to parse permalink")
1084 }
1085 }
1086 })
1087 });
1088 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1089 }
1090
1091 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1092 match &self.state {
1093 GitStoreState::Local {
1094 downstream: downstream_client,
1095 ..
1096 } => downstream_client
1097 .as_ref()
1098 .map(|state| (state.client.clone(), state.project_id)),
1099 GitStoreState::Remote {
1100 downstream: downstream_client,
1101 ..
1102 } => downstream_client.clone(),
1103 }
1104 }
1105
1106 fn upstream_client(&self) -> Option<AnyProtoClient> {
1107 match &self.state {
1108 GitStoreState::Local { .. } => None,
1109 GitStoreState::Remote {
1110 upstream_client, ..
1111 } => Some(upstream_client.clone()),
1112 }
1113 }
1114
1115 fn on_worktree_store_event(
1116 &mut self,
1117 worktree_store: Entity<WorktreeStore>,
1118 event: &WorktreeStoreEvent,
1119 cx: &mut Context<Self>,
1120 ) {
1121 let GitStoreState::Local {
1122 project_environment,
1123 downstream,
1124 next_repository_id,
1125 fs,
1126 } = &self.state
1127 else {
1128 return;
1129 };
1130
1131 match event {
1132 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1133 if let Some(worktree) = self
1134 .worktree_store
1135 .read(cx)
1136 .worktree_for_id(*worktree_id, cx)
1137 {
1138 let paths_by_git_repo =
1139 self.process_updated_entries(&worktree, updated_entries, cx);
1140 let downstream = downstream
1141 .as_ref()
1142 .map(|downstream| downstream.updates_tx.clone());
1143 cx.spawn(async move |_, cx| {
1144 let paths_by_git_repo = paths_by_git_repo.await;
1145 for (repo, paths) in paths_by_git_repo {
1146 repo.update(cx, |repo, cx| {
1147 repo.paths_changed(paths, downstream.clone(), cx);
1148 })
1149 .ok();
1150 }
1151 })
1152 .detach();
1153 }
1154 }
1155 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1156 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1157 else {
1158 return;
1159 };
1160 if !worktree.read(cx).is_visible() {
1161 log::debug!(
1162 "not adding repositories for local worktree {:?} because it's not visible",
1163 worktree.read(cx).abs_path()
1164 );
1165 return;
1166 }
1167 self.update_repositories_from_worktree(
1168 *worktree_id,
1169 project_environment.clone(),
1170 next_repository_id.clone(),
1171 downstream
1172 .as_ref()
1173 .map(|downstream| downstream.updates_tx.clone()),
1174 changed_repos.clone(),
1175 fs.clone(),
1176 cx,
1177 );
1178 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1179 }
1180 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1181 let repos_without_worktree: Vec<RepositoryId> = self
1182 .worktree_ids
1183 .iter_mut()
1184 .filter_map(|(repo_id, worktree_ids)| {
1185 worktree_ids.remove(worktree_id);
1186 if worktree_ids.is_empty() {
1187 Some(*repo_id)
1188 } else {
1189 None
1190 }
1191 })
1192 .collect();
1193 let is_active_repo_removed = repos_without_worktree
1194 .iter()
1195 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1196
1197 for repo_id in repos_without_worktree {
1198 self.repositories.remove(&repo_id);
1199 self.worktree_ids.remove(&repo_id);
1200 if let Some(updates_tx) =
1201 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1202 {
1203 updates_tx
1204 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1205 .ok();
1206 }
1207 }
1208
1209 if is_active_repo_removed {
1210 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1211 self.active_repo_id = Some(repo_id);
1212 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1213 } else {
1214 self.active_repo_id = None;
1215 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1216 }
1217 }
1218 }
1219 _ => {}
1220 }
1221 }

    fn on_repository_event(
1223 &mut self,
1224 repo: Entity<Repository>,
1225 event: &RepositoryEvent,
1226 cx: &mut Context<Self>,
1227 ) {
1228 let id = repo.read(cx).id;
1229 let repo_snapshot = repo.read(cx).snapshot.clone();
1230 for (buffer_id, diff) in self.diffs.iter() {
1231 if let Some((buffer_repo, repo_path)) =
1232 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1233 && buffer_repo == repo
1234 {
1235 diff.update(cx, |diff, cx| {
1236 if let Some(conflict_set) = &diff.conflict_set {
1237 let conflict_status_changed =
1238 conflict_set.update(cx, |conflict_set, cx| {
1239 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1240 conflict_set.set_has_conflict(has_conflict, cx)
1241 })?;
1242 if conflict_status_changed {
1243 let buffer_store = self.buffer_store.read(cx);
1244 if let Some(buffer) = buffer_store.get(*buffer_id) {
1245 let _ = diff
1246 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1247 }
1248 }
1249 }
1250 anyhow::Ok(())
1251 })
1252 .ok();
1253 }
1254 }
1255 cx.emit(GitStoreEvent::RepositoryUpdated(
1256 id,
1257 event.clone(),
1258 self.active_repo_id == Some(id),
1259 ))
1260 }
1261
1262 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1263 cx.emit(GitStoreEvent::JobsUpdated)
1264 }
1265
    /// Updates our list of repositories and schedules git scans in response to
    /// a notification from a worktree.
1267 fn update_repositories_from_worktree(
1268 &mut self,
1269 worktree_id: WorktreeId,
1270 project_environment: Entity<ProjectEnvironment>,
1271 next_repository_id: Arc<AtomicU64>,
1272 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1273 updated_git_repositories: UpdatedGitRepositoriesSet,
1274 fs: Arc<dyn Fs>,
1275 cx: &mut Context<Self>,
1276 ) {
1277 let mut removed_ids = Vec::new();
1278 for update in updated_git_repositories.iter() {
1279 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1280 let existing_work_directory_abs_path =
1281 repo.read(cx).work_directory_abs_path.clone();
1282 Some(&existing_work_directory_abs_path)
1283 == update.old_work_directory_abs_path.as_ref()
1284 || Some(&existing_work_directory_abs_path)
1285 == update.new_work_directory_abs_path.as_ref()
1286 }) {
1287 let repo_id = *id;
1288 if let Some(new_work_directory_abs_path) =
1289 update.new_work_directory_abs_path.clone()
1290 {
1291 self.worktree_ids
1292 .entry(repo_id)
1293 .or_insert_with(HashSet::new)
1294 .insert(worktree_id);
1295 existing.update(cx, |existing, cx| {
1296 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1297 existing.schedule_scan(updates_tx.clone(), cx);
1298 });
                } else if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                    worktree_ids.remove(&worktree_id);
                    if worktree_ids.is_empty() {
                        removed_ids.push(repo_id);
                    }
                }
1307 } else if let UpdatedGitRepository {
1308 new_work_directory_abs_path: Some(work_directory_abs_path),
1309 dot_git_abs_path: Some(dot_git_abs_path),
1310 repository_dir_abs_path: Some(repository_dir_abs_path),
1311 common_dir_abs_path: Some(common_dir_abs_path),
1312 ..
1313 } = update
1314 {
1315 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1316 let git_store = cx.weak_entity();
1317 let repo = cx.new(|cx| {
1318 let mut repo = Repository::local(
1319 id,
1320 work_directory_abs_path.clone(),
1321 dot_git_abs_path.clone(),
1322 repository_dir_abs_path.clone(),
1323 common_dir_abs_path.clone(),
1324 project_environment.downgrade(),
1325 fs.clone(),
1326 git_store,
1327 cx,
1328 );
1329 if let Some(updates_tx) = updates_tx.as_ref() {
1330 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1331 updates_tx
1332 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1333 .ok();
1334 }
1335 repo.schedule_scan(updates_tx.clone(), cx);
1336 repo
1337 });
1338 self._subscriptions
1339 .push(cx.subscribe(&repo, Self::on_repository_event));
1340 self._subscriptions
1341 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1342 self.repositories.insert(id, repo);
1343 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1344 cx.emit(GitStoreEvent::RepositoryAdded);
1345 self.active_repo_id.get_or_insert_with(|| {
1346 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1347 id
1348 });
1349 }
1350 }
1351
1352 for id in removed_ids {
1353 if self.active_repo_id == Some(id) {
1354 self.active_repo_id = None;
1355 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1356 }
1357 self.repositories.remove(&id);
1358 if let Some(updates_tx) = updates_tx.as_ref() {
1359 updates_tx
1360 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1361 .ok();
1362 }
1363 }
1364 }
1365
1366 fn on_buffer_store_event(
1367 &mut self,
1368 _: Entity<BufferStore>,
1369 event: &BufferStoreEvent,
1370 cx: &mut Context<Self>,
1371 ) {
1372 match event {
1373 BufferStoreEvent::BufferAdded(buffer) => {
1374 cx.subscribe(buffer, |this, buffer, event, cx| {
1375 if let BufferEvent::LanguageChanged = event {
1376 let buffer_id = buffer.read(cx).remote_id();
1377 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1378 diff_state.update(cx, |diff_state, cx| {
1379 diff_state.buffer_language_changed(buffer, cx);
1380 });
1381 }
1382 }
1383 })
1384 .detach();
1385 }
1386 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1387 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1388 diffs.remove(buffer_id);
1389 }
1390 }
1391 BufferStoreEvent::BufferDropped(buffer_id) => {
1392 self.diffs.remove(buffer_id);
1393 for diffs in self.shared_diffs.values_mut() {
1394 diffs.remove(buffer_id);
1395 }
1396 }
1397 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferGitState`, in case it already has one.
1402 let buffer_id = buffer.read(cx).remote_id();
1403 let diff_state = self.diffs.get(&buffer_id);
1404 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1405
1406 if let Some(diff_state) = diff_state
1407 && let Some((repo, repo_path)) = repo
1408 {
1409 let buffer = buffer.clone();
1410 let diff_state = diff_state.clone();
1411
1412 cx.spawn(async move |_git_store, cx| {
1413 async {
1414 let diff_bases_change = repo
1415 .update(cx, |repo, cx| {
1416 repo.load_committed_text(buffer_id, repo_path, cx)
1417 })?
1418 .await?;
1419
1420 diff_state.update(cx, |diff_state, cx| {
1421 let buffer_snapshot = buffer.read(cx).text_snapshot();
1422 diff_state.diff_bases_changed(
1423 buffer_snapshot,
1424 Some(diff_bases_change),
1425 cx,
1426 );
1427 })
1428 }
1429 .await
1430 .log_err();
1431 })
1432 .detach();
1433 }
1434 }
1435 _ => {}
1436 }
1437 }
1438
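    /// Recalculates diffs and reparses conflict markers for the given buffers,
    /// returning a future that resolves once all of that work has finished.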
1439 pub fn recalculate_buffer_diffs(
1440 &mut self,
1441 buffers: Vec<Entity<Buffer>>,
1442 cx: &mut Context<Self>,
1443 ) -> impl Future<Output = ()> + use<> {
1444 let mut futures = Vec::new();
1445 for buffer in buffers {
1446 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1447 let buffer = buffer.read(cx).text_snapshot();
1448 diff_state.update(cx, |diff_state, cx| {
1449 diff_state.recalculate_diffs(buffer.clone(), cx);
1450 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1451 });
1452 futures.push(diff_state.update(cx, |diff_state, cx| {
1453 diff_state
1454 .reparse_conflict_markers(buffer, cx)
1455 .map(|_| {})
1456 .boxed()
1457 }));
1458 }
1459 }
1460 async move {
1461 futures::future::join_all(futures).await;
1462 }
1463 }
1464
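    /// Handles hunk stage/unstage events from a [`BufferDiff`] by spawning a
    /// job that writes the new index text, clearing pending hunks and emitting
    /// an `IndexWriteError` if the write fails.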
1465 fn on_buffer_diff_event(
1466 &mut self,
1467 diff: Entity<buffer_diff::BufferDiff>,
1468 event: &BufferDiffEvent,
1469 cx: &mut Context<Self>,
1470 ) {
1471 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1472 let buffer_id = diff.read(cx).buffer_id;
1473 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1474 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1475 diff_state.hunk_staging_operation_count += 1;
1476 diff_state.hunk_staging_operation_count
1477 });
1478 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1479 let recv = repo.update(cx, |repo, cx| {
1480 log::debug!("hunks changed for {}", path.as_unix_str());
1481 repo.spawn_set_index_text_job(
1482 path,
1483 new_index_text.as_ref().map(|rope| rope.to_string()),
1484 Some(hunk_staging_operation_count),
1485 cx,
1486 )
1487 });
1488 let diff = diff.downgrade();
1489 cx.spawn(async move |this, cx| {
1490 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1491 diff.update(cx, |diff, cx| {
1492 diff.clear_pending_hunks(cx);
1493 })
1494 .ok();
1495 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1496 .ok();
1497 }
1498 })
1499 .detach();
1500 }
1501 }
1502 }
1503 }
1504
1505 fn local_worktree_git_repos_changed(
1506 &mut self,
1507 worktree: Entity<Worktree>,
1508 changed_repos: &UpdatedGitRepositoriesSet,
1509 cx: &mut Context<Self>,
1510 ) {
1511 log::debug!("local worktree repos changed");
1512 debug_assert!(worktree.read(cx).is_local());
1513
1514 for repository in self.repositories.values() {
1515 repository.update(cx, |repository, cx| {
1516 let repo_abs_path = &repository.work_directory_abs_path;
1517 if changed_repos.iter().any(|update| {
1518 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1519 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1520 }) {
1521 repository.reload_buffer_diff_bases(cx);
1522 }
1523 });
1524 }
1525 }
1526
1527 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1528 &self.repositories
1529 }
1530
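    /// Returns the git status of the file open in the given buffer, if it
    /// belongs to a known repository.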
1531 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1532 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1533 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1534 Some(status.status)
1535 }
1536
1537 pub fn repository_and_path_for_buffer_id(
1538 &self,
1539 buffer_id: BufferId,
1540 cx: &App,
1541 ) -> Option<(Entity<Repository>, RepoPath)> {
1542 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1543 let project_path = buffer.read(cx).project_path(cx)?;
1544 self.repository_and_path_for_project_path(&project_path, cx)
1545 }
1546
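    /// Resolves a project path to the repository containing it and the path
    /// relative to that repository's working directory. When nested
    /// repositories both contain the path, the innermost one wins.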
1547 pub fn repository_and_path_for_project_path(
1548 &self,
1549 path: &ProjectPath,
1550 cx: &App,
1551 ) -> Option<(Entity<Repository>, RepoPath)> {
1552 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1553 self.repositories
1554 .values()
1555 .filter_map(|repo| {
1556 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1557 Some((repo.clone(), repo_path))
1558 })
1559 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1560 }
1561
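    /// Initializes a new git repository at `path`, either on the local
    /// filesystem or by forwarding the request to the upstream project.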
1562 pub fn git_init(
1563 &self,
1564 path: Arc<Path>,
1565 fallback_branch_name: String,
1566 cx: &App,
1567 ) -> Task<Result<()>> {
1568 match &self.state {
1569 GitStoreState::Local { fs, .. } => {
1570 let fs = fs.clone();
1571 cx.background_executor()
1572 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1573 }
1574 GitStoreState::Remote {
1575 upstream_client,
1576 upstream_project_id: project_id,
1577 ..
1578 } => {
1579 let client = upstream_client.clone();
1580 let project_id = *project_id;
1581 cx.background_executor().spawn(async move {
1582 client
1583 .request(proto::GitInit {
                            project_id,
1585 abs_path: path.to_string_lossy().into_owned(),
1586 fallback_branch_name,
1587 })
1588 .await?;
1589 Ok(())
1590 })
1591 }
1592 }
1593 }
1594
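    /// Clones `repo` into `path`. This is not supported for guests connected
    /// via collab; for other remote projects the request is forwarded
    /// upstream.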
1595 pub fn git_clone(
1596 &self,
1597 repo: String,
1598 path: impl Into<Arc<std::path::Path>>,
1599 cx: &App,
1600 ) -> Task<Result<()>> {
1601 let path = path.into();
1602 match &self.state {
1603 GitStoreState::Local { fs, .. } => {
1604 let fs = fs.clone();
1605 cx.background_executor()
1606 .spawn(async move { fs.git_clone(&repo, &path).await })
1607 }
1608 GitStoreState::Remote {
1609 upstream_client,
1610 upstream_project_id,
1611 ..
1612 } => {
1613 if upstream_client.is_via_collab() {
1614 return Task::ready(Err(anyhow!(
1615 "Git Clone isn't supported for project guests"
1616 )));
1617 }
1618 let request = upstream_client.request(proto::GitClone {
1619 project_id: *upstream_project_id,
1620 abs_path: path.to_string_lossy().into_owned(),
1621 remote_repo: repo,
1622 });
1623
1624 cx.background_spawn(async move {
1625 let result = request.await?;
1626
1627 match result.success {
1628 true => Ok(()),
1629 false => Err(anyhow!("Git Clone failed")),
1630 }
1631 })
1632 }
1633 }
1634 }
1635
1636 async fn handle_update_repository(
1637 this: Entity<Self>,
1638 envelope: TypedEnvelope<proto::UpdateRepository>,
1639 mut cx: AsyncApp,
1640 ) -> Result<()> {
1641 this.update(&mut cx, |this, cx| {
1642 let path_style = this.worktree_store.read(cx).path_style();
1643 let mut update = envelope.payload;
1644
1645 let id = RepositoryId::from_proto(update.id);
1646 let client = this.upstream_client().context("no upstream client")?;
1647
1648 let mut repo_subscription = None;
1649 let repo = this.repositories.entry(id).or_insert_with(|| {
1650 let git_store = cx.weak_entity();
1651 let repo = cx.new(|cx| {
1652 Repository::remote(
1653 id,
1654 Path::new(&update.abs_path).into(),
1655 path_style,
1656 ProjectId(update.project_id),
1657 client,
1658 git_store,
1659 cx,
1660 )
1661 });
1662 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1663 cx.emit(GitStoreEvent::RepositoryAdded);
1664 repo
1665 });
1666 this._subscriptions.extend(repo_subscription);
1667
1668 repo.update(cx, {
1669 let update = update.clone();
1670 |repo, cx| repo.apply_remote_update(update, cx)
1671 })?;
1672
1673 this.active_repo_id.get_or_insert_with(|| {
1674 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1675 id
1676 });
1677
1678 if let Some((client, project_id)) = this.downstream_client() {
1679 update.project_id = project_id.to_proto();
1680 client.send(update).log_err();
1681 }
1682 Ok(())
1683 })?
1684 }
1685
1686 async fn handle_remove_repository(
1687 this: Entity<Self>,
1688 envelope: TypedEnvelope<proto::RemoveRepository>,
1689 mut cx: AsyncApp,
1690 ) -> Result<()> {
1691 this.update(&mut cx, |this, cx| {
1692 let mut update = envelope.payload;
1693 let id = RepositoryId::from_proto(update.id);
1694 this.repositories.remove(&id);
1695 if let Some((client, project_id)) = this.downstream_client() {
1696 update.project_id = project_id.to_proto();
1697 client.send(update).log_err();
1698 }
1699 if this.active_repo_id == Some(id) {
1700 this.active_repo_id = None;
1701 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1702 }
1703 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1704 })
1705 }
1706
1707 async fn handle_git_init(
1708 this: Entity<Self>,
1709 envelope: TypedEnvelope<proto::GitInit>,
1710 cx: AsyncApp,
1711 ) -> Result<proto::Ack> {
1712 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1713 let name = envelope.payload.fallback_branch_name;
1714 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1715 .await?;
1716
1717 Ok(proto::Ack {})
1718 }
1719
1720 async fn handle_git_clone(
1721 this: Entity<Self>,
1722 envelope: TypedEnvelope<proto::GitClone>,
1723 cx: AsyncApp,
1724 ) -> Result<proto::GitCloneResponse> {
1725 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1726 let repo_name = envelope.payload.remote_repo;
1727 let result = cx
1728 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1729 .await;
1730
1731 Ok(proto::GitCloneResponse {
1732 success: result.is_ok(),
1733 })
1734 }
1735
1736 async fn handle_fetch(
1737 this: Entity<Self>,
1738 envelope: TypedEnvelope<proto::Fetch>,
1739 mut cx: AsyncApp,
1740 ) -> Result<proto::RemoteMessageResponse> {
1741 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1742 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1743 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1744 let askpass_id = envelope.payload.askpass_id;
1745
1746 let askpass = make_remote_delegate(
1747 this,
1748 envelope.payload.project_id,
1749 repository_id,
1750 askpass_id,
1751 &mut cx,
1752 );
1753
1754 let remote_output = repository_handle
1755 .update(&mut cx, |repository_handle, cx| {
1756 repository_handle.fetch(fetch_options, askpass, cx)
1757 })?
1758 .await??;
1759
1760 Ok(proto::RemoteMessageResponse {
1761 stdout: remote_output.stdout,
1762 stderr: remote_output.stderr,
1763 })
1764 }
1765
1766 async fn handle_push(
1767 this: Entity<Self>,
1768 envelope: TypedEnvelope<proto::Push>,
1769 mut cx: AsyncApp,
1770 ) -> Result<proto::RemoteMessageResponse> {
1771 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1772 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1773
1774 let askpass_id = envelope.payload.askpass_id;
1775 let askpass = make_remote_delegate(
1776 this,
1777 envelope.payload.project_id,
1778 repository_id,
1779 askpass_id,
1780 &mut cx,
1781 );
1782
1783 let options = envelope
1784 .payload
1785 .options
1786 .as_ref()
1787 .map(|_| match envelope.payload.options() {
1788 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1789 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1790 });
1791
1792 let branch_name = envelope.payload.branch_name.into();
1793 let remote_name = envelope.payload.remote_name.into();
1794
1795 let remote_output = repository_handle
1796 .update(&mut cx, |repository_handle, cx| {
1797 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1798 })?
1799 .await??;
1800 Ok(proto::RemoteMessageResponse {
1801 stdout: remote_output.stdout,
1802 stderr: remote_output.stderr,
1803 })
1804 }
1805
1806 async fn handle_pull(
1807 this: Entity<Self>,
1808 envelope: TypedEnvelope<proto::Pull>,
1809 mut cx: AsyncApp,
1810 ) -> Result<proto::RemoteMessageResponse> {
1811 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1812 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1813 let askpass_id = envelope.payload.askpass_id;
1814 let askpass = make_remote_delegate(
1815 this,
1816 envelope.payload.project_id,
1817 repository_id,
1818 askpass_id,
1819 &mut cx,
1820 );
1821
1822 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1823 let remote_name = envelope.payload.remote_name.into();
1824 let rebase = envelope.payload.rebase;
1825
1826 let remote_message = repository_handle
1827 .update(&mut cx, |repository_handle, cx| {
1828 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1829 })?
1830 .await??;
1831
1832 Ok(proto::RemoteMessageResponse {
1833 stdout: remote_message.stdout,
1834 stderr: remote_message.stderr,
1835 })
1836 }
1837
1838 async fn handle_stage(
1839 this: Entity<Self>,
1840 envelope: TypedEnvelope<proto::Stage>,
1841 mut cx: AsyncApp,
1842 ) -> Result<proto::Ack> {
1843 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1844 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1845
1846 let entries = envelope
1847 .payload
1848 .paths
1849 .into_iter()
1850 .map(|path| RepoPath::new(&path))
1851 .collect::<Result<Vec<_>>>()?;
1852
1853 repository_handle
1854 .update(&mut cx, |repository_handle, cx| {
1855 repository_handle.stage_entries(entries, cx)
1856 })?
1857 .await?;
1858 Ok(proto::Ack {})
1859 }
1860
1861 async fn handle_unstage(
1862 this: Entity<Self>,
1863 envelope: TypedEnvelope<proto::Unstage>,
1864 mut cx: AsyncApp,
1865 ) -> Result<proto::Ack> {
1866 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1867 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1868
1869 let entries = envelope
1870 .payload
1871 .paths
1872 .into_iter()
1873 .map(|path| RepoPath::new(&path))
1874 .collect::<Result<Vec<_>>>()?;
1875
1876 repository_handle
1877 .update(&mut cx, |repository_handle, cx| {
1878 repository_handle.unstage_entries(entries, cx)
1879 })?
1880 .await?;
1881
1882 Ok(proto::Ack {})
1883 }
1884
1885 async fn handle_stash(
1886 this: Entity<Self>,
1887 envelope: TypedEnvelope<proto::Stash>,
1888 mut cx: AsyncApp,
1889 ) -> Result<proto::Ack> {
1890 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1891 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1892
1893 let entries = envelope
1894 .payload
1895 .paths
1896 .into_iter()
1897 .map(|path| RepoPath::new(&path))
1898 .collect::<Result<Vec<_>>>()?;
1899
1900 repository_handle
1901 .update(&mut cx, |repository_handle, cx| {
1902 repository_handle.stash_entries(entries, cx)
1903 })?
1904 .await?;
1905
1906 Ok(proto::Ack {})
1907 }
1908
1909 async fn handle_stash_pop(
1910 this: Entity<Self>,
1911 envelope: TypedEnvelope<proto::StashPop>,
1912 mut cx: AsyncApp,
1913 ) -> Result<proto::Ack> {
1914 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1915 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1916 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1917
1918 repository_handle
1919 .update(&mut cx, |repository_handle, cx| {
1920 repository_handle.stash_pop(stash_index, cx)
1921 })?
1922 .await?;
1923
1924 Ok(proto::Ack {})
1925 }
1926
1927 async fn handle_stash_apply(
1928 this: Entity<Self>,
1929 envelope: TypedEnvelope<proto::StashApply>,
1930 mut cx: AsyncApp,
1931 ) -> Result<proto::Ack> {
1932 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1933 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1934 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1935
1936 repository_handle
1937 .update(&mut cx, |repository_handle, cx| {
1938 repository_handle.stash_apply(stash_index, cx)
1939 })?
1940 .await?;
1941
1942 Ok(proto::Ack {})
1943 }
1944
1945 async fn handle_stash_drop(
1946 this: Entity<Self>,
1947 envelope: TypedEnvelope<proto::StashDrop>,
1948 mut cx: AsyncApp,
1949 ) -> Result<proto::Ack> {
1950 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1951 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1952 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1953
1954 repository_handle
1955 .update(&mut cx, |repository_handle, cx| {
1956 repository_handle.stash_drop(stash_index, cx)
1957 })?
1958 .await??;
1959
1960 Ok(proto::Ack {})
1961 }
1962
1963 async fn handle_set_index_text(
1964 this: Entity<Self>,
1965 envelope: TypedEnvelope<proto::SetIndexText>,
1966 mut cx: AsyncApp,
1967 ) -> Result<proto::Ack> {
1968 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1969 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1970 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1971
1972 repository_handle
1973 .update(&mut cx, |repository_handle, cx| {
1974 repository_handle.spawn_set_index_text_job(
1975 repo_path,
1976 envelope.payload.text,
1977 None,
1978 cx,
1979 )
1980 })?
1981 .await??;
1982 Ok(proto::Ack {})
1983 }
1984
1985 async fn handle_commit(
1986 this: Entity<Self>,
1987 envelope: TypedEnvelope<proto::Commit>,
1988 mut cx: AsyncApp,
1989 ) -> Result<proto::Ack> {
1990 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1991 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1992 let askpass_id = envelope.payload.askpass_id;
1993
1994 let askpass = make_remote_delegate(
1995 this,
1996 envelope.payload.project_id,
1997 repository_id,
1998 askpass_id,
1999 &mut cx,
2000 );
2001
2002 let message = SharedString::from(envelope.payload.message);
2003 let name = envelope.payload.name.map(SharedString::from);
2004 let email = envelope.payload.email.map(SharedString::from);
2005 let options = envelope.payload.options.unwrap_or_default();
2006
2007 repository_handle
2008 .update(&mut cx, |repository_handle, cx| {
2009 repository_handle.commit(
2010 message,
2011 name.zip(email),
2012 CommitOptions {
2013 amend: options.amend,
2014 signoff: options.signoff,
2015 },
2016 askpass,
2017 cx,
2018 )
2019 })?
2020 .await??;
2021 Ok(proto::Ack {})
2022 }
2023
2024 async fn handle_get_remotes(
2025 this: Entity<Self>,
2026 envelope: TypedEnvelope<proto::GetRemotes>,
2027 mut cx: AsyncApp,
2028 ) -> Result<proto::GetRemotesResponse> {
2029 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2030 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2031
2032 let branch_name = envelope.payload.branch_name;
2033
2034 let remotes = repository_handle
2035 .update(&mut cx, |repository_handle, _| {
2036 repository_handle.get_remotes(branch_name)
2037 })?
2038 .await??;
2039
2040 Ok(proto::GetRemotesResponse {
            remotes: remotes
                .into_iter()
                .map(|remote| proto::get_remotes_response::Remote {
                    name: remote.name.to_string(),
                })
                .collect::<Vec<_>>(),
2047 })
2048 }
2049
2050 async fn handle_get_worktrees(
2051 this: Entity<Self>,
2052 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2053 mut cx: AsyncApp,
2054 ) -> Result<proto::GitWorktreesResponse> {
2055 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2056 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2057
2058 let worktrees = repository_handle
2059 .update(&mut cx, |repository_handle, _| {
2060 repository_handle.worktrees()
2061 })?
2062 .await??;
2063
2064 Ok(proto::GitWorktreesResponse {
2065 worktrees: worktrees
2066 .into_iter()
2067 .map(|worktree| worktree_to_proto(&worktree))
2068 .collect::<Vec<_>>(),
2069 })
2070 }
2071
2072 async fn handle_create_worktree(
2073 this: Entity<Self>,
2074 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2075 mut cx: AsyncApp,
2076 ) -> Result<proto::Ack> {
2077 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2078 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2079 let directory = PathBuf::from(envelope.payload.directory);
2080 let name = envelope.payload.name;
2081 let commit = envelope.payload.commit;
2082
2083 repository_handle
2084 .update(&mut cx, |repository_handle, _| {
2085 repository_handle.create_worktree(name, directory, commit)
2086 })?
2087 .await??;
2088
2089 Ok(proto::Ack {})
2090 }
2091
2092 async fn handle_get_branches(
2093 this: Entity<Self>,
2094 envelope: TypedEnvelope<proto::GitGetBranches>,
2095 mut cx: AsyncApp,
2096 ) -> Result<proto::GitBranchesResponse> {
2097 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2098 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2099
2100 let branches = repository_handle
2101 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2102 .await??;
2103
2104 Ok(proto::GitBranchesResponse {
2105 branches: branches
2106 .into_iter()
2107 .map(|branch| branch_to_proto(&branch))
2108 .collect::<Vec<_>>(),
2109 })
2110 }

    async fn handle_get_default_branch(
2112 this: Entity<Self>,
2113 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2114 mut cx: AsyncApp,
2115 ) -> Result<proto::GetDefaultBranchResponse> {
2116 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2117 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2118
2119 let branch = repository_handle
2120 .update(&mut cx, |repository_handle, _| {
2121 repository_handle.default_branch()
2122 })?
2123 .await??
2124 .map(Into::into);
2125
2126 Ok(proto::GetDefaultBranchResponse { branch })
2127 }

    async fn handle_create_branch(
2129 this: Entity<Self>,
2130 envelope: TypedEnvelope<proto::GitCreateBranch>,
2131 mut cx: AsyncApp,
2132 ) -> Result<proto::Ack> {
2133 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2134 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2135 let branch_name = envelope.payload.branch_name;
2136
2137 repository_handle
2138 .update(&mut cx, |repository_handle, _| {
2139 repository_handle.create_branch(branch_name, None)
2140 })?
2141 .await??;
2142
2143 Ok(proto::Ack {})
2144 }
2145
2146 async fn handle_change_branch(
2147 this: Entity<Self>,
2148 envelope: TypedEnvelope<proto::GitChangeBranch>,
2149 mut cx: AsyncApp,
2150 ) -> Result<proto::Ack> {
2151 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2152 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2153 let branch_name = envelope.payload.branch_name;
2154
2155 repository_handle
2156 .update(&mut cx, |repository_handle, _| {
2157 repository_handle.change_branch(branch_name)
2158 })?
2159 .await??;
2160
2161 Ok(proto::Ack {})
2162 }
2163
2164 async fn handle_rename_branch(
2165 this: Entity<Self>,
2166 envelope: TypedEnvelope<proto::GitRenameBranch>,
2167 mut cx: AsyncApp,
2168 ) -> Result<proto::Ack> {
2169 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2170 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2171 let branch = envelope.payload.branch;
2172 let new_name = envelope.payload.new_name;
2173
2174 repository_handle
2175 .update(&mut cx, |repository_handle, _| {
2176 repository_handle.rename_branch(branch, new_name)
2177 })?
2178 .await??;
2179
2180 Ok(proto::Ack {})
2181 }
2182
2183 async fn handle_show(
2184 this: Entity<Self>,
2185 envelope: TypedEnvelope<proto::GitShow>,
2186 mut cx: AsyncApp,
2187 ) -> Result<proto::GitCommitDetails> {
2188 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2189 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2190
2191 let commit = repository_handle
2192 .update(&mut cx, |repository_handle, _| {
2193 repository_handle.show(envelope.payload.commit)
2194 })?
2195 .await??;
2196 Ok(proto::GitCommitDetails {
2197 sha: commit.sha.into(),
2198 message: commit.message.into(),
2199 commit_timestamp: commit.commit_timestamp,
2200 author_email: commit.author_email.into(),
2201 author_name: commit.author_name.into(),
2202 })
2203 }
2204
2205 async fn handle_load_commit_diff(
2206 this: Entity<Self>,
2207 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2208 mut cx: AsyncApp,
2209 ) -> Result<proto::LoadCommitDiffResponse> {
2210 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2211 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2212
2213 let commit_diff = repository_handle
2214 .update(&mut cx, |repository_handle, _| {
2215 repository_handle.load_commit_diff(envelope.payload.commit)
2216 })?
2217 .await??;
2218 Ok(proto::LoadCommitDiffResponse {
2219 files: commit_diff
2220 .files
2221 .into_iter()
2222 .map(|file| proto::CommitFile {
2223 path: file.path.to_proto(),
2224 old_text: file.old_text,
2225 new_text: file.new_text,
2226 })
2227 .collect(),
2228 })
2229 }
2230
2231 async fn handle_reset(
2232 this: Entity<Self>,
2233 envelope: TypedEnvelope<proto::GitReset>,
2234 mut cx: AsyncApp,
2235 ) -> Result<proto::Ack> {
2236 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2237 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2238
2239 let mode = match envelope.payload.mode() {
2240 git_reset::ResetMode::Soft => ResetMode::Soft,
2241 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2242 };
2243
2244 repository_handle
2245 .update(&mut cx, |repository_handle, cx| {
2246 repository_handle.reset(envelope.payload.commit, mode, cx)
2247 })?
2248 .await??;
2249 Ok(proto::Ack {})
2250 }
2251
2252 async fn handle_checkout_files(
2253 this: Entity<Self>,
2254 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2255 mut cx: AsyncApp,
2256 ) -> Result<proto::Ack> {
2257 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2258 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2259 let paths = envelope
2260 .payload
2261 .paths
2262 .iter()
2263 .map(|s| RepoPath::from_proto(s))
2264 .collect::<Result<Vec<_>>>()?;
2265
2266 repository_handle
2267 .update(&mut cx, |repository_handle, cx| {
2268 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2269 })?
2270 .await?;
2271 Ok(proto::Ack {})
2272 }
2273
2274 async fn handle_open_commit_message_buffer(
2275 this: Entity<Self>,
2276 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2277 mut cx: AsyncApp,
2278 ) -> Result<proto::OpenBufferResponse> {
2279 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2280 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2281 let buffer = repository
2282 .update(&mut cx, |repository, cx| {
2283 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2284 })?
2285 .await?;
2286
2287 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2288 this.update(&mut cx, |this, cx| {
2289 this.buffer_store.update(cx, |buffer_store, cx| {
2290 buffer_store
2291 .create_buffer_for_peer(
2292 &buffer,
2293 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2294 cx,
2295 )
2296 .detach_and_log_err(cx);
2297 })
2298 })?;
2299
2300 Ok(proto::OpenBufferResponse {
2301 buffer_id: buffer_id.to_proto(),
2302 })
2303 }
2304
2305 async fn handle_askpass(
2306 this: Entity<Self>,
2307 envelope: TypedEnvelope<proto::AskPassRequest>,
2308 mut cx: AsyncApp,
2309 ) -> Result<proto::AskPassResponse> {
2310 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2311 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2312
2313 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
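        // Take the delegate out of the map while prompting so the lock isn't held
        // across the await; it is reinserted below for any follow-up prompts.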
2314 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2315 debug_panic!("no askpass found");
2316 anyhow::bail!("no askpass found");
2317 };
2318
2319 let response = askpass
2320 .ask_password(envelope.payload.prompt)
2321 .await
2322 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2323
2324 delegates
2325 .lock()
2326 .insert(envelope.payload.askpass_id, askpass);
2327
        // In fact, we don't quite know what we're doing here, since we're sending the
        // askpass password back unencrypted over the wire.
2329 Ok(proto::AskPassResponse {
2330 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2331 })
2332 }
2333
2334 async fn handle_check_for_pushed_commits(
2335 this: Entity<Self>,
2336 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2337 mut cx: AsyncApp,
2338 ) -> Result<proto::CheckForPushedCommitsResponse> {
2339 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2340 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2341
2342 let branches = repository_handle
2343 .update(&mut cx, |repository_handle, _| {
2344 repository_handle.check_for_pushed_commits()
2345 })?
2346 .await??;
2347 Ok(proto::CheckForPushedCommitsResponse {
            pushed_to: branches
                .into_iter()
                .map(|branch| branch.to_string())
                .collect(),
2352 })
2353 }
2354
2355 async fn handle_git_diff(
2356 this: Entity<Self>,
2357 envelope: TypedEnvelope<proto::GitDiff>,
2358 mut cx: AsyncApp,
2359 ) -> Result<proto::GitDiffResponse> {
2360 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2361 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2362 let diff_type = match envelope.payload.diff_type() {
2363 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2364 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2365 };
2366
2367 let mut diff = repository_handle
2368 .update(&mut cx, |repository_handle, cx| {
2369 repository_handle.diff(diff_type, cx)
2370 })?
2371 .await??;
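        // Cap the payload: send at most the first million characters of the diff.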
2372 const ONE_MB: usize = 1_000_000;
2373 if diff.len() > ONE_MB {
2374 diff = diff.chars().take(ONE_MB).collect()
2375 }
2376
2377 Ok(proto::GitDiffResponse { diff })
2378 }
2379
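    /// Computes a tree-level diff for a remote client, either directly from `base`
    /// to `head` or relative to their merge base when `is_merge` is set.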
2380 async fn handle_tree_diff(
2381 this: Entity<Self>,
2382 request: TypedEnvelope<proto::GetTreeDiff>,
2383 mut cx: AsyncApp,
2384 ) -> Result<proto::GetTreeDiffResponse> {
2385 let repository_id = RepositoryId(request.payload.repository_id);
2386 let diff_type = if request.payload.is_merge {
2387 DiffTreeType::MergeBase {
2388 base: request.payload.base.into(),
2389 head: request.payload.head.into(),
2390 }
2391 } else {
2392 DiffTreeType::Since {
2393 base: request.payload.base.into(),
2394 head: request.payload.head.into(),
2395 }
2396 };
2397
2398 let diff = this
2399 .update(&mut cx, |this, cx| {
2400 let repository = this.repositories().get(&repository_id)?;
2401 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2402 })?
2403 .context("missing repository")?
2404 .await??;
2405
2406 Ok(proto::GetTreeDiffResponse {
2407 entries: diff
2408 .entries
2409 .into_iter()
2410 .map(|(path, status)| proto::TreeDiffStatus {
2411 path: path.as_ref().to_proto(),
2412 status: match status {
2413 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2414 TreeDiffStatus::Modified { .. } => {
2415 proto::tree_diff_status::Status::Modified.into()
2416 }
2417 TreeDiffStatus::Deleted { .. } => {
2418 proto::tree_diff_status::Status::Deleted.into()
2419 }
2420 },
2421 oid: match status {
2422 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2423 Some(old.to_string())
2424 }
2425 TreeDiffStatus::Added => None,
2426 },
2427 })
2428 .collect(),
2429 })
2430 }
2431
2432 async fn handle_get_blob_content(
2433 this: Entity<Self>,
2434 request: TypedEnvelope<proto::GetBlobContent>,
2435 mut cx: AsyncApp,
2436 ) -> Result<proto::GetBlobContentResponse> {
2437 let oid = git::Oid::from_str(&request.payload.oid)?;
2438 let repository_id = RepositoryId(request.payload.repository_id);
2439 let content = this
2440 .update(&mut cx, |this, cx| {
2441 let repository = this.repositories().get(&repository_id)?;
2442 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2443 })?
2444 .context("missing repository")?
2445 .await?;
2446 Ok(proto::GetBlobContentResponse { content })
2447 }
2448
2449 async fn handle_open_unstaged_diff(
2450 this: Entity<Self>,
2451 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2452 mut cx: AsyncApp,
2453 ) -> Result<proto::OpenUnstagedDiffResponse> {
2454 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2455 let diff = this
2456 .update(&mut cx, |this, cx| {
2457 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2458 Some(this.open_unstaged_diff(buffer, cx))
2459 })?
2460 .context("missing buffer")?
2461 .await?;
2462 this.update(&mut cx, |this, _| {
2463 let shared_diffs = this
2464 .shared_diffs
2465 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2466 .or_default();
2467 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2468 })?;
2469 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2470 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2471 }
2472
2473 async fn handle_open_uncommitted_diff(
2474 this: Entity<Self>,
2475 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2476 mut cx: AsyncApp,
2477 ) -> Result<proto::OpenUncommittedDiffResponse> {
2478 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2479 let diff = this
2480 .update(&mut cx, |this, cx| {
2481 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2482 Some(this.open_uncommitted_diff(buffer, cx))
2483 })?
2484 .context("missing buffer")?
2485 .await?;
2486 this.update(&mut cx, |this, _| {
2487 let shared_diffs = this
2488 .shared_diffs
2489 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2490 .or_default();
2491 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2492 })?;
2493 diff.read_with(&cx, |diff, cx| {
2494 use proto::open_uncommitted_diff_response::Mode;
2495
2496 let unstaged_diff = diff.secondary_diff();
2497 let index_snapshot = unstaged_diff.and_then(|diff| {
2498 let diff = diff.read(cx);
2499 diff.base_text_exists().then(|| diff.base_text())
2500 });
2501
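            // Decide which base texts to send back: when the index snapshot is the
            // same text as HEAD, report `IndexMatchesHead` so the client can reuse
            // the committed text instead of receiving it twice.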
2502 let mode;
2503 let staged_text;
2504 let committed_text;
2505 if diff.base_text_exists() {
2506 let committed_snapshot = diff.base_text();
2507 committed_text = Some(committed_snapshot.text());
2508 if let Some(index_text) = index_snapshot {
2509 if index_text.remote_id() == committed_snapshot.remote_id() {
2510 mode = Mode::IndexMatchesHead;
2511 staged_text = None;
2512 } else {
2513 mode = Mode::IndexAndHead;
2514 staged_text = Some(index_text.text());
2515 }
2516 } else {
2517 mode = Mode::IndexAndHead;
2518 staged_text = None;
2519 }
2520 } else {
2521 mode = Mode::IndexAndHead;
2522 committed_text = None;
2523 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2524 }
2525
2526 proto::OpenUncommittedDiffResponse {
2527 committed_text,
2528 staged_text,
2529 mode: mode.into(),
2530 }
2531 })
2532 }
2533
2534 async fn handle_update_diff_bases(
2535 this: Entity<Self>,
2536 request: TypedEnvelope<proto::UpdateDiffBases>,
2537 mut cx: AsyncApp,
2538 ) -> Result<()> {
2539 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2540 this.update(&mut cx, |this, cx| {
2541 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2542 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2543 {
2544 let buffer = buffer.read(cx).text_snapshot();
2545 diff_state.update(cx, |diff_state, cx| {
2546 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2547 })
2548 }
2549 })
2550 }
2551
2552 async fn handle_blame_buffer(
2553 this: Entity<Self>,
2554 envelope: TypedEnvelope<proto::BlameBuffer>,
2555 mut cx: AsyncApp,
2556 ) -> Result<proto::BlameBufferResponse> {
2557 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2558 let version = deserialize_version(&envelope.payload.version);
2559 let buffer = this.read_with(&cx, |this, cx| {
2560 this.buffer_store.read(cx).get_existing(buffer_id)
2561 })??;
2562 buffer
2563 .update(&mut cx, |buffer, _| {
2564 buffer.wait_for_version(version.clone())
2565 })?
2566 .await?;
2567 let blame = this
2568 .update(&mut cx, |this, cx| {
2569 this.blame_buffer(&buffer, Some(version), cx)
2570 })?
2571 .await?;
2572 Ok(serialize_blame_buffer_response(blame))
2573 }
2574
2575 async fn handle_get_permalink_to_line(
2576 this: Entity<Self>,
2577 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2578 mut cx: AsyncApp,
2579 ) -> Result<proto::GetPermalinkToLineResponse> {
2580 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2582 let selection = {
2583 let proto_selection = envelope
2584 .payload
2585 .selection
                .context("no selection provided to get permalink for")?;
2587 proto_selection.start as u32..proto_selection.end as u32
2588 };
2589 let buffer = this.read_with(&cx, |this, cx| {
2590 this.buffer_store.read(cx).get_existing(buffer_id)
2591 })??;
2592 let permalink = this
2593 .update(&mut cx, |this, cx| {
2594 this.get_permalink_to_line(&buffer, selection, cx)
2595 })?
2596 .await?;
2597 Ok(proto::GetPermalinkToLineResponse {
2598 permalink: permalink.to_string(),
2599 })
2600 }
2601
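    /// Resolves the repository referenced by an incoming RPC request, failing if
    /// this store no longer tracks it.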
2602 fn repository_for_request(
2603 this: &Entity<Self>,
2604 id: RepositoryId,
2605 cx: &mut AsyncApp,
2606 ) -> Result<Entity<Repository>> {
2607 this.read_with(cx, |this, _| {
2608 this.repositories
2609 .get(&id)
2610 .context("missing repository handle")
2611 .cloned()
2612 })?
2613 }
2614
2615 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2616 self.repositories
2617 .iter()
2618 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2619 .collect()
2620 }
2621
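    /// Groups the updated worktree entries by the repository whose working directory
    /// contains them, assigning each path to its innermost repository.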
2622 fn process_updated_entries(
2623 &self,
2624 worktree: &Entity<Worktree>,
2625 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2626 cx: &mut App,
2627 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2628 let path_style = worktree.read(cx).path_style();
2629 let mut repo_paths = self
2630 .repositories
2631 .values()
2632 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2633 .collect::<Vec<_>>();
2634 let mut entries: Vec<_> = updated_entries
2635 .iter()
2636 .map(|(path, _, _)| path.clone())
2637 .collect();
2638 entries.sort();
2639 let worktree = worktree.read(cx);
2640
2641 let entries = entries
2642 .into_iter()
2643 .map(|path| worktree.absolutize(&path))
2644 .collect::<Arc<[_]>>();
2645
2646 let executor = cx.background_executor().clone();
2647 cx.background_executor().spawn(async move {
2648 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2649 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2650 let mut tasks = FuturesOrdered::new();
2651 for (repo_path, repo) in repo_paths.into_iter().rev() {
2652 let entries = entries.clone();
2653 let task = executor.spawn(async move {
2654 // Find all repository paths that belong to this repo
2655 let mut ix = entries.partition_point(|path| path < &*repo_path);
2656 if ix == entries.len() {
2657 return None;
2658 };
2659
2660 let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a contiguous range.
2662 while let Some(path) = entries.get(ix)
2663 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2664 &repo_path, path, path_style,
2665 )
2666 {
2667 paths.push((repo_path, ix));
2668 ix += 1;
2669 }
2670 if paths.is_empty() {
2671 None
2672 } else {
2673 Some((repo, paths))
2674 }
2675 });
2676 tasks.push_back(task);
2677 }
2678
2679 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2680 let mut path_was_used = vec![false; entries.len()];
2681 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were queued in reverse order of repository path, so more deeply
            // nested repositories come first; we always want to assign a path to its
            // innermost repository.
2684 for t in tasks {
2685 let Some((repo, paths)) = t else {
2686 continue;
2687 };
2688 let entry = paths_by_git_repo.entry(repo).or_default();
2689 for (repo_path, ix) in paths {
2690 if path_was_used[ix] {
2691 continue;
2692 }
2693 path_was_used[ix] = true;
2694 entry.push(repo_path);
2695 }
2696 }
2697
2698 paths_by_git_repo
2699 })
2700 }
2701}
2702
2703impl BufferGitState {
2704 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2705 Self {
2706 unstaged_diff: Default::default(),
2707 uncommitted_diff: Default::default(),
2708 recalculate_diff_task: Default::default(),
2709 language: Default::default(),
2710 language_registry: Default::default(),
2711 recalculating_tx: postage::watch::channel_with(false).0,
2712 hunk_staging_operation_count: 0,
2713 hunk_staging_operation_count_as_of_write: 0,
2714 head_text: Default::default(),
2715 index_text: Default::default(),
2716 head_changed: Default::default(),
2717 index_changed: Default::default(),
2718 language_changed: Default::default(),
2719 conflict_updated_futures: Default::default(),
2720 conflict_set: Default::default(),
2721 reparse_conflict_markers_task: Default::default(),
2722 }
2723 }
2724
2725 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2726 self.language = buffer.read(cx).language().cloned();
2727 self.language_changed = true;
2728 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2729 }
2730
2731 fn reparse_conflict_markers(
2732 &mut self,
2733 buffer: text::BufferSnapshot,
2734 cx: &mut Context<Self>,
2735 ) -> oneshot::Receiver<()> {
2736 let (tx, rx) = oneshot::channel();
2737
2738 let Some(conflict_set) = self
2739 .conflict_set
2740 .as_ref()
2741 .and_then(|conflict_set| conflict_set.upgrade())
2742 else {
2743 return rx;
2744 };
2745
2746 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2747 if conflict_set.has_conflict {
2748 Some(conflict_set.snapshot())
2749 } else {
2750 None
2751 }
2752 });
2753
2754 if let Some(old_snapshot) = old_snapshot {
2755 self.conflict_updated_futures.push(tx);
2756 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2757 let (snapshot, changed_range) = cx
2758 .background_spawn(async move {
2759 let new_snapshot = ConflictSet::parse(&buffer);
2760 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2761 (new_snapshot, changed_range)
2762 })
2763 .await;
2764 this.update(cx, |this, cx| {
2765 if let Some(conflict_set) = &this.conflict_set {
2766 conflict_set
2767 .update(cx, |conflict_set, cx| {
2768 conflict_set.set_snapshot(snapshot, changed_range, cx);
2769 })
2770 .ok();
2771 }
2772 let futures = std::mem::take(&mut this.conflict_updated_futures);
2773 for tx in futures {
2774 tx.send(()).ok();
2775 }
2776 })
2777 }))
2778 }
2779
2780 rx
2781 }
2782
2783 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2784 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2785 }
2786
2787 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2788 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2789 }
2790
2791 fn handle_base_texts_updated(
2792 &mut self,
2793 buffer: text::BufferSnapshot,
2794 message: proto::UpdateDiffBases,
2795 cx: &mut Context<Self>,
2796 ) {
2797 use proto::update_diff_bases::Mode;
2798
2799 let Some(mode) = Mode::from_i32(message.mode) else {
2800 return;
2801 };
2802
2803 let diff_bases_change = match mode {
2804 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2805 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2806 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2807 Mode::IndexAndHead => DiffBasesChange::SetEach {
2808 index: message.staged_text,
2809 head: message.committed_text,
2810 },
2811 };
2812
2813 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2814 }
2815
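    /// If a diff recalculation is currently in flight, returns a future that resolves
    /// once it finishes; otherwise returns `None`.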
2816 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2817 if *self.recalculating_tx.borrow() {
2818 let mut rx = self.recalculating_tx.subscribe();
2819 Some(async move {
2820 loop {
2821 let is_recalculating = rx.recv().await;
2822 if is_recalculating != Some(true) {
2823 break;
2824 }
2825 }
2826 })
2827 } else {
2828 None
2829 }
2830 }
2831
2832 fn diff_bases_changed(
2833 &mut self,
2834 buffer: text::BufferSnapshot,
2835 diff_bases_change: Option<DiffBasesChange>,
2836 cx: &mut Context<Self>,
2837 ) {
2838 match diff_bases_change {
2839 Some(DiffBasesChange::SetIndex(index)) => {
2840 self.index_text = index.map(|mut index| {
2841 text::LineEnding::normalize(&mut index);
2842 Arc::new(index)
2843 });
2844 self.index_changed = true;
2845 }
2846 Some(DiffBasesChange::SetHead(head)) => {
2847 self.head_text = head.map(|mut head| {
2848 text::LineEnding::normalize(&mut head);
2849 Arc::new(head)
2850 });
2851 self.head_changed = true;
2852 }
2853 Some(DiffBasesChange::SetBoth(text)) => {
2854 let text = text.map(|mut text| {
2855 text::LineEnding::normalize(&mut text);
2856 Arc::new(text)
2857 });
2858 self.head_text = text.clone();
2859 self.index_text = text;
2860 self.head_changed = true;
2861 self.index_changed = true;
2862 }
2863 Some(DiffBasesChange::SetEach { index, head }) => {
2864 self.index_text = index.map(|mut index| {
2865 text::LineEnding::normalize(&mut index);
2866 Arc::new(index)
2867 });
2868 self.index_changed = true;
2869 self.head_text = head.map(|mut head| {
2870 text::LineEnding::normalize(&mut head);
2871 Arc::new(head)
2872 });
2873 self.head_changed = true;
2874 }
2875 None => {}
2876 }
2877
2878 self.recalculate_diffs(buffer, cx)
2879 }
2880
2881 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2882 *self.recalculating_tx.borrow_mut() = true;
2883
2884 let language = self.language.clone();
2885 let language_registry = self.language_registry.clone();
2886 let unstaged_diff = self.unstaged_diff();
2887 let uncommitted_diff = self.uncommitted_diff();
2888 let head = self.head_text.clone();
2889 let index = self.index_text.clone();
2890 let index_changed = self.index_changed;
2891 let head_changed = self.head_changed;
2892 let language_changed = self.language_changed;
2893 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
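        // When the index and HEAD base texts are the same `Arc` (or both absent), the
        // unstaged diff is reused as the uncommitted diff below.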
2894 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2895 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2896 (None, None) => true,
2897 _ => false,
2898 };
2899 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2900 log::debug!(
2901 "start recalculating diffs for buffer {}",
2902 buffer.remote_id()
2903 );
2904
2905 let mut new_unstaged_diff = None;
2906 if let Some(unstaged_diff) = &unstaged_diff {
2907 new_unstaged_diff = Some(
2908 BufferDiff::update_diff(
2909 unstaged_diff.clone(),
2910 buffer.clone(),
2911 index,
2912 index_changed,
2913 language_changed,
2914 language.clone(),
2915 language_registry.clone(),
2916 cx,
2917 )
2918 .await?,
2919 );
2920 }
2921
2922 let mut new_uncommitted_diff = None;
2923 if let Some(uncommitted_diff) = &uncommitted_diff {
2924 new_uncommitted_diff = if index_matches_head {
2925 new_unstaged_diff.clone()
2926 } else {
2927 Some(
2928 BufferDiff::update_diff(
2929 uncommitted_diff.clone(),
2930 buffer.clone(),
2931 head,
2932 head_changed,
2933 language_changed,
2934 language.clone(),
2935 language_registry.clone(),
2936 cx,
2937 )
2938 .await?,
2939 )
2940 }
2941 }
2942
2943 let cancel = this.update(cx, |this, _| {
2944 // This checks whether all pending stage/unstage operations
2945 // have quiesced (i.e. both the corresponding write and the
2946 // read of that write have completed). If not, then we cancel
2947 // this recalculation attempt to avoid invalidating pending
2948 // state too quickly; another recalculation will come along
2949 // later and clear the pending state once the state of the index has settled.
2950 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2951 *this.recalculating_tx.borrow_mut() = false;
2952 true
2953 } else {
2954 false
2955 }
2956 })?;
2957 if cancel {
                log::debug!(
                    "aborting diff recalculation for buffer {} due to subsequent hunk operations",
                    buffer.remote_id()
                );
2965 return Ok(());
2966 }
2967
2968 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2969 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2970 {
2971 unstaged_diff.update(cx, |diff, cx| {
2972 if language_changed {
2973 diff.language_changed(cx);
2974 }
2975 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2976 })?
2977 } else {
2978 None
2979 };
2980
2981 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2982 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2983 {
2984 uncommitted_diff.update(cx, |diff, cx| {
2985 if language_changed {
2986 diff.language_changed(cx);
2987 }
2988 diff.set_snapshot_with_secondary(
2989 new_uncommitted_diff,
2990 &buffer,
2991 unstaged_changed_range,
2992 true,
2993 cx,
2994 );
2995 })?;
2996 }
2997
2998 log::debug!(
2999 "finished recalculating diffs for buffer {}",
3000 buffer.remote_id()
3001 );
3002
3003 if let Some(this) = this.upgrade() {
3004 this.update(cx, |this, _| {
3005 this.index_changed = false;
3006 this.head_changed = false;
3007 this.language_changed = false;
3008 *this.recalculating_tx.borrow_mut() = false;
3009 })?;
3010 }
3011
3012 Ok(())
3013 }));
3014 }
3015}
3016
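/// Builds an `AskPassDelegate` that forwards askpass prompts to the downstream client
/// over RPC and relays the encrypted response back to the local git job.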
3017fn make_remote_delegate(
3018 this: Entity<GitStore>,
3019 project_id: u64,
3020 repository_id: RepositoryId,
3021 askpass_id: u64,
3022 cx: &mut AsyncApp,
3023) -> AskPassDelegate {
3024 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3025 this.update(cx, |this, cx| {
3026 let Some((client, _)) = this.downstream_client() else {
3027 return;
3028 };
3029 let response = client.request(proto::AskPassRequest {
3030 project_id,
3031 repository_id: repository_id.to_proto(),
3032 askpass_id,
3033 prompt,
3034 });
3035 cx.spawn(async move |_, _| {
3036 let mut response = response.await?.response;
3037 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3038 .ok();
3039 response.zeroize();
3040 anyhow::Ok(())
3041 })
3042 .detach_and_log_err(cx);
3043 })
3044 .log_err();
3045 })
3046}
3047
3048impl RepositoryId {
3049 pub fn to_proto(self) -> u64 {
3050 self.0
3051 }
3052
3053 pub fn from_proto(id: u64) -> Self {
3054 RepositoryId(id)
3055 }
3056}
3057
3058impl RepositorySnapshot {
3059 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3060 Self {
3061 id,
3062 statuses_by_path: Default::default(),
3063 work_directory_abs_path,
3064 branch: None,
3065 head_commit: None,
3066 scan_id: 0,
3067 merge: Default::default(),
3068 remote_origin_url: None,
3069 remote_upstream_url: None,
3070 stash_entries: Default::default(),
3071 path_style,
3072 }
3073 }
3074
3075 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3076 proto::UpdateRepository {
3077 branch_summary: self.branch.as_ref().map(branch_to_proto),
3078 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3079 updated_statuses: self
3080 .statuses_by_path
3081 .iter()
3082 .map(|entry| entry.to_proto())
3083 .collect(),
3084 removed_statuses: Default::default(),
3085 current_merge_conflicts: self
3086 .merge
3087 .conflicted_paths
3088 .iter()
3089 .map(|repo_path| repo_path.to_proto())
3090 .collect(),
3091 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3092 project_id,
3093 id: self.id.to_proto(),
3094 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3095 entry_ids: vec![self.id.to_proto()],
3096 scan_id: self.scan_id,
3097 is_last_update: true,
3098 stash_entries: self
3099 .stash_entries
3100 .entries
3101 .iter()
3102 .map(stash_to_proto)
3103 .collect(),
3104 }
3105 }
3106
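    /// Builds an incremental `UpdateRepository` message by merging the sorted status
    /// entries of `self` and `old`, collecting updated entries and recording removals.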
3107 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3108 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3109 let mut removed_statuses: Vec<String> = Vec::new();
3110
3111 let mut new_statuses = self.statuses_by_path.iter().peekable();
3112 let mut old_statuses = old.statuses_by_path.iter().peekable();
3113
3114 let mut current_new_entry = new_statuses.next();
3115 let mut current_old_entry = old_statuses.next();
3116 loop {
3117 match (current_new_entry, current_old_entry) {
3118 (Some(new_entry), Some(old_entry)) => {
3119 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3120 Ordering::Less => {
3121 updated_statuses.push(new_entry.to_proto());
3122 current_new_entry = new_statuses.next();
3123 }
3124 Ordering::Equal => {
3125 if new_entry.status != old_entry.status {
3126 updated_statuses.push(new_entry.to_proto());
3127 }
3128 current_old_entry = old_statuses.next();
3129 current_new_entry = new_statuses.next();
3130 }
3131 Ordering::Greater => {
3132 removed_statuses.push(old_entry.repo_path.to_proto());
3133 current_old_entry = old_statuses.next();
3134 }
3135 }
3136 }
3137 (None, Some(old_entry)) => {
3138 removed_statuses.push(old_entry.repo_path.to_proto());
3139 current_old_entry = old_statuses.next();
3140 }
3141 (Some(new_entry), None) => {
3142 updated_statuses.push(new_entry.to_proto());
3143 current_new_entry = new_statuses.next();
3144 }
3145 (None, None) => break,
3146 }
3147 }
3148
3149 proto::UpdateRepository {
3150 branch_summary: self.branch.as_ref().map(branch_to_proto),
3151 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3152 updated_statuses,
3153 removed_statuses,
3154 current_merge_conflicts: self
3155 .merge
3156 .conflicted_paths
3157 .iter()
3158 .map(|path| path.to_proto())
3159 .collect(),
3160 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3161 project_id,
3162 id: self.id.to_proto(),
3163 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3164 entry_ids: vec![],
3165 scan_id: self.scan_id,
3166 is_last_update: true,
3167 stash_entries: self
3168 .stash_entries
3169 .entries
3170 .iter()
3171 .map(stash_to_proto)
3172 .collect(),
3173 }
3174 }
3175
3176 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3177 self.statuses_by_path.iter().cloned()
3178 }
3179
3180 pub fn status_summary(&self) -> GitSummary {
3181 self.statuses_by_path.summary().item_summary
3182 }
3183
3184 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3185 self.statuses_by_path
3186 .get(&PathKey(path.as_ref().clone()), ())
3187 .cloned()
3188 }
3189
3190 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3191 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3192 }
3193
3194 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3195 self.path_style
3196 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3197 .unwrap()
3198 .into()
3199 }
3200
3201 #[inline]
3202 fn abs_path_to_repo_path_inner(
3203 work_directory_abs_path: &Path,
3204 abs_path: &Path,
3205 path_style: PathStyle,
3206 ) -> Option<RepoPath> {
3207 abs_path
3208 .strip_prefix(&work_directory_abs_path)
3209 .ok()
3210 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3211 }
3212
3213 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3214 self.merge.conflicted_paths.contains(repo_path)
3215 }
3216
3217 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3218 let had_conflict_on_last_merge_head_change =
3219 self.merge.conflicted_paths.contains(repo_path);
3220 let has_conflict_currently = self
3221 .status_for_path(repo_path)
3222 .is_some_and(|entry| entry.status.is_conflicted());
3223 had_conflict_on_last_merge_head_change || has_conflict_currently
3224 }
3225
3226 /// This is the name that will be displayed in the repository selector for this repository.
3227 pub fn display_name(&self) -> SharedString {
3228 self.work_directory_abs_path
3229 .file_name()
3230 .unwrap_or_default()
3231 .to_string_lossy()
3232 .to_string()
3233 .into()
3234 }
3235}
3236
3237pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3238 proto::StashEntry {
3239 oid: entry.oid.as_bytes().to_vec(),
3240 message: entry.message.clone(),
3241 branch: entry.branch.clone(),
3242 index: entry.index as u64,
3243 timestamp: entry.timestamp,
3244 }
3245}
3246
3247pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3248 Ok(StashEntry {
3249 oid: Oid::from_bytes(&entry.oid)?,
3250 message: entry.message.clone(),
3251 index: entry.index as usize,
3252 branch: entry.branch.clone(),
3253 timestamp: entry.timestamp,
3254 })
3255}
3256
3257impl MergeDetails {
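    /// Loads the repository's merge state: reads the merge message and the various
    /// in-progress-operation head refs, recomputes the conflicted path set when those
    /// heads have changed, and returns whether the merge heads should be considered changed.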
3258 async fn load(
3259 backend: &Arc<dyn GitRepository>,
3260 status: &SumTree<StatusEntry>,
3261 prev_snapshot: &RepositorySnapshot,
3262 ) -> Result<(MergeDetails, bool)> {
3263 log::debug!("load merge details");
3264 let message = backend.merge_message().await;
3265 let heads = backend
3266 .revparse_batch(vec![
3267 "MERGE_HEAD".into(),
3268 "CHERRY_PICK_HEAD".into(),
3269 "REBASE_HEAD".into(),
3270 "REVERT_HEAD".into(),
3271 "APPLY_HEAD".into(),
3272 ])
3273 .await
3274 .log_err()
3275 .unwrap_or_default()
3276 .into_iter()
3277 .map(|opt| opt.map(SharedString::from))
3278 .collect::<Vec<_>>();
3279 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3280 let conflicted_paths = if merge_heads_changed {
3281 let current_conflicted_paths = TreeSet::from_ordered_entries(
3282 status
3283 .iter()
3284 .filter(|entry| entry.status.is_conflicted())
3285 .map(|entry| entry.repo_path.clone()),
3286 );
3287
3288 // It can happen that we run a scan while a lengthy merge is in progress
3289 // that will eventually result in conflicts, but before those conflicts
3290 // are reported by `git status`. Since for the moment we only care about
3291 // the merge heads state for the purposes of tracking conflicts, don't update
3292 // this state until we see some conflicts.
3293 if heads.iter().any(Option::is_some)
3294 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3295 && current_conflicted_paths.is_empty()
3296 {
3297 log::debug!("not updating merge heads because no conflicts found");
3298 return Ok((
3299 MergeDetails {
3300 message: message.map(SharedString::from),
3301 ..prev_snapshot.merge.clone()
3302 },
3303 false,
3304 ));
3305 }
3306
3307 current_conflicted_paths
3308 } else {
3309 prev_snapshot.merge.conflicted_paths.clone()
3310 };
3311 let details = MergeDetails {
3312 conflicted_paths,
3313 message: message.map(SharedString::from),
3314 heads,
3315 };
3316 Ok((details, merge_heads_changed))
3317 }
3318}
3319
3320impl Repository {
3321 pub fn snapshot(&self) -> RepositorySnapshot {
3322 self.snapshot.clone()
3323 }
3324
3325 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3326 self.pending_ops.iter().cloned()
3327 }
3328
3329 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3330 self.pending_ops.summary().clone()
3331 }
3332
3333 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3334 self.pending_ops
3335 .get(&PathKey(path.as_ref().clone()), ())
3336 .cloned()
3337 }
3338
3339 fn local(
3340 id: RepositoryId,
3341 work_directory_abs_path: Arc<Path>,
3342 dot_git_abs_path: Arc<Path>,
3343 repository_dir_abs_path: Arc<Path>,
3344 common_dir_abs_path: Arc<Path>,
3345 project_environment: WeakEntity<ProjectEnvironment>,
3346 fs: Arc<dyn Fs>,
3347 git_store: WeakEntity<GitStore>,
3348 cx: &mut Context<Self>,
3349 ) -> Self {
3350 let snapshot =
3351 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3352 Repository {
3353 this: cx.weak_entity(),
3354 git_store,
3355 snapshot,
3356 pending_ops: Default::default(),
3357 commit_message_buffer: None,
3358 askpass_delegates: Default::default(),
3359 paths_needing_status_update: Default::default(),
3360 latest_askpass_id: 0,
3361 job_sender: Repository::spawn_local_git_worker(
3362 work_directory_abs_path,
3363 dot_git_abs_path,
3364 repository_dir_abs_path,
3365 common_dir_abs_path,
3366 project_environment,
3367 fs,
3368 cx,
3369 ),
3370 job_id: 0,
3371 active_jobs: Default::default(),
3372 }
3373 }
3374
3375 fn remote(
3376 id: RepositoryId,
3377 work_directory_abs_path: Arc<Path>,
3378 path_style: PathStyle,
3379 project_id: ProjectId,
3380 client: AnyProtoClient,
3381 git_store: WeakEntity<GitStore>,
3382 cx: &mut Context<Self>,
3383 ) -> Self {
3384 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3385 Self {
3386 this: cx.weak_entity(),
3387 snapshot,
3388 commit_message_buffer: None,
3389 git_store,
3390 pending_ops: Default::default(),
3391 paths_needing_status_update: Default::default(),
3392 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3393 askpass_delegates: Default::default(),
3394 latest_askpass_id: 0,
3395 active_jobs: Default::default(),
3396 job_id: 0,
3397 }
3398 }
3399
3400 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3401 self.git_store.upgrade()
3402 }
3403
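    /// Reloads the index and HEAD texts for every open buffer that belongs to this
    /// repository, pushes any changed bases to downstream clients, and feeds them
    /// into each buffer's diff state.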
3404 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3405 let this = cx.weak_entity();
3406 let git_store = self.git_store.clone();
3407 let _ = self.send_keyed_job(
3408 Some(GitJobKey::ReloadBufferDiffBases),
3409 None,
3410 |state, mut cx| async move {
3411 let RepositoryState::Local { backend, .. } = state else {
3412 log::error!("tried to recompute diffs for a non-local repository");
3413 return Ok(());
3414 };
3415
3416 let Some(this) = this.upgrade() else {
3417 return Ok(());
3418 };
3419
3420 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3421 git_store.update(cx, |git_store, cx| {
3422 git_store
3423 .diffs
3424 .iter()
3425 .filter_map(|(buffer_id, diff_state)| {
3426 let buffer_store = git_store.buffer_store.read(cx);
3427 let buffer = buffer_store.get(*buffer_id)?;
3428 let file = File::from_dyn(buffer.read(cx).file())?;
3429 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3430 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3431 log::debug!(
3432 "start reload diff bases for repo path {}",
3433 repo_path.as_unix_str()
3434 );
3435 diff_state.update(cx, |diff_state, _| {
3436 let has_unstaged_diff = diff_state
3437 .unstaged_diff
3438 .as_ref()
3439 .is_some_and(|diff| diff.is_upgradable());
3440 let has_uncommitted_diff = diff_state
3441 .uncommitted_diff
3442 .as_ref()
3443 .is_some_and(|set| set.is_upgradable());
3444
3445 Some((
3446 buffer,
3447 repo_path,
3448 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3449 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3450 ))
3451 })
3452 })
3453 .collect::<Vec<_>>()
3454 })
3455 })??;
3456
3457 let buffer_diff_base_changes = cx
3458 .background_spawn(async move {
3459 let mut changes = Vec::new();
3460 for (buffer, repo_path, current_index_text, current_head_text) in
3461 &repo_diff_state_updates
3462 {
3463 let index_text = if current_index_text.is_some() {
3464 backend.load_index_text(repo_path.clone()).await
3465 } else {
3466 None
3467 };
3468 let head_text = if current_head_text.is_some() {
3469 backend.load_committed_text(repo_path.clone()).await
3470 } else {
3471 None
3472 };
3473
3474 let change =
3475 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3476 (Some(current_index), Some(current_head)) => {
3477 let index_changed =
3478 index_text.as_ref() != current_index.as_deref();
3479 let head_changed =
3480 head_text.as_ref() != current_head.as_deref();
3481 if index_changed && head_changed {
3482 if index_text == head_text {
3483 Some(DiffBasesChange::SetBoth(head_text))
3484 } else {
3485 Some(DiffBasesChange::SetEach {
3486 index: index_text,
3487 head: head_text,
3488 })
3489 }
3490 } else if index_changed {
3491 Some(DiffBasesChange::SetIndex(index_text))
3492 } else if head_changed {
3493 Some(DiffBasesChange::SetHead(head_text))
3494 } else {
3495 None
3496 }
3497 }
3498 (Some(current_index), None) => {
3499 let index_changed =
3500 index_text.as_ref() != current_index.as_deref();
3501 index_changed
3502 .then_some(DiffBasesChange::SetIndex(index_text))
3503 }
3504 (None, Some(current_head)) => {
3505 let head_changed =
3506 head_text.as_ref() != current_head.as_deref();
3507 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3508 }
3509 (None, None) => None,
3510 };
3511
3512 changes.push((buffer.clone(), change))
3513 }
3514 changes
3515 })
3516 .await;
3517
3518 git_store.update(&mut cx, |git_store, cx| {
3519 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3520 let buffer_snapshot = buffer.read(cx).text_snapshot();
3521 let buffer_id = buffer_snapshot.remote_id();
3522 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3523 continue;
3524 };
3525
3526 let downstream_client = git_store.downstream_client();
3527 diff_state.update(cx, |diff_state, cx| {
3528 use proto::update_diff_bases::Mode;
3529
3530 if let Some((diff_bases_change, (client, project_id))) =
3531 diff_bases_change.clone().zip(downstream_client)
3532 {
3533 let (staged_text, committed_text, mode) = match diff_bases_change {
3534 DiffBasesChange::SetIndex(index) => {
3535 (index, None, Mode::IndexOnly)
3536 }
3537 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3538 DiffBasesChange::SetEach { index, head } => {
3539 (index, head, Mode::IndexAndHead)
3540 }
3541 DiffBasesChange::SetBoth(text) => {
3542 (None, text, Mode::IndexMatchesHead)
3543 }
3544 };
3545 client
3546 .send(proto::UpdateDiffBases {
3547 project_id: project_id.to_proto(),
3548 buffer_id: buffer_id.to_proto(),
3549 staged_text,
3550 committed_text,
3551 mode: mode as i32,
3552 })
3553 .log_err();
3554 }
3555
3556 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3557 });
3558 }
3559 })
3560 },
3561 );
3562 }
3563
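    /// Enqueues a job on this repository's background git worker. If `status` is
    /// provided, it is shown as an active-job message until the job completes; the
    /// returned receiver yields the job's result.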
3564 pub fn send_job<F, Fut, R>(
3565 &mut self,
3566 status: Option<SharedString>,
3567 job: F,
3568 ) -> oneshot::Receiver<R>
3569 where
3570 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3571 Fut: Future<Output = R> + 'static,
3572 R: Send + 'static,
3573 {
3574 self.send_keyed_job(None, status, job)
3575 }
3576
3577 fn send_keyed_job<F, Fut, R>(
3578 &mut self,
3579 key: Option<GitJobKey>,
3580 status: Option<SharedString>,
3581 job: F,
3582 ) -> oneshot::Receiver<R>
3583 where
3584 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3585 Fut: Future<Output = R> + 'static,
3586 R: Send + 'static,
3587 {
3588 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3589 let job_id = post_inc(&mut self.job_id);
3590 let this = self.this.clone();
3591 self.job_sender
3592 .unbounded_send(GitJob {
3593 key,
3594 job: Box::new(move |state, cx: &mut AsyncApp| {
3595 let job = job(state, cx.clone());
3596 cx.spawn(async move |cx| {
3597 if let Some(s) = status.clone() {
3598 this.update(cx, |this, cx| {
3599 this.active_jobs.insert(
3600 job_id,
3601 JobInfo {
3602 start: Instant::now(),
3603 message: s.clone(),
3604 },
3605 );
3606
3607 cx.notify();
3608 })
3609 .ok();
3610 }
3611 let result = job.await;
3612
3613 this.update(cx, |this, cx| {
3614 this.active_jobs.remove(&job_id);
3615 cx.notify();
3616 })
3617 .ok();
3618
3619 result_tx.send(result).ok();
3620 })
3621 }),
3622 })
3623 .ok();
3624 result_rx
3625 }
3626
3627 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3628 let Some(git_store) = self.git_store.upgrade() else {
3629 return;
3630 };
3631 let entity = cx.entity();
3632 git_store.update(cx, |git_store, cx| {
3633 let Some((&id, _)) = git_store
3634 .repositories
3635 .iter()
3636 .find(|(_, handle)| *handle == &entity)
3637 else {
3638 return;
3639 };
3640 git_store.active_repo_id = Some(id);
3641 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3642 });
3643 }
3644
3645 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3646 self.snapshot.status()
3647 }
3648
3649 pub fn cached_stash(&self) -> GitStash {
3650 self.snapshot.stash_entries.clone()
3651 }
3652
3653 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3654 let git_store = self.git_store.upgrade()?;
3655 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3656 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3657 let abs_path = SanitizedPath::new(&abs_path);
3658 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3659 Some(ProjectPath {
3660 worktree_id: worktree.read(cx).id(),
3661 path: relative_path,
3662 })
3663 }
3664
3665 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3666 let git_store = self.git_store.upgrade()?;
3667 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3668 let abs_path = worktree_store.absolutize(path, cx)?;
3669 self.snapshot.abs_path_to_repo_path(&abs_path)
3670 }
3671
3672 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3673 other
3674 .read(cx)
3675 .snapshot
3676 .work_directory_abs_path
3677 .starts_with(&self.snapshot.work_directory_abs_path)
3678 }
3679
3680 pub fn open_commit_buffer(
3681 &mut self,
3682 languages: Option<Arc<LanguageRegistry>>,
3683 buffer_store: Entity<BufferStore>,
3684 cx: &mut Context<Self>,
3685 ) -> Task<Result<Entity<Buffer>>> {
3686 let id = self.id;
3687 if let Some(buffer) = self.commit_message_buffer.clone() {
3688 return Task::ready(Ok(buffer));
3689 }
3690 let this = cx.weak_entity();
3691
3692 let rx = self.send_job(None, move |state, mut cx| async move {
3693 let Some(this) = this.upgrade() else {
3694 bail!("git store was dropped");
3695 };
3696 match state {
3697 RepositoryState::Local { .. } => {
3698 this.update(&mut cx, |_, cx| {
3699 Self::open_local_commit_buffer(languages, buffer_store, cx)
3700 })?
3701 .await
3702 }
3703 RepositoryState::Remote { project_id, client } => {
3704 let request = client.request(proto::OpenCommitMessageBuffer {
3705 project_id: project_id.0,
3706 repository_id: id.to_proto(),
3707 });
3708 let response = request.await.context("requesting to open commit buffer")?;
3709 let buffer_id = BufferId::new(response.buffer_id)?;
3710 let buffer = buffer_store
3711 .update(&mut cx, |buffer_store, cx| {
3712 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3713 })?
3714 .await?;
3715 if let Some(language_registry) = languages {
3716 let git_commit_language =
3717 language_registry.language_for_name("Git Commit").await?;
3718 buffer.update(&mut cx, |buffer, cx| {
3719 buffer.set_language(Some(git_commit_language), cx);
3720 })?;
3721 }
3722 this.update(&mut cx, |this, _| {
3723 this.commit_message_buffer = Some(buffer.clone());
3724 })?;
3725 Ok(buffer)
3726 }
3727 }
3728 });
3729
3730 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3731 }
3732
3733 fn open_local_commit_buffer(
3734 language_registry: Option<Arc<LanguageRegistry>>,
3735 buffer_store: Entity<BufferStore>,
3736 cx: &mut Context<Self>,
3737 ) -> Task<Result<Entity<Buffer>>> {
3738 cx.spawn(async move |repository, cx| {
3739 let buffer = buffer_store
3740 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3741 .await?;
3742
3743 if let Some(language_registry) = language_registry {
3744 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3745 buffer.update(cx, |buffer, cx| {
3746 buffer.set_language(Some(git_commit_language), cx);
3747 })?;
3748 }
3749
3750 repository.update(cx, |repository, _| {
3751 repository.commit_message_buffer = Some(buffer.clone());
3752 })?;
3753 Ok(buffer)
3754 })
3755 }
3756
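/// Restores the given paths to their contents at `commit`, tracking each path as
/// a pending `Reverted` op while the checkout runs.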
3757 pub fn checkout_files(
3758 &mut self,
3759 commit: &str,
3760 paths: Vec<RepoPath>,
3761 cx: &mut Context<Self>,
3762 ) -> Task<Result<()>> {
3763 let commit = commit.to_string();
3764 let id = self.id;
3765
3766 self.spawn_job_with_tracking(
3767 paths.clone(),
3768 pending_op::GitStatus::Reverted,
3769 cx,
3770 async move |this, cx| {
3771 this.update(cx, |this, _cx| {
3772 this.send_job(
3773 Some(format!("git checkout {}", commit).into()),
3774 move |git_repo, _| async move {
3775 match git_repo {
3776 RepositoryState::Local {
3777 backend,
3778 environment,
3779 ..
3780 } => {
3781 backend
3782 .checkout_files(commit, paths, environment.clone())
3783 .await
3784 }
3785 RepositoryState::Remote { project_id, client } => {
3786 client
3787 .request(proto::GitCheckoutFiles {
3788 project_id: project_id.0,
3789 repository_id: id.to_proto(),
3790 commit,
3791 paths: paths
3792 .into_iter()
3793 .map(|p| p.to_proto())
3794 .collect(),
3795 })
3796 .await?;
3797
3798 Ok(())
3799 }
3800 }
3801 },
3802 )
3803 })?
3804 .await?
3805 },
3806 )
3807 }
3808
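/// Resets `HEAD` to `commit` using the given `ResetMode` (soft or mixed).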
3809 pub fn reset(
3810 &mut self,
3811 commit: String,
3812 reset_mode: ResetMode,
3813 _cx: &mut App,
3814 ) -> oneshot::Receiver<Result<()>> {
3815 let id = self.id;
3816
3817 self.send_job(None, move |git_repo, _| async move {
3818 match git_repo {
3819 RepositoryState::Local {
3820 backend,
3821 environment,
3822 ..
3823 } => backend.reset(commit, reset_mode, environment).await,
3824 RepositoryState::Remote { project_id, client } => {
3825 client
3826 .request(proto::GitReset {
3827 project_id: project_id.0,
3828 repository_id: id.to_proto(),
3829 commit,
3830 mode: match reset_mode {
3831 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3832 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3833 },
3834 })
3835 .await?;
3836
3837 Ok(())
3838 }
3839 }
3840 })
3841 }
3842
3843 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3844 let id = self.id;
3845 self.send_job(None, move |git_repo, _cx| async move {
3846 match git_repo {
3847 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3848 RepositoryState::Remote { project_id, client } => {
3849 let resp = client
3850 .request(proto::GitShow {
3851 project_id: project_id.0,
3852 repository_id: id.to_proto(),
3853 commit,
3854 })
3855 .await?;
3856
3857 Ok(CommitDetails {
3858 sha: resp.sha.into(),
3859 message: resp.message.into(),
3860 commit_timestamp: resp.commit_timestamp,
3861 author_email: resp.author_email.into(),
3862 author_name: resp.author_name.into(),
3863 })
3864 }
3865 }
3866 })
3867 }
3868
3869 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3870 let id = self.id;
3871 self.send_job(None, move |git_repo, cx| async move {
3872 match git_repo {
3873 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3874 RepositoryState::Remote {
3875 client, project_id, ..
3876 } => {
3877 let response = client
3878 .request(proto::LoadCommitDiff {
3879 project_id: project_id.0,
3880 repository_id: id.to_proto(),
3881 commit,
3882 })
3883 .await?;
3884 Ok(CommitDiff {
3885 files: response
3886 .files
3887 .into_iter()
3888 .map(|file| {
3889 Ok(CommitFile {
3890 path: RepoPath::from_proto(&file.path)?,
3891 old_text: file.old_text,
3892 new_text: file.new_text,
3893 })
3894 })
3895 .collect::<Result<Vec<_>>>()?,
3896 })
3897 }
3898 }
3899 })
3900 }
3901
3902 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3903 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3904 }
3905
3906 fn save_buffers<'a>(
3907 &self,
3908 entries: impl IntoIterator<Item = &'a RepoPath>,
3909 cx: &mut Context<Self>,
3910 ) -> Vec<Task<anyhow::Result<()>>> {
3911 let mut save_futures = Vec::new();
3912 if let Some(buffer_store) = self.buffer_store(cx) {
3913 buffer_store.update(cx, |buffer_store, cx| {
3914 for path in entries {
3915 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3916 continue;
3917 };
3918 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3919 && buffer
3920 .read(cx)
3921 .file()
3922 .is_some_and(|file| file.disk_state().exists())
3923 && buffer.read(cx).has_unsaved_edits()
3924 {
3925 save_futures.push(buffer_store.save_buffer(buffer, cx));
3926 }
3927 }
3928 })
3929 }
3930 save_futures
3931 }
3932
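/// Stages the given paths, saving any dirty open buffers for them first. Each
/// path is tracked as a pending `Staged` op while the job runs.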
3933 pub fn stage_entries(
3934 &mut self,
3935 entries: Vec<RepoPath>,
3936 cx: &mut Context<Self>,
3937 ) -> Task<anyhow::Result<()>> {
3938 if entries.is_empty() {
3939 return Task::ready(Ok(()));
3940 }
3941 let id = self.id;
3942 let save_tasks = self.save_buffers(&entries, cx);
3943 let paths = entries
3944 .iter()
3945 .map(|p| p.as_unix_str())
3946 .collect::<Vec<_>>()
3947 .join(" ");
3948 let status = format!("git add {paths}");
3949 let job_key = GitJobKey::WriteIndex(entries.clone());
3950
3951 self.spawn_job_with_tracking(
3952 entries.clone(),
3953 pending_op::GitStatus::Staged,
3954 cx,
3955 async move |this, cx| {
3956 for save_task in save_tasks {
3957 save_task.await?;
3958 }
3959
3960 this.update(cx, |this, _| {
3961 this.send_keyed_job(
3962 Some(job_key),
3963 Some(status.into()),
3964 move |git_repo, _cx| async move {
3965 match git_repo {
3966 RepositoryState::Local {
3967 backend,
3968 environment,
3969 ..
3970 } => backend.stage_paths(entries, environment.clone()).await,
3971 RepositoryState::Remote { project_id, client } => {
3972 client
3973 .request(proto::Stage {
3974 project_id: project_id.0,
3975 repository_id: id.to_proto(),
3976 paths: entries
3977 .into_iter()
3978 .map(|repo_path| repo_path.to_proto())
3979 .collect(),
3980 })
3981 .await
3982 .context("sending stage request")?;
3983
3984 Ok(())
3985 }
3986 }
3987 },
3988 )
3989 })?
3990 .await?
3991 },
3992 )
3993 }
3994
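/// Unstages the given paths, saving any dirty open buffers for them first. Each
/// path is tracked as a pending `Unstaged` op while the job runs.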
3995 pub fn unstage_entries(
3996 &mut self,
3997 entries: Vec<RepoPath>,
3998 cx: &mut Context<Self>,
3999 ) -> Task<anyhow::Result<()>> {
4000 if entries.is_empty() {
4001 return Task::ready(Ok(()));
4002 }
4003 let id = self.id;
4004 let save_tasks = self.save_buffers(&entries, cx);
4005 let paths = entries
4006 .iter()
4007 .map(|p| p.as_unix_str())
4008 .collect::<Vec<_>>()
4009 .join(" ");
4010 let status = format!("git reset {paths}");
4011 let job_key = GitJobKey::WriteIndex(entries.clone());
4012
4013 self.spawn_job_with_tracking(
4014 entries.clone(),
4015 pending_op::GitStatus::Unstaged,
4016 cx,
4017 async move |this, cx| {
4018 for save_task in save_tasks {
4019 save_task.await?;
4020 }
4021
4022 this.update(cx, |this, _| {
4023 this.send_keyed_job(
4024 Some(job_key),
4025 Some(status.into()),
4026 move |git_repo, _cx| async move {
4027 match git_repo {
4028 RepositoryState::Local {
4029 backend,
4030 environment,
4031 ..
4032 } => backend.unstage_paths(entries, environment).await,
4033 RepositoryState::Remote { project_id, client } => {
4034 client
4035 .request(proto::Unstage {
4036 project_id: project_id.0,
4037 repository_id: id.to_proto(),
4038 paths: entries
4039 .into_iter()
4040 .map(|repo_path| repo_path.to_proto())
4041 .collect(),
4042 })
4043 .await
4044 .context("sending unstage request")?;
4045
4046 Ok(())
4047 }
4048 }
4049 },
4050 )
4051 })?
4052 .await?
4053 },
4054 )
4055 }
4056
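/// Stages every entry in the cached status that is not already fully staged and
/// has no staging operation pending.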
4057 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4058 let to_stage = self
4059 .cached_status()
4060 .filter_map(|entry| {
4061 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4062 if ops.staging() || ops.staged() {
4063 None
4064 } else {
4065 Some(entry.repo_path)
4066 }
4067 } else if entry.status.staging().is_fully_staged() {
4068 None
4069 } else {
4070 Some(entry.repo_path)
4071 }
4072 })
4073 .collect();
4074 self.stage_entries(to_stage, cx)
4075 }
4076
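/// Unstages every entry in the cached status that is staged (fully or partially)
/// or has a staging operation pending.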
4077 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4078 let to_unstage = self
4079 .cached_status()
4080 .filter_map(|entry| {
4081 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4082 if !ops.staging() && !ops.staged() {
4083 None
4084 } else {
4085 Some(entry.repo_path)
4086 }
4087 } else if entry.status.staging().is_fully_unstaged() {
4088 None
4089 } else {
4090 Some(entry.repo_path)
4091 }
4092 })
4093 .collect();
4094 self.unstage_entries(to_unstage, cx)
4095 }
4096
4097 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4098 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4099
4100 self.stash_entries(to_stash, cx)
4101 }
4102
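/// Stashes the changes to the given paths.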
4103 pub fn stash_entries(
4104 &mut self,
4105 entries: Vec<RepoPath>,
4106 cx: &mut Context<Self>,
4107 ) -> Task<anyhow::Result<()>> {
4108 let id = self.id;
4109
4110 cx.spawn(async move |this, cx| {
4111 this.update(cx, |this, _| {
4112 this.send_job(None, move |git_repo, _cx| async move {
4113 match git_repo {
4114 RepositoryState::Local {
4115 backend,
4116 environment,
4117 ..
4118 } => backend.stash_paths(entries, environment).await,
4119 RepositoryState::Remote { project_id, client } => {
4120 client
4121 .request(proto::Stash {
4122 project_id: project_id.0,
4123 repository_id: id.to_proto(),
4124 paths: entries
4125 .into_iter()
4126 .map(|repo_path| repo_path.to_proto())
4127 .collect(),
4128 })
4129 .await
4130 .context("sending stash request")?;
4131 Ok(())
4132 }
4133 }
4134 })
4135 })?
4136 .await??;
4137 Ok(())
4138 })
4139 }
4140
4141 pub fn stash_pop(
4142 &mut self,
4143 index: Option<usize>,
4144 cx: &mut Context<Self>,
4145 ) -> Task<anyhow::Result<()>> {
4146 let id = self.id;
4147 cx.spawn(async move |this, cx| {
4148 this.update(cx, |this, _| {
4149 this.send_job(None, move |git_repo, _cx| async move {
4150 match git_repo {
4151 RepositoryState::Local {
4152 backend,
4153 environment,
4154 ..
4155 } => backend.stash_pop(index, environment).await,
4156 RepositoryState::Remote { project_id, client } => {
4157 client
4158 .request(proto::StashPop {
4159 project_id: project_id.0,
4160 repository_id: id.to_proto(),
4161 stash_index: index.map(|i| i as u64),
4162 })
4163 .await
4164 .context("sending stash pop request")?;
4165 Ok(())
4166 }
4167 }
4168 })
4169 })?
4170 .await??;
4171 Ok(())
4172 })
4173 }
4174
4175 pub fn stash_apply(
4176 &mut self,
4177 index: Option<usize>,
4178 cx: &mut Context<Self>,
4179 ) -> Task<anyhow::Result<()>> {
4180 let id = self.id;
4181 cx.spawn(async move |this, cx| {
4182 this.update(cx, |this, _| {
4183 this.send_job(None, move |git_repo, _cx| async move {
4184 match git_repo {
4185 RepositoryState::Local {
4186 backend,
4187 environment,
4188 ..
4189 } => backend.stash_apply(index, environment).await,
4190 RepositoryState::Remote { project_id, client } => {
4191 client
4192 .request(proto::StashApply {
4193 project_id: project_id.0,
4194 repository_id: id.to_proto(),
4195 stash_index: index.map(|i| i as u64),
4196 })
4197 .await
4198 .context("sending stash apply request")?;
4199 Ok(())
4200 }
4201 }
4202 })
4203 })?
4204 .await??;
4205 Ok(())
4206 })
4207 }
4208
4209 pub fn stash_drop(
4210 &mut self,
4211 index: Option<usize>,
4212 cx: &mut Context<Self>,
4213 ) -> oneshot::Receiver<anyhow::Result<()>> {
4214 let id = self.id;
4215 let updates_tx = self
4216 .git_store()
4217 .and_then(|git_store| match &git_store.read(cx).state {
4218 GitStoreState::Local { downstream, .. } => downstream
4219 .as_ref()
4220 .map(|downstream| downstream.updates_tx.clone()),
4221 _ => None,
4222 });
4223 let this = cx.weak_entity();
4224 self.send_job(None, move |git_repo, mut cx| async move {
4225 match git_repo {
4226 RepositoryState::Local {
4227 backend,
4228 environment,
4229 ..
4230 } => {
4231 // TODO: it would be nice not to have to do this manually.
4232 let result = backend.stash_drop(index, environment).await;
4233 if result.is_ok()
4234 && let Ok(stash_entries) = backend.stash_entries().await
4235 {
4236 let snapshot = this.update(&mut cx, |this, cx| {
4237 this.snapshot.stash_entries = stash_entries;
4238 cx.emit(RepositoryEvent::StashEntriesChanged);
4239 this.snapshot.clone()
4240 })?;
4241 if let Some(updates_tx) = updates_tx {
4242 updates_tx
4243 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4244 .ok();
4245 }
4246 }
4247
4248 result
4249 }
4250 RepositoryState::Remote { project_id, client } => {
4251 client
4252 .request(proto::StashDrop {
4253 project_id: project_id.0,
4254 repository_id: id.to_proto(),
4255 stash_index: index.map(|i| i as u64),
4256 })
4257 .await
4258 .context("sending stash drop request")?;
4259 Ok(())
4260 }
4261 }
4262 })
4263 }
4264
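/// Creates a commit with the given message and options, optionally overriding the
/// configured name and email; askpass prompts are routed through the given delegate.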
4265 pub fn commit(
4266 &mut self,
4267 message: SharedString,
4268 name_and_email: Option<(SharedString, SharedString)>,
4269 options: CommitOptions,
4270 askpass: AskPassDelegate,
4271 _cx: &mut App,
4272 ) -> oneshot::Receiver<Result<()>> {
4273 let id = self.id;
4274 let askpass_delegates = self.askpass_delegates.clone();
4275 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4276
4277 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4278 match git_repo {
4279 RepositoryState::Local {
4280 backend,
4281 environment,
4282 ..
4283 } => {
4284 backend
4285 .commit(message, name_and_email, options, askpass, environment)
4286 .await
4287 }
4288 RepositoryState::Remote { project_id, client } => {
4289 askpass_delegates.lock().insert(askpass_id, askpass);
4290 let _defer = util::defer(|| {
4291 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4292 debug_assert!(askpass_delegate.is_some());
4293 });
4294 let (name, email) = name_and_email.unzip();
4295 client
4296 .request(proto::Commit {
4297 project_id: project_id.0,
4298 repository_id: id.to_proto(),
4299 message: String::from(message),
4300 name: name.map(String::from),
4301 email: email.map(String::from),
4302 options: Some(proto::commit::CommitOptions {
4303 amend: options.amend,
4304 signoff: options.signoff,
4305 }),
4306 askpass_id,
4307 })
4308 .await
4309 .context("sending commit request")?;
4310
4311 Ok(())
4312 }
4313 }
4314 })
4315 }
4316
4317 pub fn fetch(
4318 &mut self,
4319 fetch_options: FetchOptions,
4320 askpass: AskPassDelegate,
4321 _cx: &mut App,
4322 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4323 let askpass_delegates = self.askpass_delegates.clone();
4324 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4325 let id = self.id;
4326
4327 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4328 match git_repo {
4329 RepositoryState::Local {
4330 backend,
4331 environment,
4332 ..
4333 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4334 RepositoryState::Remote { project_id, client } => {
4335 askpass_delegates.lock().insert(askpass_id, askpass);
4336 let _defer = util::defer(|| {
4337 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4338 debug_assert!(askpass_delegate.is_some());
4339 });
4340
4341 let response = client
4342 .request(proto::Fetch {
4343 project_id: project_id.0,
4344 repository_id: id.to_proto(),
4345 askpass_id,
4346 remote: fetch_options.to_proto(),
4347 })
4348 .await
4349 .context("sending fetch request")?;
4350
4351 Ok(RemoteCommandOutput {
4352 stdout: response.stdout,
4353 stderr: response.stderr,
4354 })
4355 }
4356 }
4357 })
4358 }
4359
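/// Pushes `branch` to `remote` with the given options. For local repositories the
/// cached head branch is refreshed on success and the updated snapshot is forwarded
/// to any downstream clients.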
4360 pub fn push(
4361 &mut self,
4362 branch: SharedString,
4363 remote: SharedString,
4364 options: Option<PushOptions>,
4365 askpass: AskPassDelegate,
4366 cx: &mut Context<Self>,
4367 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4368 let askpass_delegates = self.askpass_delegates.clone();
4369 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4370 let id = self.id;
4371
4372 let args = options
4373 .map(|option| match option {
4374 PushOptions::SetUpstream => " --set-upstream",
4375 PushOptions::Force => " --force-with-lease",
4376 })
4377 .unwrap_or("");
4378
4379 let updates_tx = self
4380 .git_store()
4381 .and_then(|git_store| match &git_store.read(cx).state {
4382 GitStoreState::Local { downstream, .. } => downstream
4383 .as_ref()
4384 .map(|downstream| downstream.updates_tx.clone()),
4385 _ => None,
4386 });
4387
4388 let this = cx.weak_entity();
4389 self.send_job(
4390 Some(format!("git push{} {} {}", args, remote, branch).into()),
4391 move |git_repo, mut cx| async move {
4392 match git_repo {
4393 RepositoryState::Local {
4394 backend,
4395 environment,
4396 ..
4397 } => {
4398 let result = backend
4399 .push(
4400 branch.to_string(),
4401 remote.to_string(),
4402 options,
4403 askpass,
4404 environment.clone(),
4405 cx.clone(),
4406 )
4407 .await;
4408 // TODO: it would be nice not to have to do this manually.
4409 if result.is_ok() {
4410 let branches = backend.branches().await?;
4411 let branch = branches.into_iter().find(|branch| branch.is_head);
4412 log::info!("head branch after scan is {branch:?}");
4413 let snapshot = this.update(&mut cx, |this, cx| {
4414 this.snapshot.branch = branch;
4415 cx.emit(RepositoryEvent::BranchChanged);
4416 this.snapshot.clone()
4417 })?;
4418 if let Some(updates_tx) = updates_tx {
4419 updates_tx
4420 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4421 .ok();
4422 }
4423 }
4424 result
4425 }
4426 RepositoryState::Remote { project_id, client } => {
4427 askpass_delegates.lock().insert(askpass_id, askpass);
4428 let _defer = util::defer(|| {
4429 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4430 debug_assert!(askpass_delegate.is_some());
4431 });
4432 let response = client
4433 .request(proto::Push {
4434 project_id: project_id.0,
4435 repository_id: id.to_proto(),
4436 askpass_id,
4437 branch_name: branch.to_string(),
4438 remote_name: remote.to_string(),
4439 options: options.map(|options| match options {
4440 PushOptions::Force => proto::push::PushOptions::Force,
4441 PushOptions::SetUpstream => {
4442 proto::push::PushOptions::SetUpstream
4443 }
4444 }
4445 as i32),
4446 })
4447 .await
4448 .context("sending push request")?;
4449
4450 Ok(RemoteCommandOutput {
4451 stdout: response.stdout,
4452 stderr: response.stderr,
4453 })
4454 }
4455 }
4456 },
4457 )
4458 }
4459
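/// Pulls from `remote`, optionally rebasing and optionally restricting to `branch`.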
4460 pub fn pull(
4461 &mut self,
4462 branch: Option<SharedString>,
4463 remote: SharedString,
4464 rebase: bool,
4465 askpass: AskPassDelegate,
4466 _cx: &mut App,
4467 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4468 let askpass_delegates = self.askpass_delegates.clone();
4469 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4470 let id = self.id;
4471
4472 let mut status = "git pull".to_string();
4473 if rebase {
4474 status.push_str(" --rebase");
4475 }
4476 status.push_str(&format!(" {}", remote));
4477 if let Some(b) = &branch {
4478 status.push_str(&format!(" {}", b));
4479 }
4480
4481 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4482 match git_repo {
4483 RepositoryState::Local {
4484 backend,
4485 environment,
4486 ..
4487 } => {
4488 backend
4489 .pull(
4490 branch.as_ref().map(|b| b.to_string()),
4491 remote.to_string(),
4492 rebase,
4493 askpass,
4494 environment.clone(),
4495 cx,
4496 )
4497 .await
4498 }
4499 RepositoryState::Remote { project_id, client } => {
4500 askpass_delegates.lock().insert(askpass_id, askpass);
4501 let _defer = util::defer(|| {
4502 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4503 debug_assert!(askpass_delegate.is_some());
4504 });
4505 let response = client
4506 .request(proto::Pull {
4507 project_id: project_id.0,
4508 repository_id: id.to_proto(),
4509 askpass_id,
4510 rebase,
4511 branch_name: branch.as_ref().map(|b| b.to_string()),
4512 remote_name: remote.to_string(),
4513 })
4514 .await
4515 .context("sending pull request")?;
4516
4517 Ok(RemoteCommandOutput {
4518 stdout: response.stdout,
4519 stderr: response.stderr,
4520 })
4521 }
4522 }
4523 })
4524 }
4525
4526 fn spawn_set_index_text_job(
4527 &mut self,
4528 path: RepoPath,
4529 content: Option<String>,
4530 hunk_staging_operation_count: Option<usize>,
4531 cx: &mut Context<Self>,
4532 ) -> oneshot::Receiver<anyhow::Result<()>> {
4533 let id = self.id;
4534 let this = cx.weak_entity();
4535 let git_store = self.git_store.clone();
4536 self.send_keyed_job(
4537 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4538 None,
4539 move |git_repo, mut cx| async move {
4540 log::debug!(
4541 "start updating index text for buffer {}",
4542 path.as_unix_str()
4543 );
4544 match git_repo {
4545 RepositoryState::Local {
4546 backend,
4547 environment,
4548 ..
4549 } => {
4550 backend
4551 .set_index_text(path.clone(), content, environment.clone())
4552 .await?;
4553 }
4554 RepositoryState::Remote { project_id, client } => {
4555 client
4556 .request(proto::SetIndexText {
4557 project_id: project_id.0,
4558 repository_id: id.to_proto(),
4559 path: path.to_proto(),
4560 text: content,
4561 })
4562 .await?;
4563 }
4564 }
4565 log::debug!(
4566 "finish updating index text for buffer {}",
4567 path.as_unix_str()
4568 );
4569
4570 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4571 let project_path = this
4572 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4573 .ok()
4574 .flatten();
4575 git_store.update(&mut cx, |git_store, cx| {
4576 let buffer_id = git_store
4577 .buffer_store
4578 .read(cx)
4579 .get_by_path(&project_path?)?
4580 .read(cx)
4581 .remote_id();
4582 let diff_state = git_store.diffs.get(&buffer_id)?;
4583 diff_state.update(cx, |diff_state, _| {
4584 diff_state.hunk_staging_operation_count_as_of_write =
4585 hunk_staging_operation_count;
4586 });
4587 Some(())
4588 })?;
4589 }
4590 Ok(())
4591 },
4592 )
4593 }
4594
4595 pub fn get_remotes(
4596 &mut self,
4597 branch_name: Option<String>,
4598 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4599 let id = self.id;
4600 self.send_job(None, move |repo, _cx| async move {
4601 match repo {
4602 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4603 RepositoryState::Remote { project_id, client } => {
4604 let response = client
4605 .request(proto::GetRemotes {
4606 project_id: project_id.0,
4607 repository_id: id.to_proto(),
4608 branch_name,
4609 })
4610 .await?;
4611
4612 let remotes = response
4613 .remotes
4614 .into_iter()
4615 .map(|remote| git::repository::Remote {
4616 name: remote.name.into(),
4617 })
4618 .collect();
4619
4620 Ok(remotes)
4621 }
4622 }
4623 })
4624 }
4625
4626 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4627 let id = self.id;
4628 self.send_job(None, move |repo, _| async move {
4629 match repo {
4630 RepositoryState::Local { backend, .. } => backend.branches().await,
4631 RepositoryState::Remote { project_id, client } => {
4632 let response = client
4633 .request(proto::GitGetBranches {
4634 project_id: project_id.0,
4635 repository_id: id.to_proto(),
4636 })
4637 .await?;
4638
4639 let branches = response
4640 .branches
4641 .into_iter()
4642 .map(|branch| proto_to_branch(&branch))
4643 .collect();
4644
4645 Ok(branches)
4646 }
4647 }
4648 })
4649 }
4650
4651 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4652 let id = self.id;
4653 self.send_job(None, move |repo, _| async move {
4654 match repo {
4655 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4656 RepositoryState::Remote { project_id, client } => {
4657 let response = client
4658 .request(proto::GitGetWorktrees {
4659 project_id: project_id.0,
4660 repository_id: id.to_proto(),
4661 })
4662 .await?;
4663
4664 let worktrees = response
4665 .worktrees
4666 .into_iter()
4667 .map(|worktree| proto_to_worktree(&worktree))
4668 .collect();
4669
4670 Ok(worktrees)
4671 }
4672 }
4673 })
4674 }
4675
4676 pub fn create_worktree(
4677 &mut self,
4678 name: String,
4679 path: PathBuf,
4680 commit: Option<String>,
4681 ) -> oneshot::Receiver<Result<()>> {
4682 let id = self.id;
4683 self.send_job(
4684 Some("git worktree add".into()),
4685 move |repo, _cx| async move {
4686 match repo {
4687 RepositoryState::Local { backend, .. } => {
4688 backend.create_worktree(name, path, commit).await
4689 }
4690 RepositoryState::Remote { project_id, client } => {
4691 client
4692 .request(proto::GitCreateWorktree {
4693 project_id: project_id.0,
4694 repository_id: id.to_proto(),
4695 name,
4696 directory: path.to_string_lossy().to_string(),
4697 commit,
4698 })
4699 .await?;
4700
4701 Ok(())
4702 }
4703 }
4704 },
4705 )
4706 }
4707
4708 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4709 let id = self.id;
4710 self.send_job(None, move |repo, _| async move {
4711 match repo {
4712 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4713 RepositoryState::Remote { project_id, client } => {
4714 let response = client
4715 .request(proto::GetDefaultBranch {
4716 project_id: project_id.0,
4717 repository_id: id.to_proto(),
4718 })
4719 .await?;
4720
4721 anyhow::Ok(response.branch.map(SharedString::from))
4722 }
4723 }
4724 })
4725 }
4726
4727 pub fn diff_tree(
4728 &mut self,
4729 diff_type: DiffTreeType,
4730 _cx: &App,
4731 ) -> oneshot::Receiver<Result<TreeDiff>> {
4732 let repository_id = self.snapshot.id;
4733 self.send_job(None, move |repo, _cx| async move {
4734 match repo {
4735 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4736 RepositoryState::Remote { client, project_id } => {
4737 let response = client
4738 .request(proto::GetTreeDiff {
4739 project_id: project_id.0,
4740 repository_id: repository_id.0,
4741 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4742 base: diff_type.base().to_string(),
4743 head: diff_type.head().to_string(),
4744 })
4745 .await?;
4746
4747 let entries = response
4748 .entries
4749 .into_iter()
4750 .filter_map(|entry| {
4751 let status = match entry.status() {
4752 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4753 proto::tree_diff_status::Status::Modified => {
4754 TreeDiffStatus::Modified {
4755 old: git::Oid::from_str(
4756 &entry.oid.context("missing oid").log_err()?,
4757 )
4758 .log_err()?,
4759 }
4760 }
4761 proto::tree_diff_status::Status::Deleted => {
4762 TreeDiffStatus::Deleted {
4763 old: git::Oid::from_str(
4764 &entry.oid.context("missing oid").log_err()?,
4765 )
4766 .log_err()?,
4767 }
4768 }
4769 };
4770 Some((
4771 RepoPath::from_rel_path(
4772 &RelPath::from_proto(&entry.path).log_err()?,
4773 ),
4774 status,
4775 ))
4776 })
4777 .collect();
4778
4779 Ok(TreeDiff { entries })
4780 }
4781 }
4782 })
4783 }
4784
4785 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4786 let id = self.id;
4787 self.send_job(None, move |repo, _cx| async move {
4788 match repo {
4789 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4790 RepositoryState::Remote { project_id, client } => {
4791 let response = client
4792 .request(proto::GitDiff {
4793 project_id: project_id.0,
4794 repository_id: id.to_proto(),
4795 diff_type: match diff_type {
4796 DiffType::HeadToIndex => {
4797 proto::git_diff::DiffType::HeadToIndex.into()
4798 }
4799 DiffType::HeadToWorktree => {
4800 proto::git_diff::DiffType::HeadToWorktree.into()
4801 }
4802 },
4803 })
4804 .await?;
4805
4806 Ok(response.diff)
4807 }
4808 }
4809 })
4810 }
4811
4812 pub fn create_branch(
4813 &mut self,
4814 branch_name: String,
4815 base_branch: Option<String>,
4816 ) -> oneshot::Receiver<Result<()>> {
4817 let id = self.id;
4818 let status_msg = if let Some(ref base) = base_branch {
4819 format!("git switch -c {branch_name} {base}").into()
4820 } else {
4821 format!("git switch -c {branch_name}").into()
4822 };
4823 self.send_job(Some(status_msg), move |repo, _cx| async move {
4824 match repo {
4825 RepositoryState::Local { backend, .. } => {
4826 backend.create_branch(branch_name, base_branch).await
4827 }
4828 RepositoryState::Remote { project_id, client } => {
4829 client
4830 .request(proto::GitCreateBranch {
4831 project_id: project_id.0,
4832 repository_id: id.to_proto(),
4833 branch_name,
4834 })
4835 .await?;
4836
4837 Ok(())
4838 }
4839 }
4840 })
4841 }
4842
4843 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4844 let id = self.id;
4845 self.send_job(
4846 Some(format!("git switch {branch_name}").into()),
4847 move |repo, _cx| async move {
4848 match repo {
4849 RepositoryState::Local { backend, .. } => {
4850 backend.change_branch(branch_name).await
4851 }
4852 RepositoryState::Remote { project_id, client } => {
4853 client
4854 .request(proto::GitChangeBranch {
4855 project_id: project_id.0,
4856 repository_id: id.to_proto(),
4857 branch_name,
4858 })
4859 .await?;
4860
4861 Ok(())
4862 }
4863 }
4864 },
4865 )
4866 }
4867
4868 pub fn rename_branch(
4869 &mut self,
4870 branch: String,
4871 new_name: String,
4872 ) -> oneshot::Receiver<Result<()>> {
4873 let id = self.id;
4874 self.send_job(
4875 Some(format!("git branch -m {branch} {new_name}").into()),
4876 move |repo, _cx| async move {
4877 match repo {
4878 RepositoryState::Local { backend, .. } => {
4879 backend.rename_branch(branch, new_name).await
4880 }
4881 RepositoryState::Remote { project_id, client } => {
4882 client
4883 .request(proto::GitRenameBranch {
4884 project_id: project_id.0,
4885 repository_id: id.to_proto(),
4886 branch,
4887 new_name,
4888 })
4889 .await?;
4890
4891 Ok(())
4892 }
4893 }
4894 },
4895 )
4896 }
4897
4898 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4899 let id = self.id;
4900 self.send_job(None, move |repo, _cx| async move {
4901 match repo {
4902 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4903 RepositoryState::Remote { project_id, client } => {
4904 let response = client
4905 .request(proto::CheckForPushedCommits {
4906 project_id: project_id.0,
4907 repository_id: id.to_proto(),
4908 })
4909 .await?;
4910
4911 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4912
4913 Ok(branches)
4914 }
4915 }
4916 })
4917 }
4918
4919 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4920 self.send_job(None, |repo, _cx| async move {
4921 match repo {
4922 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4923 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4924 }
4925 })
4926 }
4927
4928 pub fn restore_checkpoint(
4929 &mut self,
4930 checkpoint: GitRepositoryCheckpoint,
4931 ) -> oneshot::Receiver<Result<()>> {
4932 self.send_job(None, move |repo, _cx| async move {
4933 match repo {
4934 RepositoryState::Local { backend, .. } => {
4935 backend.restore_checkpoint(checkpoint).await
4936 }
4937 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4938 }
4939 })
4940 }
4941
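/// Applies an `UpdateRepository` message from the host to the local snapshot,
/// emitting change events for the branch, stash, and statuses as appropriate and
/// dropping pending ops whose jobs are no longer running.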
4942 pub(crate) fn apply_remote_update(
4943 &mut self,
4944 update: proto::UpdateRepository,
4945 cx: &mut Context<Self>,
4946 ) -> Result<()> {
4947 let conflicted_paths = TreeSet::from_ordered_entries(
4948 update
4949 .current_merge_conflicts
4950 .into_iter()
4951 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4952 );
4953 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4954 let new_head_commit = update
4955 .head_commit_details
4956 .as_ref()
4957 .map(proto_to_commit_details);
4958 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4959 cx.emit(RepositoryEvent::BranchChanged)
4960 }
4961 self.snapshot.branch = new_branch;
4962 self.snapshot.head_commit = new_head_commit;
4963
4964 self.snapshot.merge.conflicted_paths = conflicted_paths;
4965 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4966 let new_stash_entries = GitStash {
4967 entries: update
4968 .stash_entries
4969 .iter()
4970 .filter_map(|entry| proto_to_stash(entry).ok())
4971 .collect(),
4972 };
4973 if self.snapshot.stash_entries != new_stash_entries {
4974 cx.emit(RepositoryEvent::StashEntriesChanged)
4975 }
4976 self.snapshot.stash_entries = new_stash_entries;
4977
4978 let edits = update
4979 .removed_statuses
4980 .into_iter()
4981 .filter_map(|path| {
4982 Some(sum_tree::Edit::Remove(PathKey(
4983 RelPath::from_proto(&path).log_err()?,
4984 )))
4985 })
4986 .chain(
4987 update
4988 .updated_statuses
4989 .into_iter()
4990 .filter_map(|updated_status| {
4991 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4992 }),
4993 )
4994 .collect::<Vec<_>>();
4995 if !edits.is_empty() {
4996 cx.emit(RepositoryEvent::StatusesChanged);
4997 }
4998 self.snapshot.statuses_by_path.edit(edits, ());
4999 if update.is_last_update {
5000 self.snapshot.scan_id = update.scan_id;
5001 }
5002 self.clear_pending_ops(cx);
5003 Ok(())
5004 }
5005
5006 pub fn compare_checkpoints(
5007 &mut self,
5008 left: GitRepositoryCheckpoint,
5009 right: GitRepositoryCheckpoint,
5010 ) -> oneshot::Receiver<Result<bool>> {
5011 self.send_job(None, move |repo, _cx| async move {
5012 match repo {
5013 RepositoryState::Local { backend, .. } => {
5014 backend.compare_checkpoints(left, right).await
5015 }
5016 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5017 }
5018 })
5019 }
5020
5021 pub fn diff_checkpoints(
5022 &mut self,
5023 base_checkpoint: GitRepositoryCheckpoint,
5024 target_checkpoint: GitRepositoryCheckpoint,
5025 ) -> oneshot::Receiver<Result<String>> {
5026 self.send_job(None, move |repo, _cx| async move {
5027 match repo {
5028 RepositoryState::Local { backend, .. } => {
5029 backend
5030 .diff_checkpoints(base_checkpoint, target_checkpoint)
5031 .await
5032 }
5033 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5034 }
5035 })
5036 }
5037
5038 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5039 let updated = SumTree::from_iter(
5040 self.pending_ops.iter().filter_map(|ops| {
5041 let inner_ops: Vec<PendingOp> =
5042 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5043 if inner_ops.is_empty() {
5044 None
5045 } else {
5046 Some(PendingOps {
5047 repo_path: ops.repo_path.clone(),
5048 ops: inner_ops,
5049 })
5050 }
5051 }),
5052 (),
5053 );
5054
5055 if updated != self.pending_ops {
5056 cx.emit(RepositoryEvent::PendingOpsChanged {
5057 pending_ops: self.pending_ops.clone(),
5058 })
5059 }
5060
5061 self.pending_ops = updated;
5062 }
5063
5064 fn schedule_scan(
5065 &mut self,
5066 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5067 cx: &mut Context<Self>,
5068 ) {
5069 let this = cx.weak_entity();
5070 let _ = self.send_keyed_job(
5071 Some(GitJobKey::ReloadGitState),
5072 None,
5073 |state, mut cx| async move {
5074 log::debug!("run scheduled git status scan");
5075
5076 let Some(this) = this.upgrade() else {
5077 return Ok(());
5078 };
5079 let RepositoryState::Local { backend, .. } = state else {
5080 bail!("not a local repository")
5081 };
5082 let (snapshot, events) = this
5083 .update(&mut cx, |this, _| {
5084 this.paths_needing_status_update.clear();
5085 compute_snapshot(
5086 this.id,
5087 this.work_directory_abs_path.clone(),
5088 this.snapshot.clone(),
5089 backend.clone(),
5090 )
5091 })?
5092 .await?;
5093 this.update(&mut cx, |this, cx| {
5094 this.snapshot = snapshot.clone();
5095 this.clear_pending_ops(cx);
5096 for event in events {
5097 cx.emit(event);
5098 }
5099 })?;
5100 if let Some(updates_tx) = updates_tx {
5101 updates_tx
5102 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5103 .ok();
5104 }
5105 Ok(())
5106 },
5107 );
5108 }
5109
5110 fn spawn_local_git_worker(
5111 work_directory_abs_path: Arc<Path>,
5112 dot_git_abs_path: Arc<Path>,
5113 _repository_dir_abs_path: Arc<Path>,
5114 _common_dir_abs_path: Arc<Path>,
5115 project_environment: WeakEntity<ProjectEnvironment>,
5116 fs: Arc<dyn Fs>,
5117 cx: &mut Context<Self>,
5118 ) -> mpsc::UnboundedSender<GitJob> {
5119 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5120
5121 cx.spawn(async move |_, cx| {
5122 let environment = project_environment
5123 .upgrade()
5124 .context("missing project environment")?
5125 .update(cx, |project_environment, cx| {
5126 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
5127 })?
5128 .await
5129 .unwrap_or_else(|| {
5130 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
5131 HashMap::default()
5132 });
5133 let search_paths = environment.get("PATH").map(|val| val.to_owned());
5134 let backend = cx
5135 .background_spawn(async move {
5136 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
5137 .or_else(|| which::which("git").ok());
5138 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
5139 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
5140 })
5141 .await?;
5142
5143 if let Some(git_hosting_provider_registry) =
5144 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5145 {
5146 git_hosting_providers::register_additional_providers(
5147 git_hosting_provider_registry,
5148 backend.clone(),
5149 );
5150 }
5151
5152 let state = RepositoryState::Local {
5153 backend,
5154 environment: Arc::new(environment),
5155 };
5156 let mut jobs = VecDeque::new();
5157 loop {
5158 while let Ok(Some(next_job)) = job_rx.try_next() {
5159 jobs.push_back(next_job);
5160 }
5161
5162 if let Some(job) = jobs.pop_front() {
5163 if let Some(current_key) = &job.key
5164 && jobs
5165 .iter()
5166 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5167 {
5168 continue;
5169 }
5170 (job.job)(state.clone(), cx).await;
5171 } else if let Some(job) = job_rx.next().await {
5172 jobs.push_back(job);
5173 } else {
5174 break;
5175 }
5176 }
5177 anyhow::Ok(())
5178 })
5179 .detach_and_log_err(cx);
5180
5181 job_tx
5182 }
5183
5184 fn spawn_remote_git_worker(
5185 project_id: ProjectId,
5186 client: AnyProtoClient,
5187 cx: &mut Context<Self>,
5188 ) -> mpsc::UnboundedSender<GitJob> {
5189 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5190
5191 cx.spawn(async move |_, cx| {
5192 let state = RepositoryState::Remote { project_id, client };
5193 let mut jobs = VecDeque::new();
5194 loop {
5195 while let Ok(Some(next_job)) = job_rx.try_next() {
5196 jobs.push_back(next_job);
5197 }
5198
5199 if let Some(job) = jobs.pop_front() {
5200 if let Some(current_key) = &job.key
5201 && jobs
5202 .iter()
5203 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5204 {
5205 continue;
5206 }
5207 (job.job)(state.clone(), cx).await;
5208 } else if let Some(job) = job_rx.next().await {
5209 jobs.push_back(job);
5210 } else {
5211 break;
5212 }
5213 }
5214 anyhow::Ok(())
5215 })
5216 .detach_and_log_err(cx);
5217
5218 job_tx
5219 }
5220
5221 fn load_staged_text(
5222 &mut self,
5223 buffer_id: BufferId,
5224 repo_path: RepoPath,
5225 cx: &App,
5226 ) -> Task<Result<Option<String>>> {
5227 let rx = self.send_job(None, move |state, _| async move {
5228 match state {
5229 RepositoryState::Local { backend, .. } => {
5230 anyhow::Ok(backend.load_index_text(repo_path).await)
5231 }
5232 RepositoryState::Remote { project_id, client } => {
5233 let response = client
5234 .request(proto::OpenUnstagedDiff {
5235 project_id: project_id.to_proto(),
5236 buffer_id: buffer_id.to_proto(),
5237 })
5238 .await?;
5239 Ok(response.staged_text)
5240 }
5241 }
5242 });
5243 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5244 }
5245
5246 fn load_committed_text(
5247 &mut self,
5248 buffer_id: BufferId,
5249 repo_path: RepoPath,
5250 cx: &App,
5251 ) -> Task<Result<DiffBasesChange>> {
5252 let rx = self.send_job(None, move |state, _| async move {
5253 match state {
5254 RepositoryState::Local { backend, .. } => {
5255 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5256 let staged_text = backend.load_index_text(repo_path).await;
5257 let diff_bases_change = if committed_text == staged_text {
5258 DiffBasesChange::SetBoth(committed_text)
5259 } else {
5260 DiffBasesChange::SetEach {
5261 index: staged_text,
5262 head: committed_text,
5263 }
5264 };
5265 anyhow::Ok(diff_bases_change)
5266 }
5267 RepositoryState::Remote { project_id, client } => {
5268 use proto::open_uncommitted_diff_response::Mode;
5269
5270 let response = client
5271 .request(proto::OpenUncommittedDiff {
5272 project_id: project_id.to_proto(),
5273 buffer_id: buffer_id.to_proto(),
5274 })
5275 .await?;
5276 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5277 let bases = match mode {
5278 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5279 Mode::IndexAndHead => DiffBasesChange::SetEach {
5280 head: response.committed_text,
5281 index: response.staged_text,
5282 },
5283 };
5284 Ok(bases)
5285 }
5286 }
5287 });
5288
5289 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5290 }
5291 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5292 let repository_id = self.snapshot.id;
5293 let rx = self.send_job(None, move |state, _| async move {
5294 match state {
5295 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5296 RepositoryState::Remote { client, project_id } => {
5297 let response = client
5298 .request(proto::GetBlobContent {
5299 project_id: project_id.to_proto(),
5300 repository_id: repository_id.0,
5301 oid: oid.to_string(),
5302 })
5303 .await?;
5304 Ok(response.content)
5305 }
5306 }
5307 });
5308 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5309 }
5310
5311 fn paths_changed(
5312 &mut self,
5313 paths: Vec<RepoPath>,
5314 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5315 cx: &mut Context<Self>,
5316 ) {
5317 self.paths_needing_status_update.extend(paths);
5318
5319 let this = cx.weak_entity();
5320 let _ = self.send_keyed_job(
5321 Some(GitJobKey::RefreshStatuses),
5322 None,
5323 |state, mut cx| async move {
5324 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5325 (
5326 this.snapshot.clone(),
5327 mem::take(&mut this.paths_needing_status_update),
5328 )
5329 })?;
5330 let RepositoryState::Local { backend, .. } = state else {
5331 bail!("not a local repository")
5332 };
5333
5334 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5335 if paths.is_empty() {
5336 return Ok(());
5337 }
5338 let statuses = backend.status(&paths).await?;
5339 let stash_entries = backend.stash_entries().await?;
5340
5341 let changed_path_statuses = cx
5342 .background_spawn(async move {
5343 let mut changed_path_statuses = Vec::new();
5344 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5345 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5346
5347 for (repo_path, status) in &*statuses.entries {
5348 changed_paths.remove(repo_path);
5349 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5350 && cursor.item().is_some_and(|entry| entry.status == *status)
5351 {
5352 continue;
5353 }
5354
5355 changed_path_statuses.push(Edit::Insert(StatusEntry {
5356 repo_path: repo_path.clone(),
5357 status: *status,
5358 }));
5359 }
5360 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5361 for path in changed_paths.into_iter() {
5362 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5363 changed_path_statuses
5364 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5365 }
5366 }
5367 changed_path_statuses
5368 })
5369 .await;
5370
5371 this.update(&mut cx, |this, cx| {
5372 if this.snapshot.stash_entries != stash_entries {
5373 cx.emit(RepositoryEvent::StashEntriesChanged);
5374 this.snapshot.stash_entries = stash_entries;
5375 }
5376
5377 if !changed_path_statuses.is_empty() {
5378 cx.emit(RepositoryEvent::StatusesChanged);
5379 this.snapshot
5380 .statuses_by_path
5381 .edit(changed_path_statuses, ());
5382 this.snapshot.scan_id += 1;
5383 }
5384
5385 if let Some(updates_tx) = updates_tx {
5386 updates_tx
5387 .unbounded_send(DownstreamUpdate::UpdateRepository(
5388 this.snapshot.clone(),
5389 ))
5390 .ok();
5391 }
5392 })
5393 },
5394 );
5395 }
5396
5397 /// Returns the currently running git command and when it started, if any.
5398 pub fn current_job(&self) -> Option<JobInfo> {
5399 self.active_jobs.values().next().cloned()
5400 }
5401
5402 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5403 self.send_job(None, |_, _| async {})
5404 }
5405
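/// Registers a pending op with `git_status` for each path, runs `f`, and then marks
/// the ops as finished, skipped (when the underlying job was canceled), or errored.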
5406 fn spawn_job_with_tracking<AsyncFn>(
5407 &mut self,
5408 paths: Vec<RepoPath>,
5409 git_status: pending_op::GitStatus,
5410 cx: &mut Context<Self>,
5411 f: AsyncFn,
5412 ) -> Task<Result<()>>
5413 where
5414 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5415 {
5416 let ids = self.new_pending_ops_for_paths(paths, git_status);
5417
5418 cx.spawn(async move |this, cx| {
5419 let (job_status, result) = match f(this.clone(), cx).await {
5420 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5421 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5422 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5423 };
5424
5425 this.update(cx, |this, _| {
5426 let mut edits = Vec::with_capacity(ids.len());
5427 for (id, entry) in ids {
5428 if let Some(mut ops) = this
5429 .pending_ops
5430 .get(&PathKey(entry.as_ref().clone()), ())
5431 .cloned()
5432 {
5433 if let Some(op) = ops.op_by_id_mut(id) {
5434 op.job_status = job_status;
5435 }
5436 edits.push(sum_tree::Edit::Insert(ops));
5437 }
5438 }
5439 this.pending_ops.edit(edits, ());
5440 })?;
5441
5442 result
5443 })
5444 }
5445
5446 fn new_pending_ops_for_paths(
5447 &mut self,
5448 paths: Vec<RepoPath>,
5449 git_status: pending_op::GitStatus,
5450 ) -> Vec<(PendingOpId, RepoPath)> {
5451 let mut edits = Vec::with_capacity(paths.len());
5452 let mut ids = Vec::with_capacity(paths.len());
5453 for path in paths {
5454 let mut ops = self
5455 .pending_ops
5456 .get(&PathKey(path.as_ref().clone()), ())
5457 .cloned()
5458 .unwrap_or_else(|| PendingOps::new(&path));
5459 let id = ops.max_id() + 1;
5460 ops.ops.push(PendingOp {
5461 id,
5462 git_status,
5463 job_status: pending_op::JobStatus::Running,
5464 });
5465 edits.push(sum_tree::Edit::Insert(ops));
5466 ids.push((id, path));
5467 }
5468 self.pending_ops.edit(edits, ());
5469 ids
5470 }
5471}
5472
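/// Builds a permalink to a file inside a crate extracted from a Cargo registry by
/// reading the crate's `.cargo_vcs_info.json` and `Cargo.toml` to recover its source
/// repository and commit.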
5473fn get_permalink_in_rust_registry_src(
5474 provider_registry: Arc<GitHostingProviderRegistry>,
5475 path: PathBuf,
5476 selection: Range<u32>,
5477) -> Result<url::Url> {
5478 #[derive(Deserialize)]
5479 struct CargoVcsGit {
5480 sha1: String,
5481 }
5482
5483 #[derive(Deserialize)]
5484 struct CargoVcsInfo {
5485 git: CargoVcsGit,
5486 path_in_vcs: String,
5487 }
5488
5489 #[derive(Deserialize)]
5490 struct CargoPackage {
5491 repository: String,
5492 }
5493
5494 #[derive(Deserialize)]
5495 struct CargoToml {
5496 package: CargoPackage,
5497 }
5498
5499 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5500 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5501 Some((dir, json))
5502 }) else {
5503 bail!("No .cargo_vcs_info.json found in parent directories")
5504 };
5505 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5506 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5507 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5508 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5509 .context("parsing package.repository field of manifest")?;
5510 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5511 let permalink = provider.build_permalink(
5512 remote,
5513 BuildPermalinkParams::new(
5514 &cargo_vcs_info.git.sha1,
5515 &RepoPath::from_rel_path(
5516 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5517 ),
5518 Some(selection),
5519 ),
5520 );
5521 Ok(permalink)
5522}
5523
5524fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5525 let Some(blame) = blame else {
5526 return proto::BlameBufferResponse {
5527 blame_response: None,
5528 };
5529 };
5530
5531 let entries = blame
5532 .entries
5533 .into_iter()
5534 .map(|entry| proto::BlameEntry {
5535 sha: entry.sha.as_bytes().into(),
5536 start_line: entry.range.start,
5537 end_line: entry.range.end,
5538 original_line_number: entry.original_line_number,
5539 author: entry.author,
5540 author_mail: entry.author_mail,
5541 author_time: entry.author_time,
5542 author_tz: entry.author_tz,
5543 committer: entry.committer_name,
5544 committer_mail: entry.committer_email,
5545 committer_time: entry.committer_time,
5546 committer_tz: entry.committer_tz,
5547 summary: entry.summary,
5548 previous: entry.previous,
5549 filename: entry.filename,
5550 })
5551 .collect::<Vec<_>>();
5552
5553 let messages = blame
5554 .messages
5555 .into_iter()
5556 .map(|(oid, message)| proto::CommitMessage {
5557 oid: oid.as_bytes().into(),
5558 message,
5559 })
5560 .collect::<Vec<_>>();
5561
5562 proto::BlameBufferResponse {
5563 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5564 entries,
5565 messages,
5566 remote_url: blame.remote_url,
5567 }),
5568 }
5569}
5570
5571fn deserialize_blame_buffer_response(
5572 response: proto::BlameBufferResponse,
5573) -> Option<git::blame::Blame> {
5574 let response = response.blame_response?;
5575 let entries = response
5576 .entries
5577 .into_iter()
5578 .filter_map(|entry| {
5579 Some(git::blame::BlameEntry {
5580 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5581 range: entry.start_line..entry.end_line,
5582 original_line_number: entry.original_line_number,
5583 committer_name: entry.committer,
5584 committer_time: entry.committer_time,
5585 committer_tz: entry.committer_tz,
5586 committer_email: entry.committer_mail,
5587 author: entry.author,
5588 author_mail: entry.author_mail,
5589 author_time: entry.author_time,
5590 author_tz: entry.author_tz,
5591 summary: entry.summary,
5592 previous: entry.previous,
5593 filename: entry.filename,
5594 })
5595 })
5596 .collect::<Vec<_>>();
5597
5598 let messages = response
5599 .messages
5600 .into_iter()
5601 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5602 .collect::<HashMap<_, _>>();
5603
5604 Some(Blame {
5605 entries,
5606 messages,
5607 remote_url: response.remote_url,
5608 })
5609}
5610
5611fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5612 proto::Branch {
5613 is_head: branch.is_head,
5614 ref_name: branch.ref_name.to_string(),
5615 unix_timestamp: branch
5616 .most_recent_commit
5617 .as_ref()
5618 .map(|commit| commit.commit_timestamp as u64),
5619 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5620 ref_name: upstream.ref_name.to_string(),
5621 tracking: upstream
5622 .tracking
5623 .status()
5624 .map(|upstream| proto::UpstreamTracking {
5625 ahead: upstream.ahead as u64,
5626 behind: upstream.behind as u64,
5627 }),
5628 }),
5629 most_recent_commit: branch
5630 .most_recent_commit
5631 .as_ref()
5632 .map(|commit| proto::CommitSummary {
5633 sha: commit.sha.to_string(),
5634 subject: commit.subject.to_string(),
5635 commit_timestamp: commit.commit_timestamp,
5636 author_name: commit.author_name.to_string(),
5637 }),
5638 }
5639}
5640
5641fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5642 proto::Worktree {
5643 path: worktree.path.to_string_lossy().to_string(),
5644 ref_name: worktree.ref_name.to_string(),
5645 sha: worktree.sha.to_string(),
5646 }
5647}
5648
5649fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5650 git::repository::Worktree {
5651 path: PathBuf::from(proto.path.clone()),
5652 ref_name: proto.ref_name.clone().into(),
5653 sha: proto.sha.clone().into(),
5654 }
5655}
5656
5657fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5658 git::repository::Branch {
5659 is_head: proto.is_head,
5660 ref_name: proto.ref_name.clone().into(),
5661 upstream: proto
5662 .upstream
5663 .as_ref()
5664 .map(|upstream| git::repository::Upstream {
5665 ref_name: upstream.ref_name.to_string().into(),
5666 tracking: upstream
5667 .tracking
5668 .as_ref()
5669 .map(|tracking| {
5670 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5671 ahead: tracking.ahead as u32,
5672 behind: tracking.behind as u32,
5673 })
5674 })
5675 .unwrap_or(git::repository::UpstreamTracking::Gone),
5676 }),
5677 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5678 git::repository::CommitSummary {
5679 sha: commit.sha.to_string().into(),
5680 subject: commit.subject.to_string().into(),
5681 commit_timestamp: commit.commit_timestamp,
5682 author_name: commit.author_name.to_string().into(),
5683 has_parent: true,
5684 }
5685 }),
5686 }
5687}
5688
5689fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5690 proto::GitCommitDetails {
5691 sha: commit.sha.to_string(),
5692 message: commit.message.to_string(),
5693 commit_timestamp: commit.commit_timestamp,
5694 author_email: commit.author_email.to_string(),
5695 author_name: commit.author_name.to_string(),
5696 }
5697}
5698
5699fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5700 CommitDetails {
5701 sha: proto.sha.clone().into(),
5702 message: proto.message.clone().into(),
5703 commit_timestamp: proto.commit_timestamp,
5704 author_email: proto.author_email.clone().into(),
5705 author_name: proto.author_name.clone().into(),
5706 }
5707}
5708
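/// Builds a fresh `RepositorySnapshot` by querying the backend for branches, statuses,
/// stash entries, merge state, and remote URLs, returning it together with the
/// `RepositoryEvent`s describing what changed relative to `prev_snapshot`.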
5709async fn compute_snapshot(
5710 id: RepositoryId,
5711 work_directory_abs_path: Arc<Path>,
5712 prev_snapshot: RepositorySnapshot,
5713 backend: Arc<dyn GitRepository>,
5714) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5715 let mut events = Vec::new();
5716 let branches = backend.branches().await?;
5717 let branch = branches.into_iter().find(|branch| branch.is_head);
5718 let statuses = backend
5719 .status(&[RepoPath::from_rel_path(
5720 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
5721 )])
5722 .await?;
5723 let stash_entries = backend.stash_entries().await?;
5724 let statuses_by_path = SumTree::from_iter(
5725 statuses
5726 .entries
5727 .iter()
5728 .map(|(repo_path, status)| StatusEntry {
5729 repo_path: repo_path.clone(),
5730 status: *status,
5731 }),
5732 (),
5733 );
5734 let (merge_details, merge_heads_changed) =
5735 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5736 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5737
5738 if merge_heads_changed {
5739 events.push(RepositoryEvent::MergeHeadsChanged);
5740 }
5741
5742 if statuses_by_path != prev_snapshot.statuses_by_path {
5743 events.push(RepositoryEvent::StatusesChanged)
5744 }
5745
5746 // The head commit is useful when `branch` is `None`, e.g. in a detached HEAD state.
5747 let head_commit = match backend.head_sha().await {
5748 Some(head_sha) => backend.show(head_sha).await.log_err(),
5749 None => None,
5750 };
5751
5752 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5753 events.push(RepositoryEvent::BranchChanged);
5754 }
5755
5756 // Used by edit prediction data collection
5757 let remote_origin_url = backend.remote_url("origin");
5758 let remote_upstream_url = backend.remote_url("upstream");
5759
5760 let snapshot = RepositorySnapshot {
5761 id,
5762 statuses_by_path,
5763 work_directory_abs_path,
5764 path_style: prev_snapshot.path_style,
5765 scan_id: prev_snapshot.scan_id + 1,
5766 branch,
5767 head_commit,
5768 merge: merge_details,
5769 remote_origin_url,
5770 remote_upstream_url,
5771 stash_entries,
5772 };
5773
5774 Ok((snapshot, events))
5775}
5776
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

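/// Serializes a [`FileStatus`] into its protobuf representation.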
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

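/// Maps an [`UnmergedStatusCode`] to the corresponding `proto::GitStatus` value.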
fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

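/// Maps a [`StatusCode`] to the corresponding `proto::GitStatus` value.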
fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}
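
// A minimal round-trip sketch for the status conversions above, assuming
// `FileStatus` implements `PartialEq` and `Copy` (both are relied on elsewhere
// in this file); the module and test names are illustrative only.
#[cfg(test)]
mod proto_status_conversion_sketch {
    use super::*;

    #[test]
    fn tracked_status_round_trips_through_proto() {
        let status: FileStatus = TrackedStatus {
            index_status: StatusCode::Added,
            worktree_status: StatusCode::Modified,
        }
        .into();
        // The `simple_status` argument is ignored whenever a detailed variant is present.
        let round_tripped = status_from_proto(0, Some(status_to_proto(status))).unwrap();
        assert!(status == round_tripped);
    }
}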