1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use std::{
59 cmp::Ordering,
60 collections::{BTreeSet, VecDeque},
61 future::Future,
62 mem,
63 ops::Range,
64 path::{Path, PathBuf},
65 str::FromStr,
66 sync::{
67 Arc,
68 atomic::{self, AtomicU64},
69 },
70 time::Instant,
71};
72use sum_tree::{Edit, SumTree, TreeSet};
73use task::Shell;
74use text::{Bias, BufferId};
75use util::{
76 ResultExt, debug_panic,
77 paths::{PathStyle, SanitizedPath},
78 post_inc,
79 rel_path::RelPath,
80};
81use worktree::{
82 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
83 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
84};
85use zeroize::Zeroize;
86
87pub struct GitStore {
88 state: GitStoreState,
89 buffer_store: Entity<BufferStore>,
90 worktree_store: Entity<WorktreeStore>,
91 repositories: HashMap<RepositoryId, Entity<Repository>>,
92 active_repo_id: Option<RepositoryId>,
93 #[allow(clippy::type_complexity)]
94 loading_diffs:
95 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
96 diffs: HashMap<BufferId, Entity<BufferGitState>>,
97 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
98 _subscriptions: Vec<Subscription>,
99}
100
101#[derive(Default)]
102struct SharedDiffs {
103 unstaged: Option<Entity<BufferDiff>>,
104 uncommitted: Option<Entity<BufferDiff>>,
105}
106
107struct BufferGitState {
108 unstaged_diff: Option<WeakEntity<BufferDiff>>,
109 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
110 conflict_set: Option<WeakEntity<ConflictSet>>,
111 recalculate_diff_task: Option<Task<Result<()>>>,
112 reparse_conflict_markers_task: Option<Task<Result<()>>>,
113 language: Option<Arc<Language>>,
114 language_registry: Option<Arc<LanguageRegistry>>,
115 conflict_updated_futures: Vec<oneshot::Sender<()>>,
116 recalculating_tx: postage::watch::Sender<bool>,
117
118 /// These operation counts are used to ensure that head and index text
119 /// values read from the git repository are up-to-date with any hunk staging
120 /// operations that have been performed on the BufferDiff.
121 ///
122 /// The operation count is incremented immediately when the user initiates a
123 /// hunk stage/unstage operation. Then, upon finishing writing the new index
124 /// text do disk, the `operation count as of write` is updated to reflect
125 /// the operation count that prompted the write.
126 hunk_staging_operation_count: usize,
127 hunk_staging_operation_count_as_of_write: usize,
128
129 head_text: Option<Arc<String>>,
130 index_text: Option<Arc<String>>,
131 head_changed: bool,
132 index_changed: bool,
133 language_changed: bool,
134}
135
136#[derive(Clone, Debug)]
137enum DiffBasesChange {
138 SetIndex(Option<String>),
139 SetHead(Option<String>),
140 SetEach {
141 index: Option<String>,
142 head: Option<String>,
143 },
144 SetBoth(Option<String>),
145}
146
147#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
148enum DiffKind {
149 Unstaged,
150 Uncommitted,
151}
152
153enum GitStoreState {
154 Local {
155 next_repository_id: Arc<AtomicU64>,
156 downstream: Option<LocalDownstreamState>,
157 project_environment: Entity<ProjectEnvironment>,
158 fs: Arc<dyn Fs>,
159 },
160 Remote {
161 upstream_client: AnyProtoClient,
162 upstream_project_id: u64,
163 downstream: Option<(AnyProtoClient, ProjectId)>,
164 },
165}
166
167enum DownstreamUpdate {
168 UpdateRepository(RepositorySnapshot),
169 RemoveRepository(RepositoryId),
170}
171
172struct LocalDownstreamState {
173 client: AnyProtoClient,
174 project_id: ProjectId,
175 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
176 _task: Task<Result<()>>,
177}
178
179#[derive(Clone, Debug)]
180pub struct GitStoreCheckpoint {
181 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
182}
183
184#[derive(Clone, Debug, PartialEq, Eq)]
185pub struct StatusEntry {
186 pub repo_path: RepoPath,
187 pub status: FileStatus,
188}
189
190impl StatusEntry {
191 fn to_proto(&self) -> proto::StatusEntry {
192 let simple_status = match self.status {
193 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
194 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
195 FileStatus::Tracked(TrackedStatus {
196 index_status,
197 worktree_status,
198 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
199 worktree_status
200 } else {
201 index_status
202 }),
203 };
204
205 proto::StatusEntry {
206 repo_path: self.repo_path.to_proto(),
207 simple_status,
208 status: Some(status_to_proto(self.status)),
209 }
210 }
211}
212
213impl TryFrom<proto::StatusEntry> for StatusEntry {
214 type Error = anyhow::Error;
215
216 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
217 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
218 let status = status_from_proto(value.simple_status, value.status)?;
219 Ok(Self { repo_path, status })
220 }
221}
222
223impl sum_tree::Item for StatusEntry {
224 type Summary = PathSummary<GitSummary>;
225
226 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
227 PathSummary {
228 max_path: self.repo_path.0.clone(),
229 item_summary: self.status.summary(),
230 }
231 }
232}
233
234impl sum_tree::KeyedItem for StatusEntry {
235 type Key = PathKey;
236
237 fn key(&self) -> Self::Key {
238 PathKey(self.repo_path.0.clone())
239 }
240}
241
242#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
243pub struct RepositoryId(pub u64);
244
245#[derive(Clone, Debug, Default, PartialEq, Eq)]
246pub struct MergeDetails {
247 pub conflicted_paths: TreeSet<RepoPath>,
248 pub message: Option<SharedString>,
249 pub heads: Vec<Option<SharedString>>,
250}
251
252#[derive(Clone, Debug, PartialEq, Eq)]
253pub struct RepositorySnapshot {
254 pub id: RepositoryId,
255 pub statuses_by_path: SumTree<StatusEntry>,
256 pub pending_ops_by_path: SumTree<PendingOps>,
257 pub work_directory_abs_path: Arc<Path>,
258 pub path_style: PathStyle,
259 pub branch: Option<Branch>,
260 pub head_commit: Option<CommitDetails>,
261 pub scan_id: u64,
262 pub merge: MergeDetails,
263 pub remote_origin_url: Option<String>,
264 pub remote_upstream_url: Option<String>,
265 pub stash_entries: GitStash,
266}
267
268type JobId = u64;
269
270#[derive(Clone, Debug, PartialEq, Eq)]
271pub struct JobInfo {
272 pub start: Instant,
273 pub message: SharedString,
274}
275
276pub struct Repository {
277 this: WeakEntity<Self>,
278 snapshot: RepositorySnapshot,
279 commit_message_buffer: Option<Entity<Buffer>>,
280 git_store: WeakEntity<GitStore>,
281 // For a local repository, holds paths that have had worktree events since the last status scan completed,
282 // and that should be examined during the next status scan.
283 paths_needing_status_update: BTreeSet<RepoPath>,
284 job_sender: mpsc::UnboundedSender<GitJob>,
285 active_jobs: HashMap<JobId, JobInfo>,
286 job_id: JobId,
287 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
288 latest_askpass_id: u64,
289}
290
291impl std::ops::Deref for Repository {
292 type Target = RepositorySnapshot;
293
294 fn deref(&self) -> &Self::Target {
295 &self.snapshot
296 }
297}
298
299#[derive(Clone)]
300pub enum RepositoryState {
301 Local {
302 backend: Arc<dyn GitRepository>,
303 environment: Arc<HashMap<String, String>>,
304 },
305 Remote {
306 project_id: ProjectId,
307 client: AnyProtoClient,
308 },
309}
310
311#[derive(Clone, Debug, PartialEq, Eq)]
312pub enum RepositoryEvent {
313 StatusesChanged {
314 // TODO could report which statuses changed here
315 full_scan: bool,
316 },
317 MergeHeadsChanged,
318 BranchChanged,
319 StashEntriesChanged,
320 PendingOpsChanged {
321 pending_ops: SumTree<pending_op::PendingOps>,
322 },
323}
324
325#[derive(Clone, Debug)]
326pub struct JobsUpdated;
327
328#[derive(Debug)]
329pub enum GitStoreEvent {
330 ActiveRepositoryChanged(Option<RepositoryId>),
331 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
332 RepositoryAdded,
333 RepositoryRemoved(RepositoryId),
334 IndexWriteError(anyhow::Error),
335 JobsUpdated,
336 ConflictsUpdated,
337}
338
339impl EventEmitter<RepositoryEvent> for Repository {}
340impl EventEmitter<JobsUpdated> for Repository {}
341impl EventEmitter<GitStoreEvent> for GitStore {}
342
343pub struct GitJob {
344 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
345 key: Option<GitJobKey>,
346}
347
348#[derive(PartialEq, Eq)]
349enum GitJobKey {
350 WriteIndex(Vec<RepoPath>),
351 ReloadBufferDiffBases,
352 RefreshStatuses,
353 ReloadGitState,
354}
355
356impl GitStore {
357 pub fn local(
358 worktree_store: &Entity<WorktreeStore>,
359 buffer_store: Entity<BufferStore>,
360 environment: Entity<ProjectEnvironment>,
361 fs: Arc<dyn Fs>,
362 cx: &mut Context<Self>,
363 ) -> Self {
364 Self::new(
365 worktree_store.clone(),
366 buffer_store,
367 GitStoreState::Local {
368 next_repository_id: Arc::new(AtomicU64::new(1)),
369 downstream: None,
370 project_environment: environment,
371 fs,
372 },
373 cx,
374 )
375 }
376
377 pub fn remote(
378 worktree_store: &Entity<WorktreeStore>,
379 buffer_store: Entity<BufferStore>,
380 upstream_client: AnyProtoClient,
381 project_id: u64,
382 cx: &mut Context<Self>,
383 ) -> Self {
384 Self::new(
385 worktree_store.clone(),
386 buffer_store,
387 GitStoreState::Remote {
388 upstream_client,
389 upstream_project_id: project_id,
390 downstream: None,
391 },
392 cx,
393 )
394 }
395
396 fn new(
397 worktree_store: Entity<WorktreeStore>,
398 buffer_store: Entity<BufferStore>,
399 state: GitStoreState,
400 cx: &mut Context<Self>,
401 ) -> Self {
402 let _subscriptions = vec![
403 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
404 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
405 ];
406
407 GitStore {
408 state,
409 buffer_store,
410 worktree_store,
411 repositories: HashMap::default(),
412 active_repo_id: None,
413 _subscriptions,
414 loading_diffs: HashMap::default(),
415 shared_diffs: HashMap::default(),
416 diffs: HashMap::default(),
417 }
418 }
419
420 pub fn init(client: &AnyProtoClient) {
421 client.add_entity_request_handler(Self::handle_get_remotes);
422 client.add_entity_request_handler(Self::handle_get_branches);
423 client.add_entity_request_handler(Self::handle_get_default_branch);
424 client.add_entity_request_handler(Self::handle_change_branch);
425 client.add_entity_request_handler(Self::handle_create_branch);
426 client.add_entity_request_handler(Self::handle_rename_branch);
427 client.add_entity_request_handler(Self::handle_git_init);
428 client.add_entity_request_handler(Self::handle_push);
429 client.add_entity_request_handler(Self::handle_pull);
430 client.add_entity_request_handler(Self::handle_fetch);
431 client.add_entity_request_handler(Self::handle_stage);
432 client.add_entity_request_handler(Self::handle_unstage);
433 client.add_entity_request_handler(Self::handle_stash);
434 client.add_entity_request_handler(Self::handle_stash_pop);
435 client.add_entity_request_handler(Self::handle_stash_apply);
436 client.add_entity_request_handler(Self::handle_stash_drop);
437 client.add_entity_request_handler(Self::handle_commit);
438 client.add_entity_request_handler(Self::handle_reset);
439 client.add_entity_request_handler(Self::handle_show);
440 client.add_entity_request_handler(Self::handle_load_commit_diff);
441 client.add_entity_request_handler(Self::handle_checkout_files);
442 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
443 client.add_entity_request_handler(Self::handle_set_index_text);
444 client.add_entity_request_handler(Self::handle_askpass);
445 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
446 client.add_entity_request_handler(Self::handle_git_diff);
447 client.add_entity_request_handler(Self::handle_tree_diff);
448 client.add_entity_request_handler(Self::handle_get_blob_content);
449 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
450 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
451 client.add_entity_message_handler(Self::handle_update_diff_bases);
452 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
453 client.add_entity_request_handler(Self::handle_blame_buffer);
454 client.add_entity_message_handler(Self::handle_update_repository);
455 client.add_entity_message_handler(Self::handle_remove_repository);
456 client.add_entity_request_handler(Self::handle_git_clone);
457 client.add_entity_request_handler(Self::handle_get_worktrees);
458 client.add_entity_request_handler(Self::handle_create_worktree);
459 }
460
461 pub fn is_local(&self) -> bool {
462 matches!(self.state, GitStoreState::Local { .. })
463 }
464 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
465 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
466 let id = repo.read(cx).id;
467 if self.active_repo_id != Some(id) {
468 self.active_repo_id = Some(id);
469 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
470 }
471 }
472 }
473
474 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
475 match &mut self.state {
476 GitStoreState::Remote {
477 downstream: downstream_client,
478 ..
479 } => {
480 for repo in self.repositories.values() {
481 let update = repo.read(cx).snapshot.initial_update(project_id);
482 for update in split_repository_update(update) {
483 client.send(update).log_err();
484 }
485 }
486 *downstream_client = Some((client, ProjectId(project_id)));
487 }
488 GitStoreState::Local {
489 downstream: downstream_client,
490 ..
491 } => {
492 let mut snapshots = HashMap::default();
493 let (updates_tx, mut updates_rx) = mpsc::unbounded();
494 for repo in self.repositories.values() {
495 updates_tx
496 .unbounded_send(DownstreamUpdate::UpdateRepository(
497 repo.read(cx).snapshot.clone(),
498 ))
499 .ok();
500 }
501 *downstream_client = Some(LocalDownstreamState {
502 client: client.clone(),
503 project_id: ProjectId(project_id),
504 updates_tx,
505 _task: cx.spawn(async move |this, cx| {
506 cx.background_spawn(async move {
507 while let Some(update) = updates_rx.next().await {
508 match update {
509 DownstreamUpdate::UpdateRepository(snapshot) => {
510 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
511 {
512 let update =
513 snapshot.build_update(old_snapshot, project_id);
514 *old_snapshot = snapshot;
515 for update in split_repository_update(update) {
516 client.send(update)?;
517 }
518 } else {
519 let update = snapshot.initial_update(project_id);
520 for update in split_repository_update(update) {
521 client.send(update)?;
522 }
523 snapshots.insert(snapshot.id, snapshot);
524 }
525 }
526 DownstreamUpdate::RemoveRepository(id) => {
527 client.send(proto::RemoveRepository {
528 project_id,
529 id: id.to_proto(),
530 })?;
531 }
532 }
533 }
534 anyhow::Ok(())
535 })
536 .await
537 .ok();
538 this.update(cx, |this, _| {
539 if let GitStoreState::Local {
540 downstream: downstream_client,
541 ..
542 } = &mut this.state
543 {
544 downstream_client.take();
545 } else {
546 unreachable!("unshared called on remote store");
547 }
548 })
549 }),
550 });
551 }
552 }
553 }
554
555 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
556 match &mut self.state {
557 GitStoreState::Local {
558 downstream: downstream_client,
559 ..
560 } => {
561 downstream_client.take();
562 }
563 GitStoreState::Remote {
564 downstream: downstream_client,
565 ..
566 } => {
567 downstream_client.take();
568 }
569 }
570 self.shared_diffs.clear();
571 }
572
573 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
574 self.shared_diffs.remove(peer_id);
575 }
576
577 pub fn active_repository(&self) -> Option<Entity<Repository>> {
578 self.active_repo_id
579 .as_ref()
580 .map(|id| self.repositories[id].clone())
581 }
582
583 pub fn open_unstaged_diff(
584 &mut self,
585 buffer: Entity<Buffer>,
586 cx: &mut Context<Self>,
587 ) -> Task<Result<Entity<BufferDiff>>> {
588 let buffer_id = buffer.read(cx).remote_id();
589 if let Some(diff_state) = self.diffs.get(&buffer_id)
590 && let Some(unstaged_diff) = diff_state
591 .read(cx)
592 .unstaged_diff
593 .as_ref()
594 .and_then(|weak| weak.upgrade())
595 {
596 if let Some(task) =
597 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
598 {
599 return cx.background_executor().spawn(async move {
600 task.await;
601 Ok(unstaged_diff)
602 });
603 }
604 return Task::ready(Ok(unstaged_diff));
605 }
606
607 let Some((repo, repo_path)) =
608 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
609 else {
610 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
611 };
612
613 let task = self
614 .loading_diffs
615 .entry((buffer_id, DiffKind::Unstaged))
616 .or_insert_with(|| {
617 let staged_text = repo.update(cx, |repo, cx| {
618 repo.load_staged_text(buffer_id, repo_path, cx)
619 });
620 cx.spawn(async move |this, cx| {
621 Self::open_diff_internal(
622 this,
623 DiffKind::Unstaged,
624 staged_text.await.map(DiffBasesChange::SetIndex),
625 buffer,
626 cx,
627 )
628 .await
629 .map_err(Arc::new)
630 })
631 .shared()
632 })
633 .clone();
634
635 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
636 }
637
638 pub fn open_diff_since(
639 &mut self,
640 oid: Option<git::Oid>,
641 buffer: Entity<Buffer>,
642 repo: Entity<Repository>,
643 languages: Arc<LanguageRegistry>,
644 cx: &mut Context<Self>,
645 ) -> Task<Result<Entity<BufferDiff>>> {
646 cx.spawn(async move |this, cx| {
647 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
648 let content = match oid {
649 None => None,
650 Some(oid) => Some(
651 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
652 .await?,
653 ),
654 };
655 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
656
657 buffer_diff
658 .update(cx, |buffer_diff, cx| {
659 buffer_diff.set_base_text(
660 content.map(Arc::new),
661 buffer_snapshot.language().cloned(),
662 Some(languages.clone()),
663 buffer_snapshot.text,
664 cx,
665 )
666 })?
667 .await?;
668 let unstaged_diff = this
669 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
670 .await?;
671 buffer_diff.update(cx, |buffer_diff, _| {
672 buffer_diff.set_secondary_diff(unstaged_diff);
673 })?;
674
675 this.update(cx, |_, cx| {
676 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
677 .detach();
678 })?;
679
680 Ok(buffer_diff)
681 })
682 }
683
684 pub fn open_uncommitted_diff(
685 &mut self,
686 buffer: Entity<Buffer>,
687 cx: &mut Context<Self>,
688 ) -> Task<Result<Entity<BufferDiff>>> {
689 let buffer_id = buffer.read(cx).remote_id();
690
691 if let Some(diff_state) = self.diffs.get(&buffer_id)
692 && let Some(uncommitted_diff) = diff_state
693 .read(cx)
694 .uncommitted_diff
695 .as_ref()
696 .and_then(|weak| weak.upgrade())
697 {
698 if let Some(task) =
699 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
700 {
701 return cx.background_executor().spawn(async move {
702 task.await;
703 Ok(uncommitted_diff)
704 });
705 }
706 return Task::ready(Ok(uncommitted_diff));
707 }
708
709 let Some((repo, repo_path)) =
710 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
711 else {
712 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
713 };
714
715 let task = self
716 .loading_diffs
717 .entry((buffer_id, DiffKind::Uncommitted))
718 .or_insert_with(|| {
719 let changes = repo.update(cx, |repo, cx| {
720 repo.load_committed_text(buffer_id, repo_path, cx)
721 });
722
723 // todo(lw): hot foreground spawn
724 cx.spawn(async move |this, cx| {
725 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
726 .await
727 .map_err(Arc::new)
728 })
729 .shared()
730 })
731 .clone();
732
733 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
734 }
735
736 async fn open_diff_internal(
737 this: WeakEntity<Self>,
738 kind: DiffKind,
739 texts: Result<DiffBasesChange>,
740 buffer_entity: Entity<Buffer>,
741 cx: &mut AsyncApp,
742 ) -> Result<Entity<BufferDiff>> {
743 let diff_bases_change = match texts {
744 Err(e) => {
745 this.update(cx, |this, cx| {
746 let buffer = buffer_entity.read(cx);
747 let buffer_id = buffer.remote_id();
748 this.loading_diffs.remove(&(buffer_id, kind));
749 })?;
750 return Err(e);
751 }
752 Ok(change) => change,
753 };
754
755 this.update(cx, |this, cx| {
756 let buffer = buffer_entity.read(cx);
757 let buffer_id = buffer.remote_id();
758 let language = buffer.language().cloned();
759 let language_registry = buffer.language_registry();
760 let text_snapshot = buffer.text_snapshot();
761 this.loading_diffs.remove(&(buffer_id, kind));
762
763 let git_store = cx.weak_entity();
764 let diff_state = this
765 .diffs
766 .entry(buffer_id)
767 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
768
769 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
770
771 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
772 diff_state.update(cx, |diff_state, cx| {
773 diff_state.language = language;
774 diff_state.language_registry = language_registry;
775
776 match kind {
777 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
778 DiffKind::Uncommitted => {
779 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
780 diff
781 } else {
782 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
783 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
784 unstaged_diff
785 };
786
787 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
788 diff_state.uncommitted_diff = Some(diff.downgrade())
789 }
790 }
791
792 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
793 let rx = diff_state.wait_for_recalculation();
794
795 anyhow::Ok(async move {
796 if let Some(rx) = rx {
797 rx.await;
798 }
799 Ok(diff)
800 })
801 })
802 })??
803 .await
804 }
805
806 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
807 let diff_state = self.diffs.get(&buffer_id)?;
808 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
809 }
810
811 pub fn get_uncommitted_diff(
812 &self,
813 buffer_id: BufferId,
814 cx: &App,
815 ) -> Option<Entity<BufferDiff>> {
816 let diff_state = self.diffs.get(&buffer_id)?;
817 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
818 }
819
820 pub fn open_conflict_set(
821 &mut self,
822 buffer: Entity<Buffer>,
823 cx: &mut Context<Self>,
824 ) -> Entity<ConflictSet> {
825 log::debug!("open conflict set");
826 let buffer_id = buffer.read(cx).remote_id();
827
828 if let Some(git_state) = self.diffs.get(&buffer_id)
829 && let Some(conflict_set) = git_state
830 .read(cx)
831 .conflict_set
832 .as_ref()
833 .and_then(|weak| weak.upgrade())
834 {
835 let conflict_set = conflict_set;
836 let buffer_snapshot = buffer.read(cx).text_snapshot();
837
838 git_state.update(cx, |state, cx| {
839 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
840 });
841
842 return conflict_set;
843 }
844
845 let is_unmerged = self
846 .repository_and_path_for_buffer_id(buffer_id, cx)
847 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
848 let git_store = cx.weak_entity();
849 let buffer_git_state = self
850 .diffs
851 .entry(buffer_id)
852 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
853 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
854
855 self._subscriptions
856 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
857 cx.emit(GitStoreEvent::ConflictsUpdated);
858 }));
859
860 buffer_git_state.update(cx, |state, cx| {
861 state.conflict_set = Some(conflict_set.downgrade());
862 let buffer_snapshot = buffer.read(cx).text_snapshot();
863 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
864 });
865
866 conflict_set
867 }
868
869 pub fn project_path_git_status(
870 &self,
871 project_path: &ProjectPath,
872 cx: &App,
873 ) -> Option<FileStatus> {
874 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
875 Some(repo.read(cx).status_for_path(&repo_path)?.status)
876 }
877
878 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
879 let mut work_directory_abs_paths = Vec::new();
880 let mut checkpoints = Vec::new();
881 for repository in self.repositories.values() {
882 repository.update(cx, |repository, _| {
883 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
884 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
885 });
886 }
887
888 cx.background_executor().spawn(async move {
889 let checkpoints = future::try_join_all(checkpoints).await?;
890 Ok(GitStoreCheckpoint {
891 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
892 .into_iter()
893 .zip(checkpoints)
894 .collect(),
895 })
896 })
897 }
898
899 pub fn restore_checkpoint(
900 &self,
901 checkpoint: GitStoreCheckpoint,
902 cx: &mut App,
903 ) -> Task<Result<()>> {
904 let repositories_by_work_dir_abs_path = self
905 .repositories
906 .values()
907 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
908 .collect::<HashMap<_, _>>();
909
910 let mut tasks = Vec::new();
911 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
912 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
913 let restore = repository.update(cx, |repository, _| {
914 repository.restore_checkpoint(checkpoint)
915 });
916 tasks.push(async move { restore.await? });
917 }
918 }
919 cx.background_spawn(async move {
920 future::try_join_all(tasks).await?;
921 Ok(())
922 })
923 }
924
925 /// Compares two checkpoints, returning true if they are equal.
926 pub fn compare_checkpoints(
927 &self,
928 left: GitStoreCheckpoint,
929 mut right: GitStoreCheckpoint,
930 cx: &mut App,
931 ) -> Task<Result<bool>> {
932 let repositories_by_work_dir_abs_path = self
933 .repositories
934 .values()
935 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
936 .collect::<HashMap<_, _>>();
937
938 let mut tasks = Vec::new();
939 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
940 if let Some(right_checkpoint) = right
941 .checkpoints_by_work_dir_abs_path
942 .remove(&work_dir_abs_path)
943 {
944 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
945 {
946 let compare = repository.update(cx, |repository, _| {
947 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
948 });
949
950 tasks.push(async move { compare.await? });
951 }
952 } else {
953 return Task::ready(Ok(false));
954 }
955 }
956 cx.background_spawn(async move {
957 Ok(future::try_join_all(tasks)
958 .await?
959 .into_iter()
960 .all(|result| result))
961 })
962 }
963
964 /// Blames a buffer.
965 pub fn blame_buffer(
966 &self,
967 buffer: &Entity<Buffer>,
968 version: Option<clock::Global>,
969 cx: &mut App,
970 ) -> Task<Result<Option<Blame>>> {
971 let buffer = buffer.read(cx);
972 let Some((repo, repo_path)) =
973 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
974 else {
975 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
976 };
977 let content = match &version {
978 Some(version) => buffer.rope_for_version(version),
979 None => buffer.as_rope().clone(),
980 };
981 let version = version.unwrap_or(buffer.version());
982 let buffer_id = buffer.remote_id();
983
984 let rx = repo.update(cx, |repo, _| {
985 repo.send_job(None, move |state, _| async move {
986 match state {
987 RepositoryState::Local { backend, .. } => backend
988 .blame(repo_path.clone(), content)
989 .await
990 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
991 .map(Some),
992 RepositoryState::Remote { project_id, client } => {
993 let response = client
994 .request(proto::BlameBuffer {
995 project_id: project_id.to_proto(),
996 buffer_id: buffer_id.into(),
997 version: serialize_version(&version),
998 })
999 .await?;
1000 Ok(deserialize_blame_buffer_response(response))
1001 }
1002 }
1003 })
1004 });
1005
1006 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1007 }
1008
1009 pub fn get_permalink_to_line(
1010 &self,
1011 buffer: &Entity<Buffer>,
1012 selection: Range<u32>,
1013 cx: &mut App,
1014 ) -> Task<Result<url::Url>> {
1015 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1016 return Task::ready(Err(anyhow!("buffer has no file")));
1017 };
1018
1019 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1020 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1021 cx,
1022 ) else {
1023 // If we're not in a Git repo, check whether this is a Rust source
1024 // file in the Cargo registry (presumably opened with go-to-definition
1025 // from a normal Rust file). If so, we can put together a permalink
1026 // using crate metadata.
1027 if buffer
1028 .read(cx)
1029 .language()
1030 .is_none_or(|lang| lang.name() != "Rust".into())
1031 {
1032 return Task::ready(Err(anyhow!("no permalink available")));
1033 }
1034 let file_path = file.worktree.read(cx).absolutize(&file.path);
1035 return cx.spawn(async move |cx| {
1036 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1037 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1038 .context("no permalink available")
1039 });
1040 };
1041
1042 let buffer_id = buffer.read(cx).remote_id();
1043 let branch = repo.read(cx).branch.clone();
1044 let remote = branch
1045 .as_ref()
1046 .and_then(|b| b.upstream.as_ref())
1047 .and_then(|b| b.remote_name())
1048 .unwrap_or("origin")
1049 .to_string();
1050
1051 let rx = repo.update(cx, |repo, _| {
1052 repo.send_job(None, move |state, cx| async move {
1053 match state {
1054 RepositoryState::Local { backend, .. } => {
1055 let origin_url = backend
1056 .remote_url(&remote)
1057 .with_context(|| format!("remote \"{remote}\" not found"))?;
1058
1059 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1060
1061 let provider_registry =
1062 cx.update(GitHostingProviderRegistry::default_global)?;
1063
1064 let (provider, remote) =
1065 parse_git_remote_url(provider_registry, &origin_url)
1066 .context("parsing Git remote URL")?;
1067
1068 Ok(provider.build_permalink(
1069 remote,
1070 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1071 ))
1072 }
1073 RepositoryState::Remote { project_id, client } => {
1074 let response = client
1075 .request(proto::GetPermalinkToLine {
1076 project_id: project_id.to_proto(),
1077 buffer_id: buffer_id.into(),
1078 selection: Some(proto::Range {
1079 start: selection.start as u64,
1080 end: selection.end as u64,
1081 }),
1082 })
1083 .await?;
1084
1085 url::Url::parse(&response.permalink).context("failed to parse permalink")
1086 }
1087 }
1088 })
1089 });
1090 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1091 }
1092
1093 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1094 match &self.state {
1095 GitStoreState::Local {
1096 downstream: downstream_client,
1097 ..
1098 } => downstream_client
1099 .as_ref()
1100 .map(|state| (state.client.clone(), state.project_id)),
1101 GitStoreState::Remote {
1102 downstream: downstream_client,
1103 ..
1104 } => downstream_client.clone(),
1105 }
1106 }
1107
1108 fn upstream_client(&self) -> Option<AnyProtoClient> {
1109 match &self.state {
1110 GitStoreState::Local { .. } => None,
1111 GitStoreState::Remote {
1112 upstream_client, ..
1113 } => Some(upstream_client.clone()),
1114 }
1115 }
1116
1117 fn on_worktree_store_event(
1118 &mut self,
1119 worktree_store: Entity<WorktreeStore>,
1120 event: &WorktreeStoreEvent,
1121 cx: &mut Context<Self>,
1122 ) {
1123 let GitStoreState::Local {
1124 project_environment,
1125 downstream,
1126 next_repository_id,
1127 fs,
1128 } = &self.state
1129 else {
1130 return;
1131 };
1132
1133 match event {
1134 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1135 if let Some(worktree) = self
1136 .worktree_store
1137 .read(cx)
1138 .worktree_for_id(*worktree_id, cx)
1139 {
1140 let paths_by_git_repo =
1141 self.process_updated_entries(&worktree, updated_entries, cx);
1142 let downstream = downstream
1143 .as_ref()
1144 .map(|downstream| downstream.updates_tx.clone());
1145 cx.spawn(async move |_, cx| {
1146 let paths_by_git_repo = paths_by_git_repo.await;
1147 for (repo, paths) in paths_by_git_repo {
1148 repo.update(cx, |repo, cx| {
1149 repo.paths_changed(paths, downstream.clone(), cx);
1150 })
1151 .ok();
1152 }
1153 })
1154 .detach();
1155 }
1156 }
1157 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1158 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1159 else {
1160 return;
1161 };
1162 if !worktree.read(cx).is_visible() {
1163 log::debug!(
1164 "not adding repositories for local worktree {:?} because it's not visible",
1165 worktree.read(cx).abs_path()
1166 );
1167 return;
1168 }
1169 self.update_repositories_from_worktree(
1170 project_environment.clone(),
1171 next_repository_id.clone(),
1172 downstream
1173 .as_ref()
1174 .map(|downstream| downstream.updates_tx.clone()),
1175 changed_repos.clone(),
1176 fs.clone(),
1177 cx,
1178 );
1179 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1180 }
1181 _ => {}
1182 }
1183 }
1184 fn on_repository_event(
1185 &mut self,
1186 repo: Entity<Repository>,
1187 event: &RepositoryEvent,
1188 cx: &mut Context<Self>,
1189 ) {
1190 let id = repo.read(cx).id;
1191 let repo_snapshot = repo.read(cx).snapshot.clone();
1192 for (buffer_id, diff) in self.diffs.iter() {
1193 if let Some((buffer_repo, repo_path)) =
1194 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1195 && buffer_repo == repo
1196 {
1197 diff.update(cx, |diff, cx| {
1198 if let Some(conflict_set) = &diff.conflict_set {
1199 let conflict_status_changed =
1200 conflict_set.update(cx, |conflict_set, cx| {
1201 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1202 conflict_set.set_has_conflict(has_conflict, cx)
1203 })?;
1204 if conflict_status_changed {
1205 let buffer_store = self.buffer_store.read(cx);
1206 if let Some(buffer) = buffer_store.get(*buffer_id) {
1207 let _ = diff
1208 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1209 }
1210 }
1211 }
1212 anyhow::Ok(())
1213 })
1214 .ok();
1215 }
1216 }
1217 cx.emit(GitStoreEvent::RepositoryUpdated(
1218 id,
1219 event.clone(),
1220 self.active_repo_id == Some(id),
1221 ))
1222 }
1223
1224 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1225 cx.emit(GitStoreEvent::JobsUpdated)
1226 }
1227
1228 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1229 fn update_repositories_from_worktree(
1230 &mut self,
1231 project_environment: Entity<ProjectEnvironment>,
1232 next_repository_id: Arc<AtomicU64>,
1233 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1234 updated_git_repositories: UpdatedGitRepositoriesSet,
1235 fs: Arc<dyn Fs>,
1236 cx: &mut Context<Self>,
1237 ) {
1238 let mut removed_ids = Vec::new();
1239 for update in updated_git_repositories.iter() {
1240 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1241 let existing_work_directory_abs_path =
1242 repo.read(cx).work_directory_abs_path.clone();
1243 Some(&existing_work_directory_abs_path)
1244 == update.old_work_directory_abs_path.as_ref()
1245 || Some(&existing_work_directory_abs_path)
1246 == update.new_work_directory_abs_path.as_ref()
1247 }) {
1248 if let Some(new_work_directory_abs_path) =
1249 update.new_work_directory_abs_path.clone()
1250 {
1251 existing.update(cx, |existing, cx| {
1252 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1253 existing.schedule_scan(updates_tx.clone(), cx);
1254 });
1255 } else {
1256 removed_ids.push(*id);
1257 }
1258 } else if let UpdatedGitRepository {
1259 new_work_directory_abs_path: Some(work_directory_abs_path),
1260 dot_git_abs_path: Some(dot_git_abs_path),
1261 repository_dir_abs_path: Some(repository_dir_abs_path),
1262 common_dir_abs_path: Some(common_dir_abs_path),
1263 ..
1264 } = update
1265 {
1266 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1267 let git_store = cx.weak_entity();
1268 let repo = cx.new(|cx| {
1269 let mut repo = Repository::local(
1270 id,
1271 work_directory_abs_path.clone(),
1272 dot_git_abs_path.clone(),
1273 repository_dir_abs_path.clone(),
1274 common_dir_abs_path.clone(),
1275 project_environment.downgrade(),
1276 fs.clone(),
1277 git_store,
1278 cx,
1279 );
1280 if let Some(updates_tx) = updates_tx.as_ref() {
1281 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1282 updates_tx
1283 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1284 .ok();
1285 }
1286 repo.schedule_scan(updates_tx.clone(), cx);
1287 repo
1288 });
1289 self._subscriptions
1290 .push(cx.subscribe(&repo, Self::on_repository_event));
1291 self._subscriptions
1292 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1293 self.repositories.insert(id, repo);
1294 cx.emit(GitStoreEvent::RepositoryAdded);
1295 self.active_repo_id.get_or_insert_with(|| {
1296 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1297 id
1298 });
1299 }
1300 }
1301
1302 for id in removed_ids {
1303 if self.active_repo_id == Some(id) {
1304 self.active_repo_id = None;
1305 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1306 }
1307 self.repositories.remove(&id);
1308 if let Some(updates_tx) = updates_tx.as_ref() {
1309 updates_tx
1310 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1311 .ok();
1312 }
1313 }
1314 }
1315
1316 fn on_buffer_store_event(
1317 &mut self,
1318 _: Entity<BufferStore>,
1319 event: &BufferStoreEvent,
1320 cx: &mut Context<Self>,
1321 ) {
1322 match event {
1323 BufferStoreEvent::BufferAdded(buffer) => {
1324 cx.subscribe(buffer, |this, buffer, event, cx| {
1325 if let BufferEvent::LanguageChanged = event {
1326 let buffer_id = buffer.read(cx).remote_id();
1327 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1328 diff_state.update(cx, |diff_state, cx| {
1329 diff_state.buffer_language_changed(buffer, cx);
1330 });
1331 }
1332 }
1333 })
1334 .detach();
1335 }
1336 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1337 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1338 diffs.remove(buffer_id);
1339 }
1340 }
1341 BufferStoreEvent::BufferDropped(buffer_id) => {
1342 self.diffs.remove(buffer_id);
1343 for diffs in self.shared_diffs.values_mut() {
1344 diffs.remove(buffer_id);
1345 }
1346 }
1347
1348 _ => {}
1349 }
1350 }
1351
1352 pub fn recalculate_buffer_diffs(
1353 &mut self,
1354 buffers: Vec<Entity<Buffer>>,
1355 cx: &mut Context<Self>,
1356 ) -> impl Future<Output = ()> + use<> {
1357 let mut futures = Vec::new();
1358 for buffer in buffers {
1359 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1360 let buffer = buffer.read(cx).text_snapshot();
1361 diff_state.update(cx, |diff_state, cx| {
1362 diff_state.recalculate_diffs(buffer.clone(), cx);
1363 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1364 });
1365 futures.push(diff_state.update(cx, |diff_state, cx| {
1366 diff_state
1367 .reparse_conflict_markers(buffer, cx)
1368 .map(|_| {})
1369 .boxed()
1370 }));
1371 }
1372 }
1373 async move {
1374 futures::future::join_all(futures).await;
1375 }
1376 }
1377
1378 fn on_buffer_diff_event(
1379 &mut self,
1380 diff: Entity<buffer_diff::BufferDiff>,
1381 event: &BufferDiffEvent,
1382 cx: &mut Context<Self>,
1383 ) {
1384 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1385 let buffer_id = diff.read(cx).buffer_id;
1386 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1387 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1388 diff_state.hunk_staging_operation_count += 1;
1389 diff_state.hunk_staging_operation_count
1390 });
1391 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1392 let recv = repo.update(cx, |repo, cx| {
1393 log::debug!("hunks changed for {}", path.as_unix_str());
1394 repo.spawn_set_index_text_job(
1395 path,
1396 new_index_text.as_ref().map(|rope| rope.to_string()),
1397 Some(hunk_staging_operation_count),
1398 cx,
1399 )
1400 });
1401 let diff = diff.downgrade();
1402 cx.spawn(async move |this, cx| {
1403 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1404 diff.update(cx, |diff, cx| {
1405 diff.clear_pending_hunks(cx);
1406 })
1407 .ok();
1408 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1409 .ok();
1410 }
1411 })
1412 .detach();
1413 }
1414 }
1415 }
1416 }
1417
1418 fn local_worktree_git_repos_changed(
1419 &mut self,
1420 worktree: Entity<Worktree>,
1421 changed_repos: &UpdatedGitRepositoriesSet,
1422 cx: &mut Context<Self>,
1423 ) {
1424 log::debug!("local worktree repos changed");
1425 debug_assert!(worktree.read(cx).is_local());
1426
1427 for repository in self.repositories.values() {
1428 repository.update(cx, |repository, cx| {
1429 let repo_abs_path = &repository.work_directory_abs_path;
1430 if changed_repos.iter().any(|update| {
1431 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1432 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1433 }) {
1434 repository.reload_buffer_diff_bases(cx);
1435 }
1436 });
1437 }
1438 }
1439
1440 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1441 &self.repositories
1442 }
1443
1444 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1445 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1446 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1447 Some(status.status)
1448 }
1449
1450 pub fn repository_and_path_for_buffer_id(
1451 &self,
1452 buffer_id: BufferId,
1453 cx: &App,
1454 ) -> Option<(Entity<Repository>, RepoPath)> {
1455 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1456 let project_path = buffer.read(cx).project_path(cx)?;
1457 self.repository_and_path_for_project_path(&project_path, cx)
1458 }
1459
1460 pub fn repository_and_path_for_project_path(
1461 &self,
1462 path: &ProjectPath,
1463 cx: &App,
1464 ) -> Option<(Entity<Repository>, RepoPath)> {
1465 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1466 self.repositories
1467 .values()
1468 .filter_map(|repo| {
1469 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1470 Some((repo.clone(), repo_path))
1471 })
1472 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1473 }
1474
1475 pub fn git_init(
1476 &self,
1477 path: Arc<Path>,
1478 fallback_branch_name: String,
1479 cx: &App,
1480 ) -> Task<Result<()>> {
1481 match &self.state {
1482 GitStoreState::Local { fs, .. } => {
1483 let fs = fs.clone();
1484 cx.background_executor()
1485 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1486 }
1487 GitStoreState::Remote {
1488 upstream_client,
1489 upstream_project_id: project_id,
1490 ..
1491 } => {
1492 let client = upstream_client.clone();
1493 let project_id = *project_id;
1494 cx.background_executor().spawn(async move {
1495 client
1496 .request(proto::GitInit {
1497 project_id: project_id,
1498 abs_path: path.to_string_lossy().into_owned(),
1499 fallback_branch_name,
1500 })
1501 .await?;
1502 Ok(())
1503 })
1504 }
1505 }
1506 }
1507
1508 pub fn git_clone(
1509 &self,
1510 repo: String,
1511 path: impl Into<Arc<std::path::Path>>,
1512 cx: &App,
1513 ) -> Task<Result<()>> {
1514 let path = path.into();
1515 match &self.state {
1516 GitStoreState::Local { fs, .. } => {
1517 let fs = fs.clone();
1518 cx.background_executor()
1519 .spawn(async move { fs.git_clone(&repo, &path).await })
1520 }
1521 GitStoreState::Remote {
1522 upstream_client,
1523 upstream_project_id,
1524 ..
1525 } => {
1526 if upstream_client.is_via_collab() {
1527 return Task::ready(Err(anyhow!(
1528 "Git Clone isn't supported for project guests"
1529 )));
1530 }
1531 let request = upstream_client.request(proto::GitClone {
1532 project_id: *upstream_project_id,
1533 abs_path: path.to_string_lossy().into_owned(),
1534 remote_repo: repo,
1535 });
1536
1537 cx.background_spawn(async move {
1538 let result = request.await?;
1539
1540 match result.success {
1541 true => Ok(()),
1542 false => Err(anyhow!("Git Clone failed")),
1543 }
1544 })
1545 }
1546 }
1547 }
1548
1549 async fn handle_update_repository(
1550 this: Entity<Self>,
1551 envelope: TypedEnvelope<proto::UpdateRepository>,
1552 mut cx: AsyncApp,
1553 ) -> Result<()> {
1554 this.update(&mut cx, |this, cx| {
1555 let path_style = this.worktree_store.read(cx).path_style();
1556 let mut update = envelope.payload;
1557
1558 let id = RepositoryId::from_proto(update.id);
1559 let client = this.upstream_client().context("no upstream client")?;
1560
1561 let mut repo_subscription = None;
1562 let repo = this.repositories.entry(id).or_insert_with(|| {
1563 let git_store = cx.weak_entity();
1564 let repo = cx.new(|cx| {
1565 Repository::remote(
1566 id,
1567 Path::new(&update.abs_path).into(),
1568 path_style,
1569 ProjectId(update.project_id),
1570 client,
1571 git_store,
1572 cx,
1573 )
1574 });
1575 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1576 cx.emit(GitStoreEvent::RepositoryAdded);
1577 repo
1578 });
1579 this._subscriptions.extend(repo_subscription);
1580
1581 repo.update(cx, {
1582 let update = update.clone();
1583 |repo, cx| repo.apply_remote_update(update, cx)
1584 })?;
1585
1586 this.active_repo_id.get_or_insert_with(|| {
1587 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1588 id
1589 });
1590
1591 if let Some((client, project_id)) = this.downstream_client() {
1592 update.project_id = project_id.to_proto();
1593 client.send(update).log_err();
1594 }
1595 Ok(())
1596 })?
1597 }
1598
1599 async fn handle_remove_repository(
1600 this: Entity<Self>,
1601 envelope: TypedEnvelope<proto::RemoveRepository>,
1602 mut cx: AsyncApp,
1603 ) -> Result<()> {
1604 this.update(&mut cx, |this, cx| {
1605 let mut update = envelope.payload;
1606 let id = RepositoryId::from_proto(update.id);
1607 this.repositories.remove(&id);
1608 if let Some((client, project_id)) = this.downstream_client() {
1609 update.project_id = project_id.to_proto();
1610 client.send(update).log_err();
1611 }
1612 if this.active_repo_id == Some(id) {
1613 this.active_repo_id = None;
1614 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1615 }
1616 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1617 })
1618 }
1619
1620 async fn handle_git_init(
1621 this: Entity<Self>,
1622 envelope: TypedEnvelope<proto::GitInit>,
1623 cx: AsyncApp,
1624 ) -> Result<proto::Ack> {
1625 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1626 let name = envelope.payload.fallback_branch_name;
1627 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1628 .await?;
1629
1630 Ok(proto::Ack {})
1631 }
1632
1633 async fn handle_git_clone(
1634 this: Entity<Self>,
1635 envelope: TypedEnvelope<proto::GitClone>,
1636 cx: AsyncApp,
1637 ) -> Result<proto::GitCloneResponse> {
1638 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1639 let repo_name = envelope.payload.remote_repo;
1640 let result = cx
1641 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1642 .await;
1643
1644 Ok(proto::GitCloneResponse {
1645 success: result.is_ok(),
1646 })
1647 }
1648
1649 async fn handle_fetch(
1650 this: Entity<Self>,
1651 envelope: TypedEnvelope<proto::Fetch>,
1652 mut cx: AsyncApp,
1653 ) -> Result<proto::RemoteMessageResponse> {
1654 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1655 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1656 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1657 let askpass_id = envelope.payload.askpass_id;
1658
1659 let askpass = make_remote_delegate(
1660 this,
1661 envelope.payload.project_id,
1662 repository_id,
1663 askpass_id,
1664 &mut cx,
1665 );
1666
1667 let remote_output = repository_handle
1668 .update(&mut cx, |repository_handle, cx| {
1669 repository_handle.fetch(fetch_options, askpass, cx)
1670 })?
1671 .await??;
1672
1673 Ok(proto::RemoteMessageResponse {
1674 stdout: remote_output.stdout,
1675 stderr: remote_output.stderr,
1676 })
1677 }
1678
1679 async fn handle_push(
1680 this: Entity<Self>,
1681 envelope: TypedEnvelope<proto::Push>,
1682 mut cx: AsyncApp,
1683 ) -> Result<proto::RemoteMessageResponse> {
1684 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1685 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1686
1687 let askpass_id = envelope.payload.askpass_id;
1688 let askpass = make_remote_delegate(
1689 this,
1690 envelope.payload.project_id,
1691 repository_id,
1692 askpass_id,
1693 &mut cx,
1694 );
1695
1696 let options = envelope
1697 .payload
1698 .options
1699 .as_ref()
1700 .map(|_| match envelope.payload.options() {
1701 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1702 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1703 });
1704
1705 let branch_name = envelope.payload.branch_name.into();
1706 let remote_name = envelope.payload.remote_name.into();
1707
1708 let remote_output = repository_handle
1709 .update(&mut cx, |repository_handle, cx| {
1710 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1711 })?
1712 .await??;
1713 Ok(proto::RemoteMessageResponse {
1714 stdout: remote_output.stdout,
1715 stderr: remote_output.stderr,
1716 })
1717 }
1718
1719 async fn handle_pull(
1720 this: Entity<Self>,
1721 envelope: TypedEnvelope<proto::Pull>,
1722 mut cx: AsyncApp,
1723 ) -> Result<proto::RemoteMessageResponse> {
1724 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1725 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1726 let askpass_id = envelope.payload.askpass_id;
1727 let askpass = make_remote_delegate(
1728 this,
1729 envelope.payload.project_id,
1730 repository_id,
1731 askpass_id,
1732 &mut cx,
1733 );
1734
1735 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1736 let remote_name = envelope.payload.remote_name.into();
1737 let rebase = envelope.payload.rebase;
1738
1739 let remote_message = repository_handle
1740 .update(&mut cx, |repository_handle, cx| {
1741 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1742 })?
1743 .await??;
1744
1745 Ok(proto::RemoteMessageResponse {
1746 stdout: remote_message.stdout,
1747 stderr: remote_message.stderr,
1748 })
1749 }
1750
1751 async fn handle_stage(
1752 this: Entity<Self>,
1753 envelope: TypedEnvelope<proto::Stage>,
1754 mut cx: AsyncApp,
1755 ) -> Result<proto::Ack> {
1756 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1757 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1758
1759 let entries = envelope
1760 .payload
1761 .paths
1762 .into_iter()
1763 .map(|path| RepoPath::new(&path))
1764 .collect::<Result<Vec<_>>>()?;
1765
1766 repository_handle
1767 .update(&mut cx, |repository_handle, cx| {
1768 repository_handle.stage_entries(entries, cx)
1769 })?
1770 .await?;
1771 Ok(proto::Ack {})
1772 }
1773
1774 async fn handle_unstage(
1775 this: Entity<Self>,
1776 envelope: TypedEnvelope<proto::Unstage>,
1777 mut cx: AsyncApp,
1778 ) -> Result<proto::Ack> {
1779 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1780 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1781
1782 let entries = envelope
1783 .payload
1784 .paths
1785 .into_iter()
1786 .map(|path| RepoPath::new(&path))
1787 .collect::<Result<Vec<_>>>()?;
1788
1789 repository_handle
1790 .update(&mut cx, |repository_handle, cx| {
1791 repository_handle.unstage_entries(entries, cx)
1792 })?
1793 .await?;
1794
1795 Ok(proto::Ack {})
1796 }
1797
1798 async fn handle_stash(
1799 this: Entity<Self>,
1800 envelope: TypedEnvelope<proto::Stash>,
1801 mut cx: AsyncApp,
1802 ) -> Result<proto::Ack> {
1803 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1804 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1805
1806 let entries = envelope
1807 .payload
1808 .paths
1809 .into_iter()
1810 .map(|path| RepoPath::new(&path))
1811 .collect::<Result<Vec<_>>>()?;
1812
1813 repository_handle
1814 .update(&mut cx, |repository_handle, cx| {
1815 repository_handle.stash_entries(entries, cx)
1816 })?
1817 .await?;
1818
1819 Ok(proto::Ack {})
1820 }
1821
1822 async fn handle_stash_pop(
1823 this: Entity<Self>,
1824 envelope: TypedEnvelope<proto::StashPop>,
1825 mut cx: AsyncApp,
1826 ) -> Result<proto::Ack> {
1827 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1828 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1829 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1830
1831 repository_handle
1832 .update(&mut cx, |repository_handle, cx| {
1833 repository_handle.stash_pop(stash_index, cx)
1834 })?
1835 .await?;
1836
1837 Ok(proto::Ack {})
1838 }
1839
1840 async fn handle_stash_apply(
1841 this: Entity<Self>,
1842 envelope: TypedEnvelope<proto::StashApply>,
1843 mut cx: AsyncApp,
1844 ) -> Result<proto::Ack> {
1845 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1846 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1847 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1848
1849 repository_handle
1850 .update(&mut cx, |repository_handle, cx| {
1851 repository_handle.stash_apply(stash_index, cx)
1852 })?
1853 .await?;
1854
1855 Ok(proto::Ack {})
1856 }
1857
1858 async fn handle_stash_drop(
1859 this: Entity<Self>,
1860 envelope: TypedEnvelope<proto::StashDrop>,
1861 mut cx: AsyncApp,
1862 ) -> Result<proto::Ack> {
1863 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1864 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1865 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1866
1867 repository_handle
1868 .update(&mut cx, |repository_handle, cx| {
1869 repository_handle.stash_drop(stash_index, cx)
1870 })?
1871 .await??;
1872
1873 Ok(proto::Ack {})
1874 }
1875
1876 async fn handle_set_index_text(
1877 this: Entity<Self>,
1878 envelope: TypedEnvelope<proto::SetIndexText>,
1879 mut cx: AsyncApp,
1880 ) -> Result<proto::Ack> {
1881 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1882 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1883 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1884
1885 repository_handle
1886 .update(&mut cx, |repository_handle, cx| {
1887 repository_handle.spawn_set_index_text_job(
1888 repo_path,
1889 envelope.payload.text,
1890 None,
1891 cx,
1892 )
1893 })?
1894 .await??;
1895 Ok(proto::Ack {})
1896 }
1897
1898 async fn handle_commit(
1899 this: Entity<Self>,
1900 envelope: TypedEnvelope<proto::Commit>,
1901 mut cx: AsyncApp,
1902 ) -> Result<proto::Ack> {
1903 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1904 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1905
1906 let message = SharedString::from(envelope.payload.message);
1907 let name = envelope.payload.name.map(SharedString::from);
1908 let email = envelope.payload.email.map(SharedString::from);
1909 let options = envelope.payload.options.unwrap_or_default();
1910
1911 repository_handle
1912 .update(&mut cx, |repository_handle, cx| {
1913 repository_handle.commit(
1914 message,
1915 name.zip(email),
1916 CommitOptions {
1917 amend: options.amend,
1918 signoff: options.signoff,
1919 },
1920 cx,
1921 )
1922 })?
1923 .await??;
1924 Ok(proto::Ack {})
1925 }
1926
1927 async fn handle_get_remotes(
1928 this: Entity<Self>,
1929 envelope: TypedEnvelope<proto::GetRemotes>,
1930 mut cx: AsyncApp,
1931 ) -> Result<proto::GetRemotesResponse> {
1932 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1933 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1934
1935 let branch_name = envelope.payload.branch_name;
1936
1937 let remotes = repository_handle
1938 .update(&mut cx, |repository_handle, _| {
1939 repository_handle.get_remotes(branch_name)
1940 })?
1941 .await??;
1942
1943 Ok(proto::GetRemotesResponse {
1944 remotes: remotes
1945 .into_iter()
1946 .map(|remotes| proto::get_remotes_response::Remote {
1947 name: remotes.name.to_string(),
1948 })
1949 .collect::<Vec<_>>(),
1950 })
1951 }
1952
1953 async fn handle_get_worktrees(
1954 this: Entity<Self>,
1955 envelope: TypedEnvelope<proto::GitGetWorktrees>,
1956 mut cx: AsyncApp,
1957 ) -> Result<proto::GitWorktreesResponse> {
1958 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1959 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1960
1961 let worktrees = repository_handle
1962 .update(&mut cx, |repository_handle, _| {
1963 repository_handle.worktrees()
1964 })?
1965 .await??;
1966
1967 Ok(proto::GitWorktreesResponse {
1968 worktrees: worktrees
1969 .into_iter()
1970 .map(|worktree| worktree_to_proto(&worktree))
1971 .collect::<Vec<_>>(),
1972 })
1973 }
1974
1975 async fn handle_create_worktree(
1976 this: Entity<Self>,
1977 envelope: TypedEnvelope<proto::GitCreateWorktree>,
1978 mut cx: AsyncApp,
1979 ) -> Result<proto::Ack> {
1980 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1981 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1982 let directory = PathBuf::from(envelope.payload.directory);
1983 let name = envelope.payload.name;
1984 let commit = envelope.payload.commit;
1985
1986 repository_handle
1987 .update(&mut cx, |repository_handle, _| {
1988 repository_handle.create_worktree(name, directory, commit)
1989 })?
1990 .await??;
1991
1992 Ok(proto::Ack {})
1993 }
1994
1995 async fn handle_get_branches(
1996 this: Entity<Self>,
1997 envelope: TypedEnvelope<proto::GitGetBranches>,
1998 mut cx: AsyncApp,
1999 ) -> Result<proto::GitBranchesResponse> {
2000 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2001 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2002
2003 let branches = repository_handle
2004 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2005 .await??;
2006
2007 Ok(proto::GitBranchesResponse {
2008 branches: branches
2009 .into_iter()
2010 .map(|branch| branch_to_proto(&branch))
2011 .collect::<Vec<_>>(),
2012 })
2013 }

    async fn handle_get_default_branch(
2015 this: Entity<Self>,
2016 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2017 mut cx: AsyncApp,
2018 ) -> Result<proto::GetDefaultBranchResponse> {
2019 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2020 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2021
2022 let branch = repository_handle
2023 .update(&mut cx, |repository_handle, _| {
2024 repository_handle.default_branch()
2025 })?
2026 .await??
2027 .map(Into::into);
2028
2029 Ok(proto::GetDefaultBranchResponse { branch })
2030 }

    async fn handle_create_branch(
2032 this: Entity<Self>,
2033 envelope: TypedEnvelope<proto::GitCreateBranch>,
2034 mut cx: AsyncApp,
2035 ) -> Result<proto::Ack> {
2036 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2037 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2038 let branch_name = envelope.payload.branch_name;
2039
2040 repository_handle
2041 .update(&mut cx, |repository_handle, _| {
2042 repository_handle.create_branch(branch_name)
2043 })?
2044 .await??;
2045
2046 Ok(proto::Ack {})
2047 }
2048
2049 async fn handle_change_branch(
2050 this: Entity<Self>,
2051 envelope: TypedEnvelope<proto::GitChangeBranch>,
2052 mut cx: AsyncApp,
2053 ) -> Result<proto::Ack> {
2054 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2055 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2056 let branch_name = envelope.payload.branch_name;
2057
2058 repository_handle
2059 .update(&mut cx, |repository_handle, _| {
2060 repository_handle.change_branch(branch_name)
2061 })?
2062 .await??;
2063
2064 Ok(proto::Ack {})
2065 }
2066
2067 async fn handle_rename_branch(
2068 this: Entity<Self>,
2069 envelope: TypedEnvelope<proto::GitRenameBranch>,
2070 mut cx: AsyncApp,
2071 ) -> Result<proto::Ack> {
2072 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2073 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2074 let branch = envelope.payload.branch;
2075 let new_name = envelope.payload.new_name;
2076
2077 repository_handle
2078 .update(&mut cx, |repository_handle, _| {
2079 repository_handle.rename_branch(branch, new_name)
2080 })?
2081 .await??;
2082
2083 Ok(proto::Ack {})
2084 }
2085
2086 async fn handle_show(
2087 this: Entity<Self>,
2088 envelope: TypedEnvelope<proto::GitShow>,
2089 mut cx: AsyncApp,
2090 ) -> Result<proto::GitCommitDetails> {
2091 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2092 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2093
2094 let commit = repository_handle
2095 .update(&mut cx, |repository_handle, _| {
2096 repository_handle.show(envelope.payload.commit)
2097 })?
2098 .await??;
2099 Ok(proto::GitCommitDetails {
2100 sha: commit.sha.into(),
2101 message: commit.message.into(),
2102 commit_timestamp: commit.commit_timestamp,
2103 author_email: commit.author_email.into(),
2104 author_name: commit.author_name.into(),
2105 })
2106 }
2107
2108 async fn handle_load_commit_diff(
2109 this: Entity<Self>,
2110 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2111 mut cx: AsyncApp,
2112 ) -> Result<proto::LoadCommitDiffResponse> {
2113 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2114 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2115
2116 let commit_diff = repository_handle
2117 .update(&mut cx, |repository_handle, _| {
2118 repository_handle.load_commit_diff(envelope.payload.commit)
2119 })?
2120 .await??;
2121 Ok(proto::LoadCommitDiffResponse {
2122 files: commit_diff
2123 .files
2124 .into_iter()
2125 .map(|file| proto::CommitFile {
2126 path: file.path.to_proto(),
2127 old_text: file.old_text,
2128 new_text: file.new_text,
2129 })
2130 .collect(),
2131 })
2132 }
2133
2134 async fn handle_reset(
2135 this: Entity<Self>,
2136 envelope: TypedEnvelope<proto::GitReset>,
2137 mut cx: AsyncApp,
2138 ) -> Result<proto::Ack> {
2139 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2140 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2141
2142 let mode = match envelope.payload.mode() {
2143 git_reset::ResetMode::Soft => ResetMode::Soft,
2144 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2145 };
2146
2147 repository_handle
2148 .update(&mut cx, |repository_handle, cx| {
2149 repository_handle.reset(envelope.payload.commit, mode, cx)
2150 })?
2151 .await??;
2152 Ok(proto::Ack {})
2153 }
2154
2155 async fn handle_checkout_files(
2156 this: Entity<Self>,
2157 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2158 mut cx: AsyncApp,
2159 ) -> Result<proto::Ack> {
2160 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2161 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2162 let paths = envelope
2163 .payload
2164 .paths
2165 .iter()
2166 .map(|s| RepoPath::from_proto(s))
2167 .collect::<Result<Vec<_>>>()?;
2168
2169 repository_handle
2170 .update(&mut cx, |repository_handle, cx| {
2171 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2172 })?
2173 .await?;
2174 Ok(proto::Ack {})
2175 }
2176
2177 async fn handle_open_commit_message_buffer(
2178 this: Entity<Self>,
2179 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2180 mut cx: AsyncApp,
2181 ) -> Result<proto::OpenBufferResponse> {
2182 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2183 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2184 let buffer = repository
2185 .update(&mut cx, |repository, cx| {
2186 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2187 })?
2188 .await?;
2189
2190 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2191 this.update(&mut cx, |this, cx| {
2192 this.buffer_store.update(cx, |buffer_store, cx| {
2193 buffer_store
2194 .create_buffer_for_peer(
2195 &buffer,
2196 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2197 cx,
2198 )
2199 .detach_and_log_err(cx);
2200 })
2201 })?;
2202
2203 Ok(proto::OpenBufferResponse {
2204 buffer_id: buffer_id.to_proto(),
2205 })
2206 }
2207
2208 async fn handle_askpass(
2209 this: Entity<Self>,
2210 envelope: TypedEnvelope<proto::AskPassRequest>,
2211 mut cx: AsyncApp,
2212 ) -> Result<proto::AskPassResponse> {
2213 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2214 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2215
2216 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2217 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2218 debug_panic!("no askpass found");
2219 anyhow::bail!("no askpass found");
2220 };
2221
2222 let response = askpass
2223 .ask_password(envelope.payload.prompt)
2224 .await
2225 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2226
2227 delegates
2228 .lock()
2229 .insert(envelope.payload.askpass_id, askpass);
2230
        // Note: despite the `IKnowWhatIAmDoingAndIHaveReadTheDocs` marker below, the askpass
        // password is sent back unencrypted here.
2232 Ok(proto::AskPassResponse {
2233 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2234 })
2235 }
2236
2237 async fn handle_check_for_pushed_commits(
2238 this: Entity<Self>,
2239 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2240 mut cx: AsyncApp,
2241 ) -> Result<proto::CheckForPushedCommitsResponse> {
2242 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2243 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2244
2245 let branches = repository_handle
2246 .update(&mut cx, |repository_handle, _| {
2247 repository_handle.check_for_pushed_commits()
2248 })?
2249 .await??;
2250 Ok(proto::CheckForPushedCommitsResponse {
2251 pushed_to: branches
2252 .into_iter()
2253 .map(|commit| commit.to_string())
2254 .collect(),
2255 })
2256 }
2257
2258 async fn handle_git_diff(
2259 this: Entity<Self>,
2260 envelope: TypedEnvelope<proto::GitDiff>,
2261 mut cx: AsyncApp,
2262 ) -> Result<proto::GitDiffResponse> {
2263 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2264 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2265 let diff_type = match envelope.payload.diff_type() {
2266 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2267 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2268 };
2269
2270 let mut diff = repository_handle
2271 .update(&mut cx, |repository_handle, cx| {
2272 repository_handle.diff(diff_type, cx)
2273 })?
2274 .await??;
        // Cap very large diffs so the response stays a reasonable size.
        const ONE_MB: usize = 1_000_000;
        if diff.len() > ONE_MB {
            diff = diff.chars().take(ONE_MB).collect();
        }
2279
2280 Ok(proto::GitDiffResponse { diff })
2281 }
2282
2283 async fn handle_tree_diff(
2284 this: Entity<Self>,
2285 request: TypedEnvelope<proto::GetTreeDiff>,
2286 mut cx: AsyncApp,
2287 ) -> Result<proto::GetTreeDiffResponse> {
2288 let repository_id = RepositoryId(request.payload.repository_id);
2289 let diff_type = if request.payload.is_merge {
2290 DiffTreeType::MergeBase {
2291 base: request.payload.base.into(),
2292 head: request.payload.head.into(),
2293 }
2294 } else {
2295 DiffTreeType::Since {
2296 base: request.payload.base.into(),
2297 head: request.payload.head.into(),
2298 }
2299 };
2300
2301 let diff = this
2302 .update(&mut cx, |this, cx| {
2303 let repository = this.repositories().get(&repository_id)?;
2304 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2305 })?
2306 .context("missing repository")?
2307 .await??;
2308
2309 Ok(proto::GetTreeDiffResponse {
2310 entries: diff
2311 .entries
2312 .into_iter()
2313 .map(|(path, status)| proto::TreeDiffStatus {
2314 path: path.0.to_proto(),
2315 status: match status {
2316 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2317 TreeDiffStatus::Modified { .. } => {
2318 proto::tree_diff_status::Status::Modified.into()
2319 }
2320 TreeDiffStatus::Deleted { .. } => {
2321 proto::tree_diff_status::Status::Deleted.into()
2322 }
2323 },
2324 oid: match status {
2325 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2326 Some(old.to_string())
2327 }
2328 TreeDiffStatus::Added => None,
2329 },
2330 })
2331 .collect(),
2332 })
2333 }
2334
2335 async fn handle_get_blob_content(
2336 this: Entity<Self>,
2337 request: TypedEnvelope<proto::GetBlobContent>,
2338 mut cx: AsyncApp,
2339 ) -> Result<proto::GetBlobContentResponse> {
2340 let oid = git::Oid::from_str(&request.payload.oid)?;
2341 let repository_id = RepositoryId(request.payload.repository_id);
2342 let content = this
2343 .update(&mut cx, |this, cx| {
2344 let repository = this.repositories().get(&repository_id)?;
2345 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2346 })?
2347 .context("missing repository")?
2348 .await?;
2349 Ok(proto::GetBlobContentResponse { content })
2350 }
2351
2352 async fn handle_open_unstaged_diff(
2353 this: Entity<Self>,
2354 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2355 mut cx: AsyncApp,
2356 ) -> Result<proto::OpenUnstagedDiffResponse> {
2357 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2358 let diff = this
2359 .update(&mut cx, |this, cx| {
2360 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2361 Some(this.open_unstaged_diff(buffer, cx))
2362 })?
2363 .context("missing buffer")?
2364 .await?;
2365 this.update(&mut cx, |this, _| {
2366 let shared_diffs = this
2367 .shared_diffs
2368 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2369 .or_default();
2370 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2371 })?;
2372 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2373 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2374 }
2375
2376 async fn handle_open_uncommitted_diff(
2377 this: Entity<Self>,
2378 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2379 mut cx: AsyncApp,
2380 ) -> Result<proto::OpenUncommittedDiffResponse> {
2381 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2382 let diff = this
2383 .update(&mut cx, |this, cx| {
2384 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2385 Some(this.open_uncommitted_diff(buffer, cx))
2386 })?
2387 .context("missing buffer")?
2388 .await?;
2389 this.update(&mut cx, |this, _| {
2390 let shared_diffs = this
2391 .shared_diffs
2392 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2393 .or_default();
2394 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2395 })?;
2396 diff.read_with(&cx, |diff, cx| {
2397 use proto::open_uncommitted_diff_response::Mode;
2398
2399 let unstaged_diff = diff.secondary_diff();
2400 let index_snapshot = unstaged_diff.and_then(|diff| {
2401 let diff = diff.read(cx);
2402 diff.base_text_exists().then(|| diff.base_text())
2403 });
2404
2405 let mode;
2406 let staged_text;
2407 let committed_text;
2408 if diff.base_text_exists() {
2409 let committed_snapshot = diff.base_text();
2410 committed_text = Some(committed_snapshot.text());
2411 if let Some(index_text) = index_snapshot {
2412 if index_text.remote_id() == committed_snapshot.remote_id() {
2413 mode = Mode::IndexMatchesHead;
2414 staged_text = None;
2415 } else {
2416 mode = Mode::IndexAndHead;
2417 staged_text = Some(index_text.text());
2418 }
2419 } else {
2420 mode = Mode::IndexAndHead;
2421 staged_text = None;
2422 }
2423 } else {
2424 mode = Mode::IndexAndHead;
2425 committed_text = None;
2426 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2427 }
2428
2429 proto::OpenUncommittedDiffResponse {
2430 committed_text,
2431 staged_text,
2432 mode: mode.into(),
2433 }
2434 })
2435 }
2436
2437 async fn handle_update_diff_bases(
2438 this: Entity<Self>,
2439 request: TypedEnvelope<proto::UpdateDiffBases>,
2440 mut cx: AsyncApp,
2441 ) -> Result<()> {
2442 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2443 this.update(&mut cx, |this, cx| {
2444 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2445 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2446 {
2447 let buffer = buffer.read(cx).text_snapshot();
2448 diff_state.update(cx, |diff_state, cx| {
2449 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2450 })
2451 }
2452 })
2453 }
2454
2455 async fn handle_blame_buffer(
2456 this: Entity<Self>,
2457 envelope: TypedEnvelope<proto::BlameBuffer>,
2458 mut cx: AsyncApp,
2459 ) -> Result<proto::BlameBufferResponse> {
2460 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2461 let version = deserialize_version(&envelope.payload.version);
2462 let buffer = this.read_with(&cx, |this, cx| {
2463 this.buffer_store.read(cx).get_existing(buffer_id)
2464 })??;
2465 buffer
2466 .update(&mut cx, |buffer, _| {
2467 buffer.wait_for_version(version.clone())
2468 })?
2469 .await?;
2470 let blame = this
2471 .update(&mut cx, |this, cx| {
2472 this.blame_buffer(&buffer, Some(version), cx)
2473 })?
2474 .await?;
2475 Ok(serialize_blame_buffer_response(blame))
2476 }
2477
2478 async fn handle_get_permalink_to_line(
2479 this: Entity<Self>,
2480 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2481 mut cx: AsyncApp,
2482 ) -> Result<proto::GetPermalinkToLineResponse> {
2483 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2484 // let version = deserialize_version(&envelope.payload.version);
2485 let selection = {
2486 let proto_selection = envelope
2487 .payload
2488 .selection
2489 .context("no selection to get permalink for defined")?;
2490 proto_selection.start as u32..proto_selection.end as u32
2491 };
2492 let buffer = this.read_with(&cx, |this, cx| {
2493 this.buffer_store.read(cx).get_existing(buffer_id)
2494 })??;
2495 let permalink = this
2496 .update(&mut cx, |this, cx| {
2497 this.get_permalink_to_line(&buffer, selection, cx)
2498 })?
2499 .await?;
2500 Ok(proto::GetPermalinkToLineResponse {
2501 permalink: permalink.to_string(),
2502 })
2503 }
2504
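    /// Looks up the repository entity referenced by an incoming request, failing if it is no
    /// longer tracked by this store.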
2505 fn repository_for_request(
2506 this: &Entity<Self>,
2507 id: RepositoryId,
2508 cx: &mut AsyncApp,
2509 ) -> Result<Entity<Repository>> {
2510 this.read_with(cx, |this, _| {
2511 this.repositories
2512 .get(&id)
2513 .context("missing repository handle")
2514 .cloned()
2515 })?
2516 }
2517
2518 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2519 self.repositories
2520 .iter()
2521 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2522 .collect()
2523 }
2524
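    /// Assigns each updated worktree entry to the repository whose work directory contains it
    /// (preferring the innermost repository) and converts it to a `RepoPath`, on background tasks.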
2525 fn process_updated_entries(
2526 &self,
2527 worktree: &Entity<Worktree>,
2528 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2529 cx: &mut App,
2530 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2531 let path_style = worktree.read(cx).path_style();
2532 let mut repo_paths = self
2533 .repositories
2534 .values()
2535 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2536 .collect::<Vec<_>>();
2537 let mut entries: Vec<_> = updated_entries
2538 .iter()
2539 .map(|(path, _, _)| path.clone())
2540 .collect();
2541 entries.sort();
2542 let worktree = worktree.read(cx);
2543
2544 let entries = entries
2545 .into_iter()
2546 .map(|path| worktree.absolutize(&path))
2547 .collect::<Arc<[_]>>();
2548
2549 let executor = cx.background_executor().clone();
2550 cx.background_executor().spawn(async move {
2551 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2552 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2553 let mut tasks = FuturesOrdered::new();
2554 for (repo_path, repo) in repo_paths.into_iter().rev() {
2555 let entries = entries.clone();
2556 let task = executor.spawn(async move {
                    // Find all updated paths that fall under this repository's work directory.
2558 let mut ix = entries.partition_point(|path| path < &*repo_path);
2559 if ix == entries.len() {
2560 return None;
2561 };
2562
2563 let mut paths = Vec::new();
                    // All paths prefixed by a given repo's work directory form a contiguous range
                    // in the sorted list.
2565 while let Some(path) = entries.get(ix)
2566 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2567 &repo_path, path, path_style,
2568 )
2569 {
2570 paths.push((repo_path, ix));
2571 ix += 1;
2572 }
2573 if paths.is_empty() {
2574 None
2575 } else {
2576 Some((repo, paths))
2577 }
2578 });
2579 tasks.push_back(task);
2580 }
2581
2582 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2583 let mut path_was_used = vec![false; entries.len()];
2584 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were pushed in reverse sorted order, so more deeply nested work directories
            // are seen first: we always want to assign a path to its innermost repository.
2587 for t in tasks {
2588 let Some((repo, paths)) = t else {
2589 continue;
2590 };
2591 let entry = paths_by_git_repo.entry(repo).or_default();
2592 for (repo_path, ix) in paths {
2593 if path_was_used[ix] {
2594 continue;
2595 }
2596 path_was_used[ix] = true;
2597 entry.push(repo_path);
2598 }
2599 }
2600
2601 paths_by_git_repo
2602 })
2603 }
2604}
2605
2606impl BufferGitState {
2607 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2608 Self {
2609 unstaged_diff: Default::default(),
2610 uncommitted_diff: Default::default(),
2611 recalculate_diff_task: Default::default(),
2612 language: Default::default(),
2613 language_registry: Default::default(),
2614 recalculating_tx: postage::watch::channel_with(false).0,
2615 hunk_staging_operation_count: 0,
2616 hunk_staging_operation_count_as_of_write: 0,
2617 head_text: Default::default(),
2618 index_text: Default::default(),
2619 head_changed: Default::default(),
2620 index_changed: Default::default(),
2621 language_changed: Default::default(),
2622 conflict_updated_futures: Default::default(),
2623 conflict_set: Default::default(),
2624 reparse_conflict_markers_task: Default::default(),
2625 }
2626 }
2627
2628 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2629 self.language = buffer.read(cx).language().cloned();
2630 self.language_changed = true;
2631 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2632 }
2633
2634 fn reparse_conflict_markers(
2635 &mut self,
2636 buffer: text::BufferSnapshot,
2637 cx: &mut Context<Self>,
2638 ) -> oneshot::Receiver<()> {
2639 let (tx, rx) = oneshot::channel();
2640
2641 let Some(conflict_set) = self
2642 .conflict_set
2643 .as_ref()
2644 .and_then(|conflict_set| conflict_set.upgrade())
2645 else {
2646 return rx;
2647 };
2648
2649 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2650 if conflict_set.has_conflict {
2651 Some(conflict_set.snapshot())
2652 } else {
2653 None
2654 }
2655 });
2656
2657 if let Some(old_snapshot) = old_snapshot {
2658 self.conflict_updated_futures.push(tx);
2659 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2660 let (snapshot, changed_range) = cx
2661 .background_spawn(async move {
2662 let new_snapshot = ConflictSet::parse(&buffer);
2663 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2664 (new_snapshot, changed_range)
2665 })
2666 .await;
2667 this.update(cx, |this, cx| {
2668 if let Some(conflict_set) = &this.conflict_set {
2669 conflict_set
2670 .update(cx, |conflict_set, cx| {
2671 conflict_set.set_snapshot(snapshot, changed_range, cx);
2672 })
2673 .ok();
2674 }
2675 let futures = std::mem::take(&mut this.conflict_updated_futures);
2676 for tx in futures {
2677 tx.send(()).ok();
2678 }
2679 })
2680 }))
2681 }
2682
2683 rx
2684 }
2685
2686 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2687 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2688 }
2689
2690 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2691 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2692 }
2693
2694 fn handle_base_texts_updated(
2695 &mut self,
2696 buffer: text::BufferSnapshot,
2697 message: proto::UpdateDiffBases,
2698 cx: &mut Context<Self>,
2699 ) {
2700 use proto::update_diff_bases::Mode;
2701
2702 let Some(mode) = Mode::from_i32(message.mode) else {
2703 return;
2704 };
2705
2706 let diff_bases_change = match mode {
2707 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2708 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2709 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2710 Mode::IndexAndHead => DiffBasesChange::SetEach {
2711 index: message.staged_text,
2712 head: message.committed_text,
2713 },
2714 };
2715
2716 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2717 }
2718
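    /// Returns a future that resolves once the in-flight diff recalculation finishes, or `None`
    /// if no recalculation is currently running.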
2719 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2720 if *self.recalculating_tx.borrow() {
2721 let mut rx = self.recalculating_tx.subscribe();
2722 Some(async move {
2723 loop {
2724 let is_recalculating = rx.recv().await;
2725 if is_recalculating != Some(true) {
2726 break;
2727 }
2728 }
2729 })
2730 } else {
2731 None
2732 }
2733 }
2734
2735 fn diff_bases_changed(
2736 &mut self,
2737 buffer: text::BufferSnapshot,
2738 diff_bases_change: Option<DiffBasesChange>,
2739 cx: &mut Context<Self>,
2740 ) {
2741 match diff_bases_change {
2742 Some(DiffBasesChange::SetIndex(index)) => {
2743 self.index_text = index.map(|mut index| {
2744 text::LineEnding::normalize(&mut index);
2745 Arc::new(index)
2746 });
2747 self.index_changed = true;
2748 }
2749 Some(DiffBasesChange::SetHead(head)) => {
2750 self.head_text = head.map(|mut head| {
2751 text::LineEnding::normalize(&mut head);
2752 Arc::new(head)
2753 });
2754 self.head_changed = true;
2755 }
2756 Some(DiffBasesChange::SetBoth(text)) => {
2757 let text = text.map(|mut text| {
2758 text::LineEnding::normalize(&mut text);
2759 Arc::new(text)
2760 });
2761 self.head_text = text.clone();
2762 self.index_text = text;
2763 self.head_changed = true;
2764 self.index_changed = true;
2765 }
2766 Some(DiffBasesChange::SetEach { index, head }) => {
2767 self.index_text = index.map(|mut index| {
2768 text::LineEnding::normalize(&mut index);
2769 Arc::new(index)
2770 });
2771 self.index_changed = true;
2772 self.head_text = head.map(|mut head| {
2773 text::LineEnding::normalize(&mut head);
2774 Arc::new(head)
2775 });
2776 self.head_changed = true;
2777 }
2778 None => {}
2779 }
2780
2781 self.recalculate_diffs(buffer, cx)
2782 }
2783
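    /// Recomputes the unstaged and uncommitted diffs against the current index and HEAD texts,
    /// cancelling if further hunk staging operations occur before the results are applied.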
2784 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2785 *self.recalculating_tx.borrow_mut() = true;
2786
2787 let language = self.language.clone();
2788 let language_registry = self.language_registry.clone();
2789 let unstaged_diff = self.unstaged_diff();
2790 let uncommitted_diff = self.uncommitted_diff();
2791 let head = self.head_text.clone();
2792 let index = self.index_text.clone();
2793 let index_changed = self.index_changed;
2794 let head_changed = self.head_changed;
2795 let language_changed = self.language_changed;
2796 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2797 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2798 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2799 (None, None) => true,
2800 _ => false,
2801 };
2802 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2803 log::debug!(
2804 "start recalculating diffs for buffer {}",
2805 buffer.remote_id()
2806 );
2807
2808 let mut new_unstaged_diff = None;
2809 if let Some(unstaged_diff) = &unstaged_diff {
2810 new_unstaged_diff = Some(
2811 BufferDiff::update_diff(
2812 unstaged_diff.clone(),
2813 buffer.clone(),
2814 index,
2815 index_changed,
2816 language_changed,
2817 language.clone(),
2818 language_registry.clone(),
2819 cx,
2820 )
2821 .await?,
2822 );
2823 }
2824
2825 let mut new_uncommitted_diff = None;
2826 if let Some(uncommitted_diff) = &uncommitted_diff {
2827 new_uncommitted_diff = if index_matches_head {
2828 new_unstaged_diff.clone()
2829 } else {
2830 Some(
2831 BufferDiff::update_diff(
2832 uncommitted_diff.clone(),
2833 buffer.clone(),
2834 head,
2835 head_changed,
2836 language_changed,
2837 language.clone(),
2838 language_registry.clone(),
2839 cx,
2840 )
2841 .await?,
2842 )
2843 }
2844 }
2845
2846 let cancel = this.update(cx, |this, _| {
2847 // This checks whether all pending stage/unstage operations
2848 // have quiesced (i.e. both the corresponding write and the
2849 // read of that write have completed). If not, then we cancel
2850 // this recalculation attempt to avoid invalidating pending
2851 // state too quickly; another recalculation will come along
2852 // later and clear the pending state once the state of the index has settled.
2853 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2854 *this.recalculating_tx.borrow_mut() = false;
2855 true
2856 } else {
2857 false
2858 }
2859 })?;
2860 if cancel {
2861 log::debug!(
2862 concat!(
                        "aborting recalculating diffs for buffer {} ",
2864 "due to subsequent hunk operations",
2865 ),
2866 buffer.remote_id()
2867 );
2868 return Ok(());
2869 }
2870
2871 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2872 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2873 {
2874 unstaged_diff.update(cx, |diff, cx| {
2875 if language_changed {
2876 diff.language_changed(cx);
2877 }
2878 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2879 })?
2880 } else {
2881 None
2882 };
2883
2884 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2885 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2886 {
2887 uncommitted_diff.update(cx, |diff, cx| {
2888 if language_changed {
2889 diff.language_changed(cx);
2890 }
2891 diff.set_snapshot_with_secondary(
2892 new_uncommitted_diff,
2893 &buffer,
2894 unstaged_changed_range,
2895 true,
2896 cx,
2897 );
2898 })?;
2899 }
2900
2901 log::debug!(
2902 "finished recalculating diffs for buffer {}",
2903 buffer.remote_id()
2904 );
2905
2906 if let Some(this) = this.upgrade() {
2907 this.update(cx, |this, _| {
2908 this.index_changed = false;
2909 this.head_changed = false;
2910 this.language_changed = false;
2911 *this.recalculating_tx.borrow_mut() = false;
2912 })?;
2913 }
2914
2915 Ok(())
2916 }));
2917 }
2918}
2919
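/// Creates an askpass delegate that forwards credential prompts to the downstream client over
/// RPC, hands the response back through the delegate's channel, and zeroizes the plaintext
/// afterwards.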
2920fn make_remote_delegate(
2921 this: Entity<GitStore>,
2922 project_id: u64,
2923 repository_id: RepositoryId,
2924 askpass_id: u64,
2925 cx: &mut AsyncApp,
2926) -> AskPassDelegate {
2927 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2928 this.update(cx, |this, cx| {
2929 let Some((client, _)) = this.downstream_client() else {
2930 return;
2931 };
2932 let response = client.request(proto::AskPassRequest {
2933 project_id,
2934 repository_id: repository_id.to_proto(),
2935 askpass_id,
2936 prompt,
2937 });
2938 cx.spawn(async move |_, _| {
2939 let mut response = response.await?.response;
2940 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2941 .ok();
2942 response.zeroize();
2943 anyhow::Ok(())
2944 })
2945 .detach_and_log_err(cx);
2946 })
2947 .log_err();
2948 })
2949}
2950
2951impl RepositoryId {
2952 pub fn to_proto(self) -> u64 {
2953 self.0
2954 }
2955
2956 pub fn from_proto(id: u64) -> Self {
2957 RepositoryId(id)
2958 }
2959}
2960
2961impl RepositorySnapshot {
2962 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2963 Self {
2964 id,
2965 statuses_by_path: Default::default(),
2966 pending_ops_by_path: Default::default(),
2967 work_directory_abs_path,
2968 branch: None,
2969 head_commit: None,
2970 scan_id: 0,
2971 merge: Default::default(),
2972 remote_origin_url: None,
2973 remote_upstream_url: None,
2974 stash_entries: Default::default(),
2975 path_style,
2976 }
2977 }
2978
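    /// Builds a full `UpdateRepository` message describing this snapshot from scratch: every
    /// status entry is included and nothing is marked as removed.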
2979 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2980 proto::UpdateRepository {
2981 branch_summary: self.branch.as_ref().map(branch_to_proto),
2982 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2983 updated_statuses: self
2984 .statuses_by_path
2985 .iter()
2986 .map(|entry| entry.to_proto())
2987 .collect(),
2988 removed_statuses: Default::default(),
2989 current_merge_conflicts: self
2990 .merge
2991 .conflicted_paths
2992 .iter()
2993 .map(|repo_path| repo_path.to_proto())
2994 .collect(),
2995 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2996 project_id,
2997 id: self.id.to_proto(),
2998 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2999 entry_ids: vec![self.id.to_proto()],
3000 scan_id: self.scan_id,
3001 is_last_update: true,
3002 stash_entries: self
3003 .stash_entries
3004 .entries
3005 .iter()
3006 .map(stash_to_proto)
3007 .collect(),
3008 }
3009 }
3010
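    /// Builds an incremental `UpdateRepository` message by walking this snapshot's and `old`'s
    /// sorted status entries in lockstep, collecting updated and removed statuses.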
3011 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3012 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3013 let mut removed_statuses: Vec<String> = Vec::new();
3014
3015 let mut new_statuses = self.statuses_by_path.iter().peekable();
3016 let mut old_statuses = old.statuses_by_path.iter().peekable();
3017
3018 let mut current_new_entry = new_statuses.next();
3019 let mut current_old_entry = old_statuses.next();
3020 loop {
3021 match (current_new_entry, current_old_entry) {
3022 (Some(new_entry), Some(old_entry)) => {
3023 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3024 Ordering::Less => {
3025 updated_statuses.push(new_entry.to_proto());
3026 current_new_entry = new_statuses.next();
3027 }
3028 Ordering::Equal => {
3029 if new_entry.status != old_entry.status {
3030 updated_statuses.push(new_entry.to_proto());
3031 }
3032 current_old_entry = old_statuses.next();
3033 current_new_entry = new_statuses.next();
3034 }
3035 Ordering::Greater => {
3036 removed_statuses.push(old_entry.repo_path.to_proto());
3037 current_old_entry = old_statuses.next();
3038 }
3039 }
3040 }
3041 (None, Some(old_entry)) => {
3042 removed_statuses.push(old_entry.repo_path.to_proto());
3043 current_old_entry = old_statuses.next();
3044 }
3045 (Some(new_entry), None) => {
3046 updated_statuses.push(new_entry.to_proto());
3047 current_new_entry = new_statuses.next();
3048 }
3049 (None, None) => break,
3050 }
3051 }
3052
3053 proto::UpdateRepository {
3054 branch_summary: self.branch.as_ref().map(branch_to_proto),
3055 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3056 updated_statuses,
3057 removed_statuses,
3058 current_merge_conflicts: self
3059 .merge
3060 .conflicted_paths
3061 .iter()
3062 .map(|path| path.to_proto())
3063 .collect(),
3064 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3065 project_id,
3066 id: self.id.to_proto(),
3067 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3068 entry_ids: vec![],
3069 scan_id: self.scan_id,
3070 is_last_update: true,
3071 stash_entries: self
3072 .stash_entries
3073 .entries
3074 .iter()
3075 .map(stash_to_proto)
3076 .collect(),
3077 }
3078 }
3079
3080 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3081 self.statuses_by_path.iter().cloned()
3082 }
3083
3084 pub fn status_summary(&self) -> GitSummary {
3085 self.statuses_by_path.summary().item_summary
3086 }
3087
3088 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3089 self.statuses_by_path
3090 .get(&PathKey(path.0.clone()), ())
3091 .cloned()
3092 }
3093
3094 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3095 self.pending_ops_by_path
3096 .get(&PathKey(path.0.clone()), ())
3097 .cloned()
3098 }
3099
3100 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3101 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3102 }
3103
3104 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3105 self.path_style
3106 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3107 .unwrap()
3108 .into()
3109 }
3110
3111 #[inline]
3112 fn abs_path_to_repo_path_inner(
3113 work_directory_abs_path: &Path,
3114 abs_path: &Path,
3115 path_style: PathStyle,
3116 ) -> Option<RepoPath> {
3117 abs_path
3118 .strip_prefix(&work_directory_abs_path)
3119 .ok()
3120 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3121 }
3122
3123 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3124 self.merge.conflicted_paths.contains(repo_path)
3125 }
3126
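    /// Returns true if the path is currently conflicted according to its status entry, or was
    /// conflicted the last time the merge heads changed.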
3127 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3128 let had_conflict_on_last_merge_head_change =
3129 self.merge.conflicted_paths.contains(repo_path);
3130 let has_conflict_currently = self
3131 .status_for_path(repo_path)
3132 .is_some_and(|entry| entry.status.is_conflicted());
3133 had_conflict_on_last_merge_head_change || has_conflict_currently
3134 }
3135
3136 /// This is the name that will be displayed in the repository selector for this repository.
3137 pub fn display_name(&self) -> SharedString {
3138 self.work_directory_abs_path
3139 .file_name()
3140 .unwrap_or_default()
3141 .to_string_lossy()
3142 .to_string()
3143 .into()
3144 }
3145}
3146
3147pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3148 proto::StashEntry {
3149 oid: entry.oid.as_bytes().to_vec(),
3150 message: entry.message.clone(),
3151 branch: entry.branch.clone(),
3152 index: entry.index as u64,
3153 timestamp: entry.timestamp,
3154 }
3155}
3156
3157pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3158 Ok(StashEntry {
3159 oid: Oid::from_bytes(&entry.oid)?,
3160 message: entry.message.clone(),
3161 index: entry.index as usize,
3162 branch: entry.branch.clone(),
3163 timestamp: entry.timestamp,
3164 })
3165}
3166
3167impl MergeDetails {
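    /// Loads the merge message and merge-related heads (MERGE_HEAD, CHERRY_PICK_HEAD, REBASE_HEAD,
    /// REVERT_HEAD, APPLY_HEAD), recomputing the set of conflicted paths only when those heads
    /// have changed since the previous snapshot.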
3168 async fn load(
3169 backend: &Arc<dyn GitRepository>,
3170 status: &SumTree<StatusEntry>,
3171 prev_snapshot: &RepositorySnapshot,
3172 ) -> Result<(MergeDetails, bool)> {
3173 log::debug!("load merge details");
3174 let message = backend.merge_message().await;
3175 let heads = backend
3176 .revparse_batch(vec![
3177 "MERGE_HEAD".into(),
3178 "CHERRY_PICK_HEAD".into(),
3179 "REBASE_HEAD".into(),
3180 "REVERT_HEAD".into(),
3181 "APPLY_HEAD".into(),
3182 ])
3183 .await
3184 .log_err()
3185 .unwrap_or_default()
3186 .into_iter()
3187 .map(|opt| opt.map(SharedString::from))
3188 .collect::<Vec<_>>();
3189 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3190 let conflicted_paths = if merge_heads_changed {
3191 let current_conflicted_paths = TreeSet::from_ordered_entries(
3192 status
3193 .iter()
3194 .filter(|entry| entry.status.is_conflicted())
3195 .map(|entry| entry.repo_path.clone()),
3196 );
3197
3198 // It can happen that we run a scan while a lengthy merge is in progress
3199 // that will eventually result in conflicts, but before those conflicts
3200 // are reported by `git status`. Since for the moment we only care about
3201 // the merge heads state for the purposes of tracking conflicts, don't update
3202 // this state until we see some conflicts.
3203 if heads.iter().any(Option::is_some)
3204 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3205 && current_conflicted_paths.is_empty()
3206 {
3207 log::debug!("not updating merge heads because no conflicts found");
3208 return Ok((
3209 MergeDetails {
3210 message: message.map(SharedString::from),
3211 ..prev_snapshot.merge.clone()
3212 },
3213 false,
3214 ));
3215 }
3216
3217 current_conflicted_paths
3218 } else {
3219 prev_snapshot.merge.conflicted_paths.clone()
3220 };
3221 let details = MergeDetails {
3222 conflicted_paths,
3223 message: message.map(SharedString::from),
3224 heads,
3225 };
3226 Ok((details, merge_heads_changed))
3227 }
3228}
3229
3230impl Repository {
3231 pub fn snapshot(&self) -> RepositorySnapshot {
3232 self.snapshot.clone()
3233 }
3234
3235 fn local(
3236 id: RepositoryId,
3237 work_directory_abs_path: Arc<Path>,
3238 dot_git_abs_path: Arc<Path>,
3239 repository_dir_abs_path: Arc<Path>,
3240 common_dir_abs_path: Arc<Path>,
3241 project_environment: WeakEntity<ProjectEnvironment>,
3242 fs: Arc<dyn Fs>,
3243 git_store: WeakEntity<GitStore>,
3244 cx: &mut Context<Self>,
3245 ) -> Self {
3246 let snapshot =
3247 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3248 Repository {
3249 this: cx.weak_entity(),
3250 git_store,
3251 snapshot,
3252 commit_message_buffer: None,
3253 askpass_delegates: Default::default(),
3254 paths_needing_status_update: Default::default(),
3255 latest_askpass_id: 0,
3256 job_sender: Repository::spawn_local_git_worker(
3257 work_directory_abs_path,
3258 dot_git_abs_path,
3259 repository_dir_abs_path,
3260 common_dir_abs_path,
3261 project_environment,
3262 fs,
3263 cx,
3264 ),
3265 job_id: 0,
3266 active_jobs: Default::default(),
3267 }
3268 }
3269
3270 fn remote(
3271 id: RepositoryId,
3272 work_directory_abs_path: Arc<Path>,
3273 path_style: PathStyle,
3274 project_id: ProjectId,
3275 client: AnyProtoClient,
3276 git_store: WeakEntity<GitStore>,
3277 cx: &mut Context<Self>,
3278 ) -> Self {
3279 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3280 Self {
3281 this: cx.weak_entity(),
3282 snapshot,
3283 commit_message_buffer: None,
3284 git_store,
3285 paths_needing_status_update: Default::default(),
3286 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3287 askpass_delegates: Default::default(),
3288 latest_askpass_id: 0,
3289 active_jobs: Default::default(),
3290 job_id: 0,
3291 }
3292 }
3293
3294 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3295 self.git_store.upgrade()
3296 }
3297
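    /// Reloads the index and HEAD base texts for every open buffer belonging to this repository,
    /// forwards the changes to any downstream client, and updates each buffer's diff state.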
3298 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3299 let this = cx.weak_entity();
3300 let git_store = self.git_store.clone();
3301 let _ = self.send_keyed_job(
3302 Some(GitJobKey::ReloadBufferDiffBases),
3303 None,
3304 |state, mut cx| async move {
3305 let RepositoryState::Local { backend, .. } = state else {
3306 log::error!("tried to recompute diffs for a non-local repository");
3307 return Ok(());
3308 };
3309
3310 let Some(this) = this.upgrade() else {
3311 return Ok(());
3312 };
3313
3314 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3315 git_store.update(cx, |git_store, cx| {
3316 git_store
3317 .diffs
3318 .iter()
3319 .filter_map(|(buffer_id, diff_state)| {
3320 let buffer_store = git_store.buffer_store.read(cx);
3321 let buffer = buffer_store.get(*buffer_id)?;
3322 let file = File::from_dyn(buffer.read(cx).file())?;
3323 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3324 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3325 log::debug!(
3326 "start reload diff bases for repo path {}",
3327 repo_path.as_unix_str()
3328 );
3329 diff_state.update(cx, |diff_state, _| {
3330 let has_unstaged_diff = diff_state
3331 .unstaged_diff
3332 .as_ref()
3333 .is_some_and(|diff| diff.is_upgradable());
3334 let has_uncommitted_diff = diff_state
3335 .uncommitted_diff
3336 .as_ref()
3337 .is_some_and(|set| set.is_upgradable());
3338
3339 Some((
3340 buffer,
3341 repo_path,
3342 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3343 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3344 ))
3345 })
3346 })
3347 .collect::<Vec<_>>()
3348 })
3349 })??;
3350
3351 let buffer_diff_base_changes = cx
3352 .background_spawn(async move {
3353 let mut changes = Vec::new();
3354 for (buffer, repo_path, current_index_text, current_head_text) in
3355 &repo_diff_state_updates
3356 {
3357 let index_text = if current_index_text.is_some() {
3358 backend.load_index_text(repo_path.clone()).await
3359 } else {
3360 None
3361 };
3362 let head_text = if current_head_text.is_some() {
3363 backend.load_committed_text(repo_path.clone()).await
3364 } else {
3365 None
3366 };
3367
3368 let change =
3369 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3370 (Some(current_index), Some(current_head)) => {
3371 let index_changed =
3372 index_text.as_ref() != current_index.as_deref();
3373 let head_changed =
3374 head_text.as_ref() != current_head.as_deref();
3375 if index_changed && head_changed {
3376 if index_text == head_text {
3377 Some(DiffBasesChange::SetBoth(head_text))
3378 } else {
3379 Some(DiffBasesChange::SetEach {
3380 index: index_text,
3381 head: head_text,
3382 })
3383 }
3384 } else if index_changed {
3385 Some(DiffBasesChange::SetIndex(index_text))
3386 } else if head_changed {
3387 Some(DiffBasesChange::SetHead(head_text))
3388 } else {
3389 None
3390 }
3391 }
3392 (Some(current_index), None) => {
3393 let index_changed =
3394 index_text.as_ref() != current_index.as_deref();
3395 index_changed
3396 .then_some(DiffBasesChange::SetIndex(index_text))
3397 }
3398 (None, Some(current_head)) => {
3399 let head_changed =
3400 head_text.as_ref() != current_head.as_deref();
3401 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3402 }
3403 (None, None) => None,
3404 };
3405
3406 changes.push((buffer.clone(), change))
3407 }
3408 changes
3409 })
3410 .await;
3411
3412 git_store.update(&mut cx, |git_store, cx| {
3413 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3414 let buffer_snapshot = buffer.read(cx).text_snapshot();
3415 let buffer_id = buffer_snapshot.remote_id();
3416 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3417 continue;
3418 };
3419
3420 let downstream_client = git_store.downstream_client();
3421 diff_state.update(cx, |diff_state, cx| {
3422 use proto::update_diff_bases::Mode;
3423
3424 if let Some((diff_bases_change, (client, project_id))) =
3425 diff_bases_change.clone().zip(downstream_client)
3426 {
3427 let (staged_text, committed_text, mode) = match diff_bases_change {
3428 DiffBasesChange::SetIndex(index) => {
3429 (index, None, Mode::IndexOnly)
3430 }
3431 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3432 DiffBasesChange::SetEach { index, head } => {
3433 (index, head, Mode::IndexAndHead)
3434 }
3435 DiffBasesChange::SetBoth(text) => {
3436 (None, text, Mode::IndexMatchesHead)
3437 }
3438 };
3439 client
3440 .send(proto::UpdateDiffBases {
3441 project_id: project_id.to_proto(),
3442 buffer_id: buffer_id.to_proto(),
3443 staged_text,
3444 committed_text,
3445 mode: mode as i32,
3446 })
3447 .log_err();
3448 }
3449
3450 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3451 });
3452 }
3453 })
3454 },
3455 );
3456 }
3457
3458 pub fn send_job<F, Fut, R>(
3459 &mut self,
3460 status: Option<SharedString>,
3461 job: F,
3462 ) -> oneshot::Receiver<R>
3463 where
3464 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3465 Fut: Future<Output = R> + 'static,
3466 R: Send + 'static,
3467 {
3468 self.send_keyed_job(None, status, job)
3469 }
3470
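    /// Enqueues a job for the git worker, optionally tagged with a `GitJobKey` (e.g. for index
    /// writes); if a status message is provided, the job is recorded in `active_jobs` until it
    /// completes.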
3471 fn send_keyed_job<F, Fut, R>(
3472 &mut self,
3473 key: Option<GitJobKey>,
3474 status: Option<SharedString>,
3475 job: F,
3476 ) -> oneshot::Receiver<R>
3477 where
3478 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3479 Fut: Future<Output = R> + 'static,
3480 R: Send + 'static,
3481 {
3482 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3483 let job_id = post_inc(&mut self.job_id);
3484 let this = self.this.clone();
3485 self.job_sender
3486 .unbounded_send(GitJob {
3487 key,
3488 job: Box::new(move |state, cx: &mut AsyncApp| {
3489 let job = job(state, cx.clone());
3490 cx.spawn(async move |cx| {
3491 if let Some(s) = status.clone() {
3492 this.update(cx, |this, cx| {
3493 this.active_jobs.insert(
3494 job_id,
3495 JobInfo {
3496 start: Instant::now(),
3497 message: s.clone(),
3498 },
3499 );
3500
3501 cx.notify();
3502 })
3503 .ok();
3504 }
3505 let result = job.await;
3506
3507 this.update(cx, |this, cx| {
3508 this.active_jobs.remove(&job_id);
3509 cx.notify();
3510 })
3511 .ok();
3512
3513 result_tx.send(result).ok();
3514 })
3515 }),
3516 })
3517 .ok();
3518 result_rx
3519 }
3520
3521 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3522 let Some(git_store) = self.git_store.upgrade() else {
3523 return;
3524 };
3525 let entity = cx.entity();
3526 git_store.update(cx, |git_store, cx| {
3527 let Some((&id, _)) = git_store
3528 .repositories
3529 .iter()
3530 .find(|(_, handle)| *handle == &entity)
3531 else {
3532 return;
3533 };
3534 git_store.active_repo_id = Some(id);
3535 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3536 });
3537 }
3538
3539 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3540 self.snapshot.status()
3541 }
3542
3543 pub fn cached_stash(&self) -> GitStash {
3544 self.snapshot.stash_entries.clone()
3545 }
3546
3547 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3548 let git_store = self.git_store.upgrade()?;
3549 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3550 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3551 let abs_path = SanitizedPath::new(&abs_path);
3552 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3553 Some(ProjectPath {
3554 worktree_id: worktree.read(cx).id(),
3555 path: relative_path,
3556 })
3557 }
3558
3559 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3560 let git_store = self.git_store.upgrade()?;
3561 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3562 let abs_path = worktree_store.absolutize(path, cx)?;
3563 self.snapshot.abs_path_to_repo_path(&abs_path)
3564 }
3565
3566 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3567 other
3568 .read(cx)
3569 .snapshot
3570 .work_directory_abs_path
3571 .starts_with(&self.snapshot.work_directory_abs_path)
3572 }
3573
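    /// Returns the shared commit message buffer for this repository, creating it locally or
    /// requesting it from the remote host on first use.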
3574 pub fn open_commit_buffer(
3575 &mut self,
3576 languages: Option<Arc<LanguageRegistry>>,
3577 buffer_store: Entity<BufferStore>,
3578 cx: &mut Context<Self>,
3579 ) -> Task<Result<Entity<Buffer>>> {
3580 let id = self.id;
3581 if let Some(buffer) = self.commit_message_buffer.clone() {
3582 return Task::ready(Ok(buffer));
3583 }
3584 let this = cx.weak_entity();
3585
3586 let rx = self.send_job(None, move |state, mut cx| async move {
3587 let Some(this) = this.upgrade() else {
3588 bail!("git store was dropped");
3589 };
3590 match state {
3591 RepositoryState::Local { .. } => {
3592 this.update(&mut cx, |_, cx| {
3593 Self::open_local_commit_buffer(languages, buffer_store, cx)
3594 })?
3595 .await
3596 }
3597 RepositoryState::Remote { project_id, client } => {
3598 let request = client.request(proto::OpenCommitMessageBuffer {
3599 project_id: project_id.0,
3600 repository_id: id.to_proto(),
3601 });
3602 let response = request.await.context("requesting to open commit buffer")?;
3603 let buffer_id = BufferId::new(response.buffer_id)?;
3604 let buffer = buffer_store
3605 .update(&mut cx, |buffer_store, cx| {
3606 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3607 })?
3608 .await?;
3609 if let Some(language_registry) = languages {
3610 let git_commit_language =
3611 language_registry.language_for_name("Git Commit").await?;
3612 buffer.update(&mut cx, |buffer, cx| {
3613 buffer.set_language(Some(git_commit_language), cx);
3614 })?;
3615 }
3616 this.update(&mut cx, |this, _| {
3617 this.commit_message_buffer = Some(buffer.clone());
3618 })?;
3619 Ok(buffer)
3620 }
3621 }
3622 });
3623
3624 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3625 }
3626
3627 fn open_local_commit_buffer(
3628 language_registry: Option<Arc<LanguageRegistry>>,
3629 buffer_store: Entity<BufferStore>,
3630 cx: &mut Context<Self>,
3631 ) -> Task<Result<Entity<Buffer>>> {
3632 cx.spawn(async move |repository, cx| {
3633 let buffer = buffer_store
3634 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3635 .await?;
3636
3637 if let Some(language_registry) = language_registry {
3638 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3639 buffer.update(cx, |buffer, cx| {
3640 buffer.set_language(Some(git_commit_language), cx);
3641 })?;
3642 }
3643
3644 repository.update(cx, |repository, _| {
3645 repository.commit_message_buffer = Some(buffer.clone());
3646 })?;
3647 Ok(buffer)
3648 })
3649 }
3650
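    /// Restores the given paths to their contents at `commit`, tracking them as pending
    /// `Reverted` operations while the job runs.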
3651 pub fn checkout_files(
3652 &mut self,
3653 commit: &str,
3654 paths: Vec<RepoPath>,
3655 cx: &mut Context<Self>,
3656 ) -> Task<Result<()>> {
3657 let commit = commit.to_string();
3658 let id = self.id;
3659
3660 self.spawn_job_with_tracking(
3661 paths.clone(),
3662 pending_op::GitStatus::Reverted,
3663 cx,
3664 async move |this, cx| {
3665 this.update(cx, |this, _cx| {
3666 this.send_job(
3667 Some(format!("git checkout {}", commit).into()),
3668 move |git_repo, _| async move {
3669 match git_repo {
3670 RepositoryState::Local {
3671 backend,
3672 environment,
3673 ..
3674 } => {
3675 backend
3676 .checkout_files(commit, paths, environment.clone())
3677 .await
3678 }
3679 RepositoryState::Remote { project_id, client } => {
3680 client
3681 .request(proto::GitCheckoutFiles {
3682 project_id: project_id.0,
3683 repository_id: id.to_proto(),
3684 commit,
3685 paths: paths
3686 .into_iter()
3687 .map(|p| p.to_proto())
3688 .collect(),
3689 })
3690 .await?;
3691
3692 Ok(())
3693 }
3694 }
3695 },
3696 )
3697 })?
3698 .await?
3699 },
3700 )
3701 }
3702
3703 pub fn reset(
3704 &mut self,
3705 commit: String,
3706 reset_mode: ResetMode,
3707 _cx: &mut App,
3708 ) -> oneshot::Receiver<Result<()>> {
3709 let id = self.id;
3710
3711 self.send_job(None, move |git_repo, _| async move {
3712 match git_repo {
3713 RepositoryState::Local {
3714 backend,
3715 environment,
3716 ..
3717 } => backend.reset(commit, reset_mode, environment).await,
3718 RepositoryState::Remote { project_id, client } => {
3719 client
3720 .request(proto::GitReset {
3721 project_id: project_id.0,
3722 repository_id: id.to_proto(),
3723 commit,
3724 mode: match reset_mode {
3725 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3726 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3727 },
3728 })
3729 .await?;
3730
3731 Ok(())
3732 }
3733 }
3734 })
3735 }
3736
3737 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3738 let id = self.id;
3739 self.send_job(None, move |git_repo, _cx| async move {
3740 match git_repo {
3741 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3742 RepositoryState::Remote { project_id, client } => {
3743 let resp = client
3744 .request(proto::GitShow {
3745 project_id: project_id.0,
3746 repository_id: id.to_proto(),
3747 commit,
3748 })
3749 .await?;
3750
3751 Ok(CommitDetails {
3752 sha: resp.sha.into(),
3753 message: resp.message.into(),
3754 commit_timestamp: resp.commit_timestamp,
3755 author_email: resp.author_email.into(),
3756 author_name: resp.author_name.into(),
3757 })
3758 }
3759 }
3760 })
3761 }
3762
3763 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3764 let id = self.id;
3765 self.send_job(None, move |git_repo, cx| async move {
3766 match git_repo {
3767 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3768 RepositoryState::Remote {
3769 client, project_id, ..
3770 } => {
3771 let response = client
3772 .request(proto::LoadCommitDiff {
3773 project_id: project_id.0,
3774 repository_id: id.to_proto(),
3775 commit,
3776 })
3777 .await?;
3778 Ok(CommitDiff {
3779 files: response
3780 .files
3781 .into_iter()
3782 .map(|file| {
3783 Ok(CommitFile {
3784 path: RepoPath::from_proto(&file.path)?,
3785 old_text: file.old_text,
3786 new_text: file.new_text,
3787 })
3788 })
3789 .collect::<Result<Vec<_>>>()?,
3790 })
3791 }
3792 }
3793 })
3794 }
3795
3796 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3797 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3798 }
3799
3800 fn save_buffers<'a>(
3801 &self,
3802 entries: impl IntoIterator<Item = &'a RepoPath>,
3803 cx: &mut Context<Self>,
3804 ) -> Vec<Task<anyhow::Result<()>>> {
3805 let mut save_futures = Vec::new();
3806 if let Some(buffer_store) = self.buffer_store(cx) {
3807 buffer_store.update(cx, |buffer_store, cx| {
3808 for path in entries {
3809 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3810 continue;
3811 };
3812 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3813 && buffer
3814 .read(cx)
3815 .file()
3816 .is_some_and(|file| file.disk_state().exists())
3817 && buffer.read(cx).has_unsaved_edits()
3818 {
3819 save_futures.push(buffer_store.save_buffer(buffer, cx));
3820 }
3821 }
3822 })
3823 }
3824 save_futures
3825 }
3826
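    /// Stages the given paths. Any dirty buffers for those paths are saved first,
    /// the index write is keyed on the path set so redundant writes for the same
    /// paths are skipped, and each path carries a pending `Staged` op until the
    /// job finishes.
    ///
    /// Sketch; `repo: Entity<Repository>`, `cx: &mut App`, and `entry: StatusEntry`
    /// are assumed.
    ///
    /// ```ignore
    /// let task = repo.update(cx, |repo, cx| {
    ///     repo.stage_entries(vec![entry.repo_path.clone()], cx)
    /// });
    /// task.detach_and_log_err(cx);
    /// ```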
3827 pub fn stage_entries(
3828 &mut self,
3829 entries: Vec<RepoPath>,
3830 cx: &mut Context<Self>,
3831 ) -> Task<anyhow::Result<()>> {
3832 if entries.is_empty() {
3833 return Task::ready(Ok(()));
3834 }
3835 let id = self.id;
3836 let save_tasks = self.save_buffers(&entries, cx);
3837 let paths = entries
3838 .iter()
3839 .map(|p| p.as_unix_str())
3840 .collect::<Vec<_>>()
3841 .join(" ");
3842 let status = format!("git add {paths}");
3843 let job_key = GitJobKey::WriteIndex(entries.clone());
3844
3845 self.spawn_job_with_tracking(
3846 entries.clone(),
3847 pending_op::GitStatus::Staged,
3848 cx,
3849 async move |this, cx| {
3850 for save_task in save_tasks {
3851 save_task.await?;
3852 }
3853
3854 this.update(cx, |this, _| {
3855 this.send_keyed_job(
3856 Some(job_key),
3857 Some(status.into()),
3858 move |git_repo, _cx| async move {
3859 match git_repo {
3860 RepositoryState::Local {
3861 backend,
3862 environment,
3863 ..
3864 } => backend.stage_paths(entries, environment.clone()).await,
3865 RepositoryState::Remote { project_id, client } => {
3866 client
3867 .request(proto::Stage {
3868 project_id: project_id.0,
3869 repository_id: id.to_proto(),
3870 paths: entries
3871 .into_iter()
3872 .map(|repo_path| repo_path.to_proto())
3873 .collect(),
3874 })
3875 .await
3876 .context("sending stage request")?;
3877
3878 Ok(())
3879 }
3880 }
3881 },
3882 )
3883 })?
3884 .await?
3885 },
3886 )
3887 }
3888
3889 pub fn unstage_entries(
3890 &mut self,
3891 entries: Vec<RepoPath>,
3892 cx: &mut Context<Self>,
3893 ) -> Task<anyhow::Result<()>> {
3894 if entries.is_empty() {
3895 return Task::ready(Ok(()));
3896 }
3897 let id = self.id;
3898 let save_tasks = self.save_buffers(&entries, cx);
3899 let paths = entries
3900 .iter()
3901 .map(|p| p.as_unix_str())
3902 .collect::<Vec<_>>()
3903 .join(" ");
3904 let status = format!("git reset {paths}");
3905 let job_key = GitJobKey::WriteIndex(entries.clone());
3906
3907 self.spawn_job_with_tracking(
3908 entries.clone(),
3909 pending_op::GitStatus::Unstaged,
3910 cx,
3911 async move |this, cx| {
3912 for save_task in save_tasks {
3913 save_task.await?;
3914 }
3915
3916 this.update(cx, |this, _| {
3917 this.send_keyed_job(
3918 Some(job_key),
3919 Some(status.into()),
3920 move |git_repo, _cx| async move {
3921 match git_repo {
3922 RepositoryState::Local {
3923 backend,
3924 environment,
3925 ..
3926 } => backend.unstage_paths(entries, environment).await,
3927 RepositoryState::Remote { project_id, client } => {
3928 client
3929 .request(proto::Unstage {
3930 project_id: project_id.0,
3931 repository_id: id.to_proto(),
3932 paths: entries
3933 .into_iter()
3934 .map(|repo_path| repo_path.to_proto())
3935 .collect(),
3936 })
3937 .await
3938 .context("sending unstage request")?;
3939
3940 Ok(())
3941 }
3942 }
3943 },
3944 )
3945 })?
3946 .await?
3947 },
3948 )
3949 }
3950
3951 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3952 let to_stage = self
3953 .cached_status()
3954 .filter_map(|entry| {
3955 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
3956 if ops.staging() || ops.staged() {
3957 None
3958 } else {
3959 Some(entry.repo_path)
3960 }
3961 } else if entry.status.staging().has_staged() {
3962 None
3963 } else {
3964 Some(entry.repo_path)
3965 }
3966 })
3967 .collect();
3968 self.stage_entries(to_stage, cx)
3969 }
3970
3971 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3972 let to_unstage = self
3973 .cached_status()
3974 .filter_map(|entry| {
3975 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
3976 if !ops.staging() && !ops.staged() {
3977 None
3978 } else {
3979 Some(entry.repo_path)
3980 }
3981 } else if entry.status.staging().has_unstaged() {
3982 None
3983 } else {
3984 Some(entry.repo_path)
3985 }
3986 })
3987 .collect();
3988 self.unstage_entries(to_unstage, cx)
3989 }
3990
3991 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3992 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3993
3994 self.stash_entries(to_stash, cx)
3995 }
3996
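    /// Stashes the working-tree changes for the given paths, either through the
    /// local backend or via a `proto::Stash` request.
    ///
    /// Sketch; `repo: Entity<Repository>`, `cx: &mut App`, and `entry: StatusEntry`
    /// are assumed.
    ///
    /// ```ignore
    /// let task = repo.update(cx, |repo, cx| {
    ///     repo.stash_entries(vec![entry.repo_path.clone()], cx)
    /// });
    /// task.detach_and_log_err(cx);
    /// ```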
3997 pub fn stash_entries(
3998 &mut self,
3999 entries: Vec<RepoPath>,
4000 cx: &mut Context<Self>,
4001 ) -> Task<anyhow::Result<()>> {
4002 let id = self.id;
4003
4004 cx.spawn(async move |this, cx| {
4005 this.update(cx, |this, _| {
4006 this.send_job(None, move |git_repo, _cx| async move {
4007 match git_repo {
4008 RepositoryState::Local {
4009 backend,
4010 environment,
4011 ..
4012 } => backend.stash_paths(entries, environment).await,
4013 RepositoryState::Remote { project_id, client } => {
4014 client
4015 .request(proto::Stash {
4016 project_id: project_id.0,
4017 repository_id: id.to_proto(),
4018 paths: entries
4019 .into_iter()
4020 .map(|repo_path| repo_path.to_proto())
4021 .collect(),
4022 })
4023 .await
4024 .context("sending stash request")?;
4025 Ok(())
4026 }
4027 }
4028 })
4029 })?
4030 .await??;
4031 Ok(())
4032 })
4033 }
4034
4035 pub fn stash_pop(
4036 &mut self,
4037 index: Option<usize>,
4038 cx: &mut Context<Self>,
4039 ) -> Task<anyhow::Result<()>> {
4040 let id = self.id;
4041 cx.spawn(async move |this, cx| {
4042 this.update(cx, |this, _| {
4043 this.send_job(None, move |git_repo, _cx| async move {
4044 match git_repo {
4045 RepositoryState::Local {
4046 backend,
4047 environment,
4048 ..
4049 } => backend.stash_pop(index, environment).await,
4050 RepositoryState::Remote { project_id, client } => {
4051 client
4052 .request(proto::StashPop {
4053 project_id: project_id.0,
4054 repository_id: id.to_proto(),
4055 stash_index: index.map(|i| i as u64),
4056 })
4057 .await
4058 .context("sending stash pop request")?;
4059 Ok(())
4060 }
4061 }
4062 })
4063 })?
4064 .await??;
4065 Ok(())
4066 })
4067 }
4068
4069 pub fn stash_apply(
4070 &mut self,
4071 index: Option<usize>,
4072 cx: &mut Context<Self>,
4073 ) -> Task<anyhow::Result<()>> {
4074 let id = self.id;
4075 cx.spawn(async move |this, cx| {
4076 this.update(cx, |this, _| {
4077 this.send_job(None, move |git_repo, _cx| async move {
4078 match git_repo {
4079 RepositoryState::Local {
4080 backend,
4081 environment,
4082 ..
4083 } => backend.stash_apply(index, environment).await,
4084 RepositoryState::Remote { project_id, client } => {
4085 client
4086 .request(proto::StashApply {
4087 project_id: project_id.0,
4088 repository_id: id.to_proto(),
4089 stash_index: index.map(|i| i as u64),
4090 })
4091 .await
4092 .context("sending stash apply request")?;
4093 Ok(())
4094 }
4095 }
4096 })
4097 })?
4098 .await??;
4099 Ok(())
4100 })
4101 }
4102
4103 pub fn stash_drop(
4104 &mut self,
4105 index: Option<usize>,
4106 cx: &mut Context<Self>,
4107 ) -> oneshot::Receiver<anyhow::Result<()>> {
4108 let id = self.id;
4109 let updates_tx = self
4110 .git_store()
4111 .and_then(|git_store| match &git_store.read(cx).state {
4112 GitStoreState::Local { downstream, .. } => downstream
4113 .as_ref()
4114 .map(|downstream| downstream.updates_tx.clone()),
4115 _ => None,
4116 });
4117 let this = cx.weak_entity();
4118 self.send_job(None, move |git_repo, mut cx| async move {
4119 match git_repo {
4120 RepositoryState::Local {
4121 backend,
4122 environment,
4123 ..
4124 } => {
4125 // TODO would be nice to not have to do this manually
4126 let result = backend.stash_drop(index, environment).await;
4127 if result.is_ok()
4128 && let Ok(stash_entries) = backend.stash_entries().await
4129 {
4130 let snapshot = this.update(&mut cx, |this, cx| {
4131 this.snapshot.stash_entries = stash_entries;
4132 cx.emit(RepositoryEvent::StashEntriesChanged);
4133 this.snapshot.clone()
4134 })?;
4135 if let Some(updates_tx) = updates_tx {
4136 updates_tx
4137 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4138 .ok();
4139 }
4140 }
4141
4142 result
4143 }
4144 RepositoryState::Remote { project_id, client } => {
4145 client
4146 .request(proto::StashDrop {
4147 project_id: project_id.0,
4148 repository_id: id.to_proto(),
4149 stash_index: index.map(|i| i as u64),
4150 })
4151 .await
4152 .context("sending stash drop request")?;
4153 Ok(())
4154 }
4155 }
4156 })
4157 }
4158
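    /// Creates a commit with the given message. `name_and_email` overrides the
    /// configured author when provided, and `options` forwards the `amend` and
    /// `signoff` flags.
    ///
    /// Sketch; `repo: Entity<Repository>`, `cx: &mut App`, and
    /// `options: CommitOptions` are assumed to be in scope.
    ///
    /// ```ignore
    /// let rx = repo.update(cx, |repo, cx| {
    ///     repo.commit("Fix flaky diff test".into(), None, options, cx)
    /// });
    /// // In an async context:
    /// // rx.await??;
    /// ```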
4159 pub fn commit(
4160 &mut self,
4161 message: SharedString,
4162 name_and_email: Option<(SharedString, SharedString)>,
4163 options: CommitOptions,
4164 _cx: &mut App,
4165 ) -> oneshot::Receiver<Result<()>> {
4166 let id = self.id;
4167
4168 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4169 match git_repo {
4170 RepositoryState::Local {
4171 backend,
4172 environment,
4173 ..
4174 } => {
4175 backend
4176 .commit(message, name_and_email, options, environment)
4177 .await
4178 }
4179 RepositoryState::Remote { project_id, client } => {
4180 let (name, email) = name_and_email.unzip();
4181 client
4182 .request(proto::Commit {
4183 project_id: project_id.0,
4184 repository_id: id.to_proto(),
4185 message: String::from(message),
4186 name: name.map(String::from),
4187 email: email.map(String::from),
4188 options: Some(proto::commit::CommitOptions {
4189 amend: options.amend,
4190 signoff: options.signoff,
4191 }),
4192 })
4193 .await
4194 .context("sending commit request")?;
4195
4196 Ok(())
4197 }
4198 }
4199 })
4200 }
4201
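    /// Fetches according to `fetch_options`, using `askpass` to answer any
    /// credential prompts. For remote repositories the askpass delegate is
    /// registered under a fresh id for the duration of the request so prompts can
    /// be answered locally.
    ///
    /// Sketch; `repo: Entity<Repository>`, `cx: &mut App`, `fetch_options:
    /// FetchOptions`, and `askpass: AskPassDelegate` are assumed.
    ///
    /// ```ignore
    /// let rx = repo.update(cx, |repo, cx| repo.fetch(fetch_options, askpass, cx));
    /// // In an async context:
    /// // let output = rx.await??;
    /// // log::info!("{}", output.stderr);
    /// ```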
4202 pub fn fetch(
4203 &mut self,
4204 fetch_options: FetchOptions,
4205 askpass: AskPassDelegate,
4206 _cx: &mut App,
4207 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4208 let askpass_delegates = self.askpass_delegates.clone();
4209 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4210 let id = self.id;
4211
4212 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4213 match git_repo {
4214 RepositoryState::Local {
4215 backend,
4216 environment,
4217 ..
4218 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4219 RepositoryState::Remote { project_id, client } => {
4220 askpass_delegates.lock().insert(askpass_id, askpass);
4221 let _defer = util::defer(|| {
4222 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4223 debug_assert!(askpass_delegate.is_some());
4224 });
4225
4226 let response = client
4227 .request(proto::Fetch {
4228 project_id: project_id.0,
4229 repository_id: id.to_proto(),
4230 askpass_id,
4231 remote: fetch_options.to_proto(),
4232 })
4233 .await
4234 .context("sending fetch request")?;
4235
4236 Ok(RemoteCommandOutput {
4237 stdout: response.stdout,
4238 stderr: response.stderr,
4239 })
4240 }
4241 }
4242 })
4243 }
4244
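    /// Pushes `branch` to `remote`. In the local case a successful push triggers a
    /// re-read of the branch list so the snapshot's upstream tracking info stays
    /// current and is forwarded downstream.
    ///
    /// Sketch; `repo: Entity<Repository>`, `cx: &mut App`, and
    /// `askpass: AskPassDelegate` are assumed.
    ///
    /// ```ignore
    /// let rx = repo.update(cx, |repo, cx| {
    ///     repo.push(
    ///         "main".into(),
    ///         "origin".into(),
    ///         Some(PushOptions::SetUpstream),
    ///         askpass,
    ///         cx,
    ///     )
    /// });
    /// ```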
4245 pub fn push(
4246 &mut self,
4247 branch: SharedString,
4248 remote: SharedString,
4249 options: Option<PushOptions>,
4250 askpass: AskPassDelegate,
4251 cx: &mut Context<Self>,
4252 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4253 let askpass_delegates = self.askpass_delegates.clone();
4254 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4255 let id = self.id;
4256
4257 let args = options
4258 .map(|option| match option {
4259 PushOptions::SetUpstream => " --set-upstream",
4260 PushOptions::Force => " --force-with-lease",
4261 })
4262 .unwrap_or("");
4263
4264 let updates_tx = self
4265 .git_store()
4266 .and_then(|git_store| match &git_store.read(cx).state {
4267 GitStoreState::Local { downstream, .. } => downstream
4268 .as_ref()
4269 .map(|downstream| downstream.updates_tx.clone()),
4270 _ => None,
4271 });
4272
4273 let this = cx.weak_entity();
4274 self.send_job(
4275 Some(format!("git push{} {} {}", args, remote, branch).into()),
4276 move |git_repo, mut cx| async move {
4277 match git_repo {
4278 RepositoryState::Local {
4279 backend,
4280 environment,
4281 ..
4282 } => {
4283 let result = backend
4284 .push(
4285 branch.to_string(),
4286 remote.to_string(),
4287 options,
4288 askpass,
4289 environment.clone(),
4290 cx.clone(),
4291 )
4292 .await;
4293 // TODO would be nice to not have to do this manually
4294 if result.is_ok() {
4295 let branches = backend.branches().await?;
4296 let branch = branches.into_iter().find(|branch| branch.is_head);
4297 log::info!("head branch after scan is {branch:?}");
4298 let snapshot = this.update(&mut cx, |this, cx| {
4299 this.snapshot.branch = branch;
4300 cx.emit(RepositoryEvent::BranchChanged);
4301 this.snapshot.clone()
4302 })?;
4303 if let Some(updates_tx) = updates_tx {
4304 updates_tx
4305 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4306 .ok();
4307 }
4308 }
4309 result
4310 }
4311 RepositoryState::Remote { project_id, client } => {
4312 askpass_delegates.lock().insert(askpass_id, askpass);
4313 let _defer = util::defer(|| {
4314 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4315 debug_assert!(askpass_delegate.is_some());
4316 });
4317 let response = client
4318 .request(proto::Push {
4319 project_id: project_id.0,
4320 repository_id: id.to_proto(),
4321 askpass_id,
4322 branch_name: branch.to_string(),
4323 remote_name: remote.to_string(),
4324 options: options.map(|options| match options {
4325 PushOptions::Force => proto::push::PushOptions::Force,
4326 PushOptions::SetUpstream => {
4327 proto::push::PushOptions::SetUpstream
4328 }
4329 }
4330 as i32),
4331 })
4332 .await
4333 .context("sending push request")?;
4334
4335 Ok(RemoteCommandOutput {
4336 stdout: response.stdout,
4337 stderr: response.stderr,
4338 })
4339 }
4340 }
4341 },
4342 )
4343 }
4344
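    /// Pulls from `remote`, optionally with `--rebase`, targeting `branch` when one
    /// is given and otherwise the current branch.
    ///
    /// Sketch; `repo: Entity<Repository>`, `cx: &mut App`, and
    /// `askpass: AskPassDelegate` are assumed.
    ///
    /// ```ignore
    /// let rx = repo.update(cx, |repo, cx| {
    ///     repo.pull(None, "origin".into(), /* rebase */ true, askpass, cx)
    /// });
    /// ```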
4345 pub fn pull(
4346 &mut self,
4347 branch: Option<SharedString>,
4348 remote: SharedString,
4349 rebase: bool,
4350 askpass: AskPassDelegate,
4351 _cx: &mut App,
4352 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4353 let askpass_delegates = self.askpass_delegates.clone();
4354 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4355 let id = self.id;
4356
4357 let mut status = "git pull".to_string();
4358 if rebase {
4359 status.push_str(" --rebase");
4360 }
4361 status.push_str(&format!(" {}", remote));
4362 if let Some(b) = &branch {
4363 status.push_str(&format!(" {}", b));
4364 }
4365
4366 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4367 match git_repo {
4368 RepositoryState::Local {
4369 backend,
4370 environment,
4371 ..
4372 } => {
4373 backend
4374 .pull(
4375 branch.as_ref().map(|b| b.to_string()),
4376 remote.to_string(),
4377 rebase,
4378 askpass,
4379 environment.clone(),
4380 cx,
4381 )
4382 .await
4383 }
4384 RepositoryState::Remote { project_id, client } => {
4385 askpass_delegates.lock().insert(askpass_id, askpass);
4386 let _defer = util::defer(|| {
4387 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4388 debug_assert!(askpass_delegate.is_some());
4389 });
4390 let response = client
4391 .request(proto::Pull {
4392 project_id: project_id.0,
4393 repository_id: id.to_proto(),
4394 askpass_id,
4395 rebase,
4396 branch_name: branch.as_ref().map(|b| b.to_string()),
4397 remote_name: remote.to_string(),
4398 })
4399 .await
4400 .context("sending pull request")?;
4401
4402 Ok(RemoteCommandOutput {
4403 stdout: response.stdout,
4404 stderr: response.stderr,
4405 })
4406 }
4407 }
4408 })
4409 }
4410
4411 fn spawn_set_index_text_job(
4412 &mut self,
4413 path: RepoPath,
4414 content: Option<String>,
4415 hunk_staging_operation_count: Option<usize>,
4416 cx: &mut Context<Self>,
4417 ) -> oneshot::Receiver<anyhow::Result<()>> {
4418 let id = self.id;
4419 let this = cx.weak_entity();
4420 let git_store = self.git_store.clone();
4421 self.send_keyed_job(
4422 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4423 None,
4424 move |git_repo, mut cx| async move {
4425 log::debug!(
4426 "start updating index text for buffer {}",
4427 path.as_unix_str()
4428 );
4429 match git_repo {
4430 RepositoryState::Local {
4431 backend,
4432 environment,
4433 ..
4434 } => {
4435 backend
4436 .set_index_text(path.clone(), content, environment.clone())
4437 .await?;
4438 }
4439 RepositoryState::Remote { project_id, client } => {
4440 client
4441 .request(proto::SetIndexText {
4442 project_id: project_id.0,
4443 repository_id: id.to_proto(),
4444 path: path.to_proto(),
4445 text: content,
4446 })
4447 .await?;
4448 }
4449 }
4450 log::debug!(
4451 "finish updating index text for buffer {}",
4452 path.as_unix_str()
4453 );
4454
4455 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4456 let project_path = this
4457 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4458 .ok()
4459 .flatten();
4460 git_store.update(&mut cx, |git_store, cx| {
4461 let buffer_id = git_store
4462 .buffer_store
4463 .read(cx)
4464 .get_by_path(&project_path?)?
4465 .read(cx)
4466 .remote_id();
4467 let diff_state = git_store.diffs.get(&buffer_id)?;
4468 diff_state.update(cx, |diff_state, _| {
4469 diff_state.hunk_staging_operation_count_as_of_write =
4470 hunk_staging_operation_count;
4471 });
4472 Some(())
4473 })?;
4474 }
4475 Ok(())
4476 },
4477 )
4478 }
4479
4480 pub fn get_remotes(
4481 &mut self,
4482 branch_name: Option<String>,
4483 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4484 let id = self.id;
4485 self.send_job(None, move |repo, _cx| async move {
4486 match repo {
4487 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4488 RepositoryState::Remote { project_id, client } => {
4489 let response = client
4490 .request(proto::GetRemotes {
4491 project_id: project_id.0,
4492 repository_id: id.to_proto(),
4493 branch_name,
4494 })
4495 .await?;
4496
4497 let remotes = response
4498 .remotes
4499 .into_iter()
4500 .map(|remote| git::repository::Remote {
4501 name: remote.name.into(),
4502 })
4503 .collect();
4504
4505 Ok(remotes)
4506 }
4507 }
4508 })
4509 }
4510
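    /// Lists all branches known to the repository; the current branch is the one
    /// whose `is_head` flag is set.
    ///
    /// Sketch; `repo: Entity<Repository>` and `cx: &mut App` are assumed.
    ///
    /// ```ignore
    /// let rx = repo.update(cx, |repo, _| repo.branches());
    /// // In an async context:
    /// // let current = rx.await??.into_iter().find(|branch| branch.is_head);
    /// ```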
4511 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4512 let id = self.id;
4513 self.send_job(None, move |repo, _| async move {
4514 match repo {
4515 RepositoryState::Local { backend, .. } => backend.branches().await,
4516 RepositoryState::Remote { project_id, client } => {
4517 let response = client
4518 .request(proto::GitGetBranches {
4519 project_id: project_id.0,
4520 repository_id: id.to_proto(),
4521 })
4522 .await?;
4523
4524 let branches = response
4525 .branches
4526 .into_iter()
4527 .map(|branch| proto_to_branch(&branch))
4528 .collect();
4529
4530 Ok(branches)
4531 }
4532 }
4533 })
4534 }
4535
4536 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4537 let id = self.id;
4538 self.send_job(None, move |repo, _| async move {
4539 match repo {
4540 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4541 RepositoryState::Remote { project_id, client } => {
4542 let response = client
4543 .request(proto::GitGetWorktrees {
4544 project_id: project_id.0,
4545 repository_id: id.to_proto(),
4546 })
4547 .await?;
4548
4549 let worktrees = response
4550 .worktrees
4551 .into_iter()
4552 .map(|worktree| proto_to_worktree(&worktree))
4553 .collect();
4554
4555 Ok(worktrees)
4556 }
4557 }
4558 })
4559 }
4560
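    /// Creates a new git worktree named `name` at `path`, optionally starting from
    /// `commit` (when `None`, the backend chooses its default start point).
    ///
    /// Sketch; `repo: Entity<Repository>` and `cx: &mut App` are assumed, and the
    /// name and directory below are illustrative.
    ///
    /// ```ignore
    /// let rx = repo.update(cx, |repo, _| {
    ///     repo.create_worktree(
    ///         "review".to_string(),
    ///         PathBuf::from("../my-project-review"),
    ///         None,
    ///     )
    /// });
    /// ```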
4561 pub fn create_worktree(
4562 &mut self,
4563 name: String,
4564 path: PathBuf,
4565 commit: Option<String>,
4566 ) -> oneshot::Receiver<Result<()>> {
4567 let id = self.id;
4568 self.send_job(
4569 Some("git worktree add".into()),
4570 move |repo, _cx| async move {
4571 match repo {
4572 RepositoryState::Local { backend, .. } => {
4573 backend.create_worktree(name, path, commit).await
4574 }
4575 RepositoryState::Remote { project_id, client } => {
4576 client
4577 .request(proto::GitCreateWorktree {
4578 project_id: project_id.0,
4579 repository_id: id.to_proto(),
4580 name,
4581 directory: path.to_string_lossy().to_string(),
4582 commit,
4583 })
4584 .await?;
4585
4586 Ok(())
4587 }
4588 }
4589 },
4590 )
4591 }
4592
4593 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4594 let id = self.id;
4595 self.send_job(None, move |repo, _| async move {
4596 match repo {
4597 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4598 RepositoryState::Remote { project_id, client } => {
4599 let response = client
4600 .request(proto::GetDefaultBranch {
4601 project_id: project_id.0,
4602 repository_id: id.to_proto(),
4603 })
4604 .await?;
4605
4606 anyhow::Ok(response.branch.map(SharedString::from))
4607 }
4608 }
4609 })
4610 }
4611
4612 pub fn diff_tree(
4613 &mut self,
4614 diff_type: DiffTreeType,
4615 _cx: &App,
4616 ) -> oneshot::Receiver<Result<TreeDiff>> {
4617 let repository_id = self.snapshot.id;
4618 self.send_job(None, move |repo, _cx| async move {
4619 match repo {
4620 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4621 RepositoryState::Remote { client, project_id } => {
4622 let response = client
4623 .request(proto::GetTreeDiff {
4624 project_id: project_id.0,
4625 repository_id: repository_id.0,
4626 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4627 base: diff_type.base().to_string(),
4628 head: diff_type.head().to_string(),
4629 })
4630 .await?;
4631
4632 let entries = response
4633 .entries
4634 .into_iter()
4635 .filter_map(|entry| {
4636 let status = match entry.status() {
4637 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4638 proto::tree_diff_status::Status::Modified => {
4639 TreeDiffStatus::Modified {
4640 old: git::Oid::from_str(
4641 &entry.oid.context("missing oid").log_err()?,
4642 )
4643 .log_err()?,
4644 }
4645 }
4646 proto::tree_diff_status::Status::Deleted => {
4647 TreeDiffStatus::Deleted {
4648 old: git::Oid::from_str(
4649 &entry.oid.context("missing oid").log_err()?,
4650 )
4651 .log_err()?,
4652 }
4653 }
4654 };
4655 Some((
4656 RepoPath(RelPath::from_proto(&entry.path).log_err()?),
4657 status,
4658 ))
4659 })
4660 .collect();
4661
4662 Ok(TreeDiff { entries })
4663 }
4664 }
4665 })
4666 }
4667
4668 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4669 let id = self.id;
4670 self.send_job(None, move |repo, _cx| async move {
4671 match repo {
4672 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4673 RepositoryState::Remote { project_id, client } => {
4674 let response = client
4675 .request(proto::GitDiff {
4676 project_id: project_id.0,
4677 repository_id: id.to_proto(),
4678 diff_type: match diff_type {
4679 DiffType::HeadToIndex => {
4680 proto::git_diff::DiffType::HeadToIndex.into()
4681 }
4682 DiffType::HeadToWorktree => {
4683 proto::git_diff::DiffType::HeadToWorktree.into()
4684 }
4685 },
4686 })
4687 .await?;
4688
4689 Ok(response.diff)
4690 }
4691 }
4692 })
4693 }
4694
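    /// Creates `branch_name`, surfaced in the job status as
    /// `git switch -c <branch_name>`.
    ///
    /// Sketch; `repo: Entity<Repository>` and `cx: &mut App` are assumed.
    ///
    /// ```ignore
    /// let rx = repo.update(cx, |repo, _| repo.create_branch("feature/login".to_string()));
    /// // In an async context:
    /// // rx.await??;
    /// ```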
4695 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4696 let id = self.id;
4697 self.send_job(
4698 Some(format!("git switch -c {branch_name}").into()),
4699 move |repo, _cx| async move {
4700 match repo {
4701 RepositoryState::Local { backend, .. } => {
4702 backend.create_branch(branch_name).await
4703 }
4704 RepositoryState::Remote { project_id, client } => {
4705 client
4706 .request(proto::GitCreateBranch {
4707 project_id: project_id.0,
4708 repository_id: id.to_proto(),
4709 branch_name,
4710 })
4711 .await?;
4712
4713 Ok(())
4714 }
4715 }
4716 },
4717 )
4718 }
4719
4720 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4721 let id = self.id;
4722 self.send_job(
4723 Some(format!("git switch {branch_name}").into()),
4724 move |repo, _cx| async move {
4725 match repo {
4726 RepositoryState::Local { backend, .. } => {
4727 backend.change_branch(branch_name).await
4728 }
4729 RepositoryState::Remote { project_id, client } => {
4730 client
4731 .request(proto::GitChangeBranch {
4732 project_id: project_id.0,
4733 repository_id: id.to_proto(),
4734 branch_name,
4735 })
4736 .await?;
4737
4738 Ok(())
4739 }
4740 }
4741 },
4742 )
4743 }
4744
4745 pub fn rename_branch(
4746 &mut self,
4747 branch: String,
4748 new_name: String,
4749 ) -> oneshot::Receiver<Result<()>> {
4750 let id = self.id;
4751 self.send_job(
4752 Some(format!("git branch -m {branch} {new_name}").into()),
4753 move |repo, _cx| async move {
4754 match repo {
4755 RepositoryState::Local { backend, .. } => {
4756 backend.rename_branch(branch, new_name).await
4757 }
4758 RepositoryState::Remote { project_id, client } => {
4759 client
4760 .request(proto::GitRenameBranch {
4761 project_id: project_id.0,
4762 repository_id: id.to_proto(),
4763 branch,
4764 new_name,
4765 })
4766 .await?;
4767
4768 Ok(())
4769 }
4770 }
4771 },
4772 )
4773 }
4774
4775 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4776 let id = self.id;
4777 self.send_job(None, move |repo, _cx| async move {
4778 match repo {
4779 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4780 RepositoryState::Remote { project_id, client } => {
4781 let response = client
4782 .request(proto::CheckForPushedCommits {
4783 project_id: project_id.0,
4784 repository_id: id.to_proto(),
4785 })
4786 .await?;
4787
4788 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4789
4790 Ok(branches)
4791 }
4792 }
4793 })
4794 }
4795
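    /// Captures a checkpoint of the repository's current state, which can later be
    /// restored with `restore_checkpoint` or compared via `compare_checkpoints` /
    /// `diff_checkpoints`. Only local repositories support this for now; remote
    /// ones return an error.
    ///
    /// Sketch; `repo: Entity<Repository>` and `cx: &mut App` are assumed.
    ///
    /// ```ignore
    /// let rx = repo.update(cx, |repo, _| repo.checkpoint());
    /// // In an async context:
    /// // let checkpoint = rx.await??;
    /// // ...mutate the working tree...
    /// // repo.update(cx, |repo, _| repo.restore_checkpoint(checkpoint));
    /// ```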
4796 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4797 self.send_job(None, |repo, _cx| async move {
4798 match repo {
4799 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4800 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4801 }
4802 })
4803 }
4804
4805 pub fn restore_checkpoint(
4806 &mut self,
4807 checkpoint: GitRepositoryCheckpoint,
4808 ) -> oneshot::Receiver<Result<()>> {
4809 self.send_job(None, move |repo, _cx| async move {
4810 match repo {
4811 RepositoryState::Local { backend, .. } => {
4812 backend.restore_checkpoint(checkpoint).await
4813 }
4814 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4815 }
4816 })
4817 }
4818
4819 pub(crate) fn apply_remote_update(
4820 &mut self,
4821 update: proto::UpdateRepository,
4822 cx: &mut Context<Self>,
4823 ) -> Result<()> {
4824 let conflicted_paths = TreeSet::from_ordered_entries(
4825 update
4826 .current_merge_conflicts
4827 .into_iter()
4828 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4829 );
4830 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4831 let new_head_commit = update
4832 .head_commit_details
4833 .as_ref()
4834 .map(proto_to_commit_details);
4835 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4836 cx.emit(RepositoryEvent::BranchChanged)
4837 }
4838 self.snapshot.branch = new_branch;
4839 self.snapshot.head_commit = new_head_commit;
4840
4841 self.snapshot.merge.conflicted_paths = conflicted_paths;
4842 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4843 let new_stash_entries = GitStash {
4844 entries: update
4845 .stash_entries
4846 .iter()
4847 .filter_map(|entry| proto_to_stash(entry).ok())
4848 .collect(),
4849 };
4850 if self.snapshot.stash_entries != new_stash_entries {
4851 cx.emit(RepositoryEvent::StashEntriesChanged)
4852 }
4853 self.snapshot.stash_entries = new_stash_entries;
4854
4855 let edits = update
4856 .removed_statuses
4857 .into_iter()
4858 .filter_map(|path| {
4859 Some(sum_tree::Edit::Remove(PathKey(
4860 RelPath::from_proto(&path).log_err()?,
4861 )))
4862 })
4863 .chain(
4864 update
4865 .updated_statuses
4866 .into_iter()
4867 .filter_map(|updated_status| {
4868 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4869 }),
4870 )
4871 .collect::<Vec<_>>();
4872 if !edits.is_empty() {
4873 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4874 }
4875 self.snapshot.statuses_by_path.edit(edits, ());
4876 if update.is_last_update {
4877 self.snapshot.scan_id = update.scan_id;
4878 }
4879 Ok(())
4880 }
4881
4882 pub fn compare_checkpoints(
4883 &mut self,
4884 left: GitRepositoryCheckpoint,
4885 right: GitRepositoryCheckpoint,
4886 ) -> oneshot::Receiver<Result<bool>> {
4887 self.send_job(None, move |repo, _cx| async move {
4888 match repo {
4889 RepositoryState::Local { backend, .. } => {
4890 backend.compare_checkpoints(left, right).await
4891 }
4892 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4893 }
4894 })
4895 }
4896
4897 pub fn diff_checkpoints(
4898 &mut self,
4899 base_checkpoint: GitRepositoryCheckpoint,
4900 target_checkpoint: GitRepositoryCheckpoint,
4901 ) -> oneshot::Receiver<Result<String>> {
4902 self.send_job(None, move |repo, _cx| async move {
4903 match repo {
4904 RepositoryState::Local { backend, .. } => {
4905 backend
4906 .diff_checkpoints(base_checkpoint, target_checkpoint)
4907 .await
4908 }
4909 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4910 }
4911 })
4912 }
4913
4914 fn schedule_scan(
4915 &mut self,
4916 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4917 cx: &mut Context<Self>,
4918 ) {
4919 let this = cx.weak_entity();
4920 let _ = self.send_keyed_job(
4921 Some(GitJobKey::ReloadGitState),
4922 None,
4923 |state, mut cx| async move {
4924 log::debug!("run scheduled git status scan");
4925
4926 let Some(this) = this.upgrade() else {
4927 return Ok(());
4928 };
4929 let RepositoryState::Local { backend, .. } = state else {
4930 bail!("not a local repository")
4931 };
4932 let (snapshot, events) = this
4933 .update(&mut cx, |this, _| {
4934 this.paths_needing_status_update.clear();
4935 compute_snapshot(
4936 this.id,
4937 this.work_directory_abs_path.clone(),
4938 this.snapshot.clone(),
4939 backend.clone(),
4940 )
4941 })?
4942 .await?;
4943 this.update(&mut cx, |this, cx| {
4944 this.snapshot = snapshot.clone();
4945 for event in events {
4946 cx.emit(event);
4947 }
4948 })?;
4949 if let Some(updates_tx) = updates_tx {
4950 updates_tx
4951 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4952 .ok();
4953 }
4954 Ok(())
4955 },
4956 );
4957 }
4958
4959 fn spawn_local_git_worker(
4960 work_directory_abs_path: Arc<Path>,
4961 dot_git_abs_path: Arc<Path>,
4962 _repository_dir_abs_path: Arc<Path>,
4963 _common_dir_abs_path: Arc<Path>,
4964 project_environment: WeakEntity<ProjectEnvironment>,
4965 fs: Arc<dyn Fs>,
4966 cx: &mut Context<Self>,
4967 ) -> mpsc::UnboundedSender<GitJob> {
4968 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4969
4970 cx.spawn(async move |_, cx| {
4971 let environment = project_environment
4972 .upgrade()
4973 .context("missing project environment")?
4974 .update(cx, |project_environment, cx| {
4975 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
4976 })?
4977 .await
4978 .unwrap_or_else(|| {
4979 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4980 HashMap::default()
4981 });
4982 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4983 let backend = cx
4984 .background_spawn(async move {
4985 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
4986 .or_else(|| which::which("git").ok());
4987 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4988 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4989 })
4990 .await?;
4991
4992 if let Some(git_hosting_provider_registry) =
4993 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4994 {
4995 git_hosting_providers::register_additional_providers(
4996 git_hosting_provider_registry,
4997 backend.clone(),
4998 );
4999 }
5000
5001 let state = RepositoryState::Local {
5002 backend,
5003 environment: Arc::new(environment),
5004 };
5005 let mut jobs = VecDeque::new();
5006 loop {
5007 while let Ok(Some(next_job)) = job_rx.try_next() {
5008 jobs.push_back(next_job);
5009 }
5010
5011 if let Some(job) = jobs.pop_front() {
5012 if let Some(current_key) = &job.key
5013 && jobs
5014 .iter()
5015 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5016 {
5017 continue;
5018 }
5019 (job.job)(state.clone(), cx).await;
5020 } else if let Some(job) = job_rx.next().await {
5021 jobs.push_back(job);
5022 } else {
5023 break;
5024 }
5025 }
5026 anyhow::Ok(())
5027 })
5028 .detach_and_log_err(cx);
5029
5030 job_tx
5031 }
5032
5033 fn spawn_remote_git_worker(
5034 project_id: ProjectId,
5035 client: AnyProtoClient,
5036 cx: &mut Context<Self>,
5037 ) -> mpsc::UnboundedSender<GitJob> {
5038 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5039
5040 cx.spawn(async move |_, cx| {
5041 let state = RepositoryState::Remote { project_id, client };
5042 let mut jobs = VecDeque::new();
5043 loop {
5044 while let Ok(Some(next_job)) = job_rx.try_next() {
5045 jobs.push_back(next_job);
5046 }
5047
5048 if let Some(job) = jobs.pop_front() {
5049 if let Some(current_key) = &job.key
5050 && jobs
5051 .iter()
5052 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5053 {
5054 continue;
5055 }
5056 (job.job)(state.clone(), cx).await;
5057 } else if let Some(job) = job_rx.next().await {
5058 jobs.push_back(job);
5059 } else {
5060 break;
5061 }
5062 }
5063 anyhow::Ok(())
5064 })
5065 .detach_and_log_err(cx);
5066
5067 job_tx
5068 }
5069
5070 fn load_staged_text(
5071 &mut self,
5072 buffer_id: BufferId,
5073 repo_path: RepoPath,
5074 cx: &App,
5075 ) -> Task<Result<Option<String>>> {
5076 let rx = self.send_job(None, move |state, _| async move {
5077 match state {
5078 RepositoryState::Local { backend, .. } => {
5079 anyhow::Ok(backend.load_index_text(repo_path).await)
5080 }
5081 RepositoryState::Remote { project_id, client } => {
5082 let response = client
5083 .request(proto::OpenUnstagedDiff {
5084 project_id: project_id.to_proto(),
5085 buffer_id: buffer_id.to_proto(),
5086 })
5087 .await?;
5088 Ok(response.staged_text)
5089 }
5090 }
5091 });
5092 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5093 }
5094
5095 fn load_committed_text(
5096 &mut self,
5097 buffer_id: BufferId,
5098 repo_path: RepoPath,
5099 cx: &App,
5100 ) -> Task<Result<DiffBasesChange>> {
5101 let rx = self.send_job(None, move |state, _| async move {
5102 match state {
5103 RepositoryState::Local { backend, .. } => {
5104 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5105 let staged_text = backend.load_index_text(repo_path).await;
5106 let diff_bases_change = if committed_text == staged_text {
5107 DiffBasesChange::SetBoth(committed_text)
5108 } else {
5109 DiffBasesChange::SetEach {
5110 index: staged_text,
5111 head: committed_text,
5112 }
5113 };
5114 anyhow::Ok(diff_bases_change)
5115 }
5116 RepositoryState::Remote { project_id, client } => {
5117 use proto::open_uncommitted_diff_response::Mode;
5118
5119 let response = client
5120 .request(proto::OpenUncommittedDiff {
5121 project_id: project_id.to_proto(),
5122 buffer_id: buffer_id.to_proto(),
5123 })
5124 .await?;
5125 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5126 let bases = match mode {
5127 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5128 Mode::IndexAndHead => DiffBasesChange::SetEach {
5129 head: response.committed_text,
5130 index: response.staged_text,
5131 },
5132 };
5133 Ok(bases)
5134 }
5135 }
5136 });
5137
5138 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5139 }
5140 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5141 let repository_id = self.snapshot.id;
5142 let rx = self.send_job(None, move |state, _| async move {
5143 match state {
5144 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5145 RepositoryState::Remote { client, project_id } => {
5146 let response = client
5147 .request(proto::GetBlobContent {
5148 project_id: project_id.to_proto(),
5149 repository_id: repository_id.0,
5150 oid: oid.to_string(),
5151 })
5152 .await?;
5153 Ok(response.content)
5154 }
5155 }
5156 });
5157 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5158 }
5159
5160 fn paths_changed(
5161 &mut self,
5162 paths: Vec<RepoPath>,
5163 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5164 cx: &mut Context<Self>,
5165 ) {
5166 self.paths_needing_status_update.extend(paths);
5167
5168 let this = cx.weak_entity();
5169 let _ = self.send_keyed_job(
5170 Some(GitJobKey::RefreshStatuses),
5171 None,
5172 |state, mut cx| async move {
5173 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5174 (
5175 this.snapshot.clone(),
5176 mem::take(&mut this.paths_needing_status_update),
5177 )
5178 })?;
5179 let RepositoryState::Local { backend, .. } = state else {
5180 bail!("not a local repository")
5181 };
5182
5183 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5184 if paths.is_empty() {
5185 return Ok(());
5186 }
5187 let statuses = backend.status(&paths).await?;
5188 let stash_entries = backend.stash_entries().await?;
5189
5190 let changed_path_statuses = cx
5191 .background_spawn(async move {
5192 let mut changed_path_statuses = Vec::new();
5193 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5194 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5195
5196 for (repo_path, status) in &*statuses.entries {
5197 changed_paths.remove(repo_path);
5198 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5199 && cursor.item().is_some_and(|entry| entry.status == *status)
5200 {
5201 continue;
5202 }
5203
5204 changed_path_statuses.push(Edit::Insert(StatusEntry {
5205 repo_path: repo_path.clone(),
5206 status: *status,
5207 }));
5208 }
5209 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5210 for path in changed_paths.into_iter() {
5211 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5212 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
5213 }
5214 }
5215 changed_path_statuses
5216 })
5217 .await;
5218
5219 this.update(&mut cx, |this, cx| {
5220 if this.snapshot.stash_entries != stash_entries {
5221 cx.emit(RepositoryEvent::StashEntriesChanged);
5222 this.snapshot.stash_entries = stash_entries;
5223 }
5224
5225 if !changed_path_statuses.is_empty() {
5226 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
5227 this.snapshot
5228 .statuses_by_path
5229 .edit(changed_path_statuses, ());
5230 this.snapshot.scan_id += 1;
5231 }
5232
5233 if let Some(updates_tx) = updates_tx {
5234 updates_tx
5235 .unbounded_send(DownstreamUpdate::UpdateRepository(
5236 this.snapshot.clone(),
5237 ))
5238 .ok();
5239 }
5240 })
5241 },
5242 );
5243 }
5244
5245 /// Returns information about a currently running git command, if any, including when it started.
5246 pub fn current_job(&self) -> Option<JobInfo> {
5247 self.active_jobs.values().next().cloned()
5248 }
5249
5250 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5251 self.send_job(None, |_, _| async {})
5252 }
5253
5254 fn spawn_job_with_tracking<AsyncFn>(
5255 &mut self,
5256 paths: Vec<RepoPath>,
5257 git_status: pending_op::GitStatus,
5258 cx: &mut Context<Self>,
5259 f: AsyncFn,
5260 ) -> Task<Result<()>>
5261 where
5262 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5263 {
5264 let ids = self.new_pending_ops_for_paths(paths, git_status);
5265
5266 cx.spawn(async move |this, cx| {
5267 let (job_status, result) = match f(this.clone(), cx).await {
5268 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5269 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5270 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5271 };
5272
5273 this.update(cx, |this, _| {
5274 let mut edits = Vec::with_capacity(ids.len());
5275 for (id, entry) in ids {
5276 if let Some(mut ops) = this.snapshot.pending_ops_for_path(&entry) {
5277 if let Some(op) = ops.op_by_id_mut(id) {
5278 op.job_status = job_status;
5279 }
5280 edits.push(sum_tree::Edit::Insert(ops));
5281 }
5282 }
5283 this.snapshot.pending_ops_by_path.edit(edits, ());
5284 })?;
5285
5286 result
5287 })
5288 }
5289
5290 fn new_pending_ops_for_paths(
5291 &mut self,
5292 paths: Vec<RepoPath>,
5293 git_status: pending_op::GitStatus,
5294 ) -> Vec<(PendingOpId, RepoPath)> {
5295 let mut edits = Vec::with_capacity(paths.len());
5296 let mut ids = Vec::with_capacity(paths.len());
5297 for path in paths {
5298 let mut ops = self
5299 .snapshot
5300 .pending_ops_for_path(&path)
5301 .unwrap_or_else(|| PendingOps::new(&path));
5302 let id = ops.max_id() + 1;
5303 ops.ops.push(PendingOp {
5304 id,
5305 git_status,
5306 job_status: pending_op::JobStatus::Running,
5307 });
5308 edits.push(sum_tree::Edit::Insert(ops));
5309 ids.push((id, path));
5310 }
5311 self.snapshot.pending_ops_by_path.edit(edits, ());
5312 ids
5313 }
5314}
5315
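/// Builds a permalink for a file that lives inside a published crate's sources as
/// unpacked into the cargo registry: it locates the adjacent `.cargo_vcs_info.json`
/// to get the exact sha and in-repo path prefix, reads the crate's
/// `package.repository` from `Cargo.toml`, and asks the matching hosting provider
/// to build a URL for the selected line range.
///
/// Hedged sketch; the registry handle and path below are illustrative, not real.
///
/// ```ignore
/// let url = get_permalink_in_rust_registry_src(
///     provider_registry, // Arc<GitHostingProviderRegistry>, however the caller obtains it
///     PathBuf::from("/some/registry/src/serde-1.0.0/src/lib.rs"),
///     10..20,
/// )?;
/// ```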
5316fn get_permalink_in_rust_registry_src(
5317 provider_registry: Arc<GitHostingProviderRegistry>,
5318 path: PathBuf,
5319 selection: Range<u32>,
5320) -> Result<url::Url> {
5321 #[derive(Deserialize)]
5322 struct CargoVcsGit {
5323 sha1: String,
5324 }
5325
5326 #[derive(Deserialize)]
5327 struct CargoVcsInfo {
5328 git: CargoVcsGit,
5329 path_in_vcs: String,
5330 }
5331
5332 #[derive(Deserialize)]
5333 struct CargoPackage {
5334 repository: String,
5335 }
5336
5337 #[derive(Deserialize)]
5338 struct CargoToml {
5339 package: CargoPackage,
5340 }
5341
5342 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5343 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5344 Some((dir, json))
5345 }) else {
5346 bail!("No .cargo_vcs_info.json found in parent directories")
5347 };
5348 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5349 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5350 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5351 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5352 .context("parsing package.repository field of manifest")?;
5353 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5354 let permalink = provider.build_permalink(
5355 remote,
5356 BuildPermalinkParams::new(
5357 &cargo_vcs_info.git.sha1,
5358 &RepoPath(
5359 RelPath::new(&path, PathStyle::local())
5360 .context("invalid path")?
5361 .into_arc(),
5362 ),
5363 Some(selection),
5364 ),
5365 );
5366 Ok(permalink)
5367}
5368
5369fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5370 let Some(blame) = blame else {
5371 return proto::BlameBufferResponse {
5372 blame_response: None,
5373 };
5374 };
5375
5376 let entries = blame
5377 .entries
5378 .into_iter()
5379 .map(|entry| proto::BlameEntry {
5380 sha: entry.sha.as_bytes().into(),
5381 start_line: entry.range.start,
5382 end_line: entry.range.end,
5383 original_line_number: entry.original_line_number,
5384 author: entry.author,
5385 author_mail: entry.author_mail,
5386 author_time: entry.author_time,
5387 author_tz: entry.author_tz,
5388 committer: entry.committer_name,
5389 committer_mail: entry.committer_email,
5390 committer_time: entry.committer_time,
5391 committer_tz: entry.committer_tz,
5392 summary: entry.summary,
5393 previous: entry.previous,
5394 filename: entry.filename,
5395 })
5396 .collect::<Vec<_>>();
5397
5398 let messages = blame
5399 .messages
5400 .into_iter()
5401 .map(|(oid, message)| proto::CommitMessage {
5402 oid: oid.as_bytes().into(),
5403 message,
5404 })
5405 .collect::<Vec<_>>();
5406
5407 proto::BlameBufferResponse {
5408 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5409 entries,
5410 messages,
5411 remote_url: blame.remote_url,
5412 }),
5413 }
5414}
5415
5416fn deserialize_blame_buffer_response(
5417 response: proto::BlameBufferResponse,
5418) -> Option<git::blame::Blame> {
5419 let response = response.blame_response?;
5420 let entries = response
5421 .entries
5422 .into_iter()
5423 .filter_map(|entry| {
5424 Some(git::blame::BlameEntry {
5425 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5426 range: entry.start_line..entry.end_line,
5427 original_line_number: entry.original_line_number,
5428 committer_name: entry.committer,
5429 committer_time: entry.committer_time,
5430 committer_tz: entry.committer_tz,
5431 committer_email: entry.committer_mail,
5432 author: entry.author,
5433 author_mail: entry.author_mail,
5434 author_time: entry.author_time,
5435 author_tz: entry.author_tz,
5436 summary: entry.summary,
5437 previous: entry.previous,
5438 filename: entry.filename,
5439 })
5440 })
5441 .collect::<Vec<_>>();
5442
5443 let messages = response
5444 .messages
5445 .into_iter()
5446 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5447 .collect::<HashMap<_, _>>();
5448
5449 Some(Blame {
5450 entries,
5451 messages,
5452 remote_url: response.remote_url,
5453 })
5454}
5455
5456fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5457 proto::Branch {
5458 is_head: branch.is_head,
5459 ref_name: branch.ref_name.to_string(),
5460 unix_timestamp: branch
5461 .most_recent_commit
5462 .as_ref()
5463 .map(|commit| commit.commit_timestamp as u64),
5464 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5465 ref_name: upstream.ref_name.to_string(),
5466 tracking: upstream
5467 .tracking
5468 .status()
5469 .map(|upstream| proto::UpstreamTracking {
5470 ahead: upstream.ahead as u64,
5471 behind: upstream.behind as u64,
5472 }),
5473 }),
5474 most_recent_commit: branch
5475 .most_recent_commit
5476 .as_ref()
5477 .map(|commit| proto::CommitSummary {
5478 sha: commit.sha.to_string(),
5479 subject: commit.subject.to_string(),
5480 commit_timestamp: commit.commit_timestamp,
5481 author_name: commit.author_name.to_string(),
5482 }),
5483 }
5484}
5485
5486fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5487 proto::Worktree {
5488 path: worktree.path.to_string_lossy().to_string(),
5489 ref_name: worktree.ref_name.to_string(),
5490 sha: worktree.sha.to_string(),
5491 }
5492}
5493
5494fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5495 git::repository::Worktree {
5496 path: PathBuf::from(proto.path.clone()),
5497 ref_name: proto.ref_name.clone().into(),
5498 sha: proto.sha.clone().into(),
5499 }
5500}
5501
5502fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5503 git::repository::Branch {
5504 is_head: proto.is_head,
5505 ref_name: proto.ref_name.clone().into(),
5506 upstream: proto
5507 .upstream
5508 .as_ref()
5509 .map(|upstream| git::repository::Upstream {
5510 ref_name: upstream.ref_name.to_string().into(),
5511 tracking: upstream
5512 .tracking
5513 .as_ref()
5514 .map(|tracking| {
5515 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5516 ahead: tracking.ahead as u32,
5517 behind: tracking.behind as u32,
5518 })
5519 })
5520 .unwrap_or(git::repository::UpstreamTracking::Gone),
5521 }),
5522 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5523 git::repository::CommitSummary {
5524 sha: commit.sha.to_string().into(),
5525 subject: commit.subject.to_string().into(),
5526 commit_timestamp: commit.commit_timestamp,
5527 author_name: commit.author_name.to_string().into(),
5528 has_parent: true,
5529 }
5530 }),
5531 }
5532}
5533
5534fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5535 proto::GitCommitDetails {
5536 sha: commit.sha.to_string(),
5537 message: commit.message.to_string(),
5538 commit_timestamp: commit.commit_timestamp,
5539 author_email: commit.author_email.to_string(),
5540 author_name: commit.author_name.to_string(),
5541 }
5542}
5543
5544fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5545 CommitDetails {
5546 sha: proto.sha.clone().into(),
5547 message: proto.message.clone().into(),
5548 commit_timestamp: proto.commit_timestamp,
5549 author_email: proto.author_email.clone().into(),
5550 author_name: proto.author_name.clone().into(),
5551 }
5552}
5553
5554async fn compute_snapshot(
5555 id: RepositoryId,
5556 work_directory_abs_path: Arc<Path>,
5557 prev_snapshot: RepositorySnapshot,
5558 backend: Arc<dyn GitRepository>,
5559) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5560 let mut events = Vec::new();
5561 let branches = backend.branches().await?;
5562 let branch = branches.into_iter().find(|branch| branch.is_head);
5563 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5564 let stash_entries = backend.stash_entries().await?;
5565 let statuses_by_path = SumTree::from_iter(
5566 statuses
5567 .entries
5568 .iter()
5569 .map(|(repo_path, status)| StatusEntry {
5570 repo_path: repo_path.clone(),
5571 status: *status,
5572 }),
5573 (),
5574 );
5575 let (merge_details, merge_heads_changed) =
5576 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5577 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5578
5579 let pending_ops_by_path = SumTree::from_iter(
5580 prev_snapshot.pending_ops_by_path.iter().filter_map(|ops| {
5581 let inner_ops: Vec<PendingOp> =
5582 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5583 if inner_ops.is_empty() {
5584 None
5585 } else {
5586 Some(PendingOps {
5587 repo_path: ops.repo_path.clone(),
5588 ops: inner_ops,
5589 })
5590 }
5591 }),
5592 (),
5593 );
5594
5595 if pending_ops_by_path != prev_snapshot.pending_ops_by_path {
5596 events.push(RepositoryEvent::PendingOpsChanged {
5597 pending_ops: prev_snapshot.pending_ops_by_path.clone(),
5598 })
5599 }
5600
5601 if merge_heads_changed {
5602 events.push(RepositoryEvent::MergeHeadsChanged);
5603 }
5604
5605 if statuses_by_path != prev_snapshot.statuses_by_path {
5606 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5607 }
5608
5609 // Resolve the HEAD commit explicitly; this covers the detached-HEAD case, where `branch` is None.
5610 let head_commit = match backend.head_sha().await {
5611 Some(head_sha) => backend.show(head_sha).await.log_err(),
5612 None => None,
5613 };
5614
5615 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5616 events.push(RepositoryEvent::BranchChanged);
5617 }
5618
5619 // Used by edit prediction data collection
5620 let remote_origin_url = backend.remote_url("origin");
5621 let remote_upstream_url = backend.remote_url("upstream");
5622
5623 let snapshot = RepositorySnapshot {
5624 id,
5625 statuses_by_path,
5626 pending_ops_by_path,
5627 work_directory_abs_path,
5628 path_style: prev_snapshot.path_style,
5629 scan_id: prev_snapshot.scan_id + 1,
5630 branch,
5631 head_commit,
5632 merge: merge_details,
5633 remote_origin_url,
5634 remote_upstream_url,
5635 stash_entries,
5636 };
5637
5638 Ok((snapshot, events))
5639}
5640
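/// Converts a protobuf status into a `FileStatus`. When the structured `variant`
/// is present it takes precedence; otherwise the legacy `simple_status` code is
/// mapped onto a tracked or unmerged status.
///
/// Sketch of the fallback path (values are illustrative):
///
/// ```ignore
/// let status = status_from_proto(proto::GitStatus::Modified as i32, None)?;
/// assert!(matches!(status, FileStatus::Tracked(_)));
/// ```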
5641fn status_from_proto(
5642 simple_status: i32,
5643 status: Option<proto::GitFileStatus>,
5644) -> anyhow::Result<FileStatus> {
5645 use proto::git_file_status::Variant;
5646
5647 let Some(variant) = status.and_then(|status| status.variant) else {
5648 let code = proto::GitStatus::from_i32(simple_status)
5649 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5650 let result = match code {
5651 proto::GitStatus::Added => TrackedStatus {
5652 worktree_status: StatusCode::Added,
5653 index_status: StatusCode::Unmodified,
5654 }
5655 .into(),
5656 proto::GitStatus::Modified => TrackedStatus {
5657 worktree_status: StatusCode::Modified,
5658 index_status: StatusCode::Unmodified,
5659 }
5660 .into(),
5661 proto::GitStatus::Conflict => UnmergedStatus {
5662 first_head: UnmergedStatusCode::Updated,
5663 second_head: UnmergedStatusCode::Updated,
5664 }
5665 .into(),
5666 proto::GitStatus::Deleted => TrackedStatus {
5667 worktree_status: StatusCode::Deleted,
5668 index_status: StatusCode::Unmodified,
5669 }
5670 .into(),
5671 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5672 };
5673 return Ok(result);
5674 };
5675
5676 let result = match variant {
5677 Variant::Untracked(_) => FileStatus::Untracked,
5678 Variant::Ignored(_) => FileStatus::Ignored,
5679 Variant::Unmerged(unmerged) => {
5680 let [first_head, second_head] =
5681 [unmerged.first_head, unmerged.second_head].map(|head| {
5682 let code = proto::GitStatus::from_i32(head)
5683 .with_context(|| format!("Invalid git status code: {head}"))?;
5684 let result = match code {
5685 proto::GitStatus::Added => UnmergedStatusCode::Added,
5686 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5687 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5688 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5689 };
5690 Ok(result)
5691 });
5692 let [first_head, second_head] = [first_head?, second_head?];
5693 UnmergedStatus {
5694 first_head,
5695 second_head,
5696 }
5697 .into()
5698 }
5699 Variant::Tracked(tracked) => {
5700 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5701 .map(|status| {
5702 let code = proto::GitStatus::from_i32(status)
5703 .with_context(|| format!("Invalid git status code: {status}"))?;
5704 let result = match code {
5705 proto::GitStatus::Modified => StatusCode::Modified,
5706 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5707 proto::GitStatus::Added => StatusCode::Added,
5708 proto::GitStatus::Deleted => StatusCode::Deleted,
5709 proto::GitStatus::Renamed => StatusCode::Renamed,
5710 proto::GitStatus::Copied => StatusCode::Copied,
5711 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5712 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5713 };
5714 Ok(result)
5715 });
5716 let [index_status, worktree_status] = [index_status?, worktree_status?];
5717 TrackedStatus {
5718 index_status,
5719 worktree_status,
5720 }
5721 .into()
5722 }
5723 };
5724 Ok(result)
5725}
5726
5727fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5728 use proto::git_file_status::{Tracked, Unmerged, Variant};
5729
5730 let variant = match status {
5731 FileStatus::Untracked => Variant::Untracked(Default::default()),
5732 FileStatus::Ignored => Variant::Ignored(Default::default()),
5733 FileStatus::Unmerged(UnmergedStatus {
5734 first_head,
5735 second_head,
5736 }) => Variant::Unmerged(Unmerged {
5737 first_head: unmerged_status_to_proto(first_head),
5738 second_head: unmerged_status_to_proto(second_head),
5739 }),
5740 FileStatus::Tracked(TrackedStatus {
5741 index_status,
5742 worktree_status,
5743 }) => Variant::Tracked(Tracked {
5744 index_status: tracked_status_to_proto(index_status),
5745 worktree_status: tracked_status_to_proto(worktree_status),
5746 }),
5747 };
5748 proto::GitFileStatus {
5749 variant: Some(variant),
5750 }
5751}
5752
5753fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5754 match code {
5755 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5756 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5757 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5758 }
5759}
5760
5761fn tracked_status_to_proto(code: StatusCode) -> i32 {
5762 match code {
5763 StatusCode::Added => proto::GitStatus::Added as _,
5764 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5765 StatusCode::Modified => proto::GitStatus::Modified as _,
5766 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5767 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5768 StatusCode::Copied => proto::GitStatus::Copied as _,
5769 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5770 }
5771}